query            stringlengths    7 .. 9.55k
document         stringlengths    10 .. 363k
metadata         dict
negatives        sequencelengths  0 .. 101
negative_scores  sequencelengths  0 .. 101
document_score   stringlengths    3 .. 10
document_rank    stringclasses    102 values
Adds all specified classes/modules to the hidden list. E.g. it is a good idea to ignore +Kernel+, as this module is used almost everywhere.
def hide(*classOrModules) classOrModules.each do |classOrModule| @ignored[classOrModule] = true end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hide_current_state\n\t\t@classes = Array.new\n\t\tObjectSpace.each_object(Class) do |klass|\n\t\t\[email protected] klass\n\t\tend\n\t\t@modules = Hash.new\n\t\tObjectSpace.each_object(Module) do |mod|\n\t\t\t@modules[mod] = true\n\t\tend\n\tend", "def hide_tree(*classOrModules)\n\t\tclassOrModules.each do |classOrModule|\n\t\t\t@ignored_tree[classOrModule] = true\n\t\tend\n\tend", "def all_modules klass, trash = []\n trash = [*trash]\n trash += [klass]\n children = shallow(klass, trash)\n all = [klass, children.\n map { |x| all_modules x, trash + children }].\n flatten.select { |x| Module === x }\n all\n end", "def add_to_classes_or_modules mod\n @classes_or_modules << mod\n end", "def remove_classes_and_modules\n initialize_classes_and_modules\n end", "def classes_and_modules\n classes + modules\n end", "def classes_and_modules\n classes + modules\n end", "def get_sorted_module_list classes\n classes.select do |klass|\n klass.display?\n end.sort\n end", "def all_classes_and_modules\n @classes_hash.values + @modules_hash.values\n end", "def boring_classes\n return [::Class, *::Class.included_modules,\n ::Module, *::Module.included_modules,\n ::Kernel, *::Kernel.included_modules,\n ::Object, *::Object.included_modules,\n ::BasicObject, *::BasicObject.included_modules].uniq\n end", "def class_modules(*mods)\n @class_modules ||= [ \n Command::ClassMethods,\n Command::Options::ClassMethods,\n ]\n @class_modules += mods\n end", "def without_modules(*modules); end", "def without_modules(*modules); end", "def included_in_modules\n modules = []\n ObjectSpace.each_object(Module) { |k| modules << k if k.included_modules.include?(self) }\n\n modules.reverse.inject([]) do |unique_modules, klass|\n unique_modules << klass unless unique_modules.collect { |k| k.to_s }.include?(klass.to_s)\n unique_modules\n end\n end", "def hide_singletons(*stuff)\n hide_methods(singleton_class, [Class], *stuff)\n end", "def all_classes_and_modules\n result = []\n ObjectSpace.each_object(Module) { |m| result << m }\n result.sort_by {|m| m.name}\nend", "def all_hidden(&block)\n @all_hidden = true\n yield\n ensure\n @all_hidden = nil\n end", "def list_known_classes names = []\n classes = []\n\n stores.each do |store|\n classes << store.module_names\n end\n\n classes = classes.flatten.uniq.sort\n\n unless names.empty? then\n filter = Regexp.union names.map { |name| /^#{name}/ }\n\n classes = classes.grep filter\n end\n\n page do |io|\n if paging? or io.tty? then\n if names.empty? then\n io.puts \"Classes and Modules known to ri:\"\n else\n io.puts \"Classes and Modules starting with #{names.join ', '}:\"\n end\n io.puts\n end\n\n io.puts classes.join(\"\\n\")\n end\n end", "def list_known_classes\n end", "def remove_invisible min_visibility\n return if [:private, :nodoc].include? min_visibility\n remove_invisible_in @method_list, min_visibility\n remove_invisible_in @attributes, min_visibility\n remove_invisible_in @constants, min_visibility\n end", "def list_known_classes(classes)\n raise NotImplementedError\n end", "def list_known_classes names = []\n classes = []\n stores.each do |store|\n classes << store.modules\n end\n classes = classes.flatten.uniq.sort\n unless names.empty? 
then\n filter = Regexp.union names.map { |name| /^#{name}/ }\n classes = classes.grep filter\n end\n puts classes.join(\"\\n\")\n end", "def complete min_visibility\n fix_basic_object_inheritance\n\n # cache included modules before they are removed from the documentation\n all_classes_and_modules.each { |cm| cm.ancestors }\n\n unless min_visibility == :nodoc then\n remove_nodoc @classes_hash\n remove_nodoc @modules_hash\n end\n\n @unique_classes = find_unique @classes_hash\n @unique_modules = find_unique @modules_hash\n\n unique_classes_and_modules.each do |cm|\n cm.complete min_visibility\n end\n\n @files_hash.each_key do |file_name|\n tl = @files_hash[file_name]\n\n unless tl.text? then\n tl.modules_hash.clear\n tl.classes_hash.clear\n\n tl.classes_or_modules.each do |cm|\n name = cm.full_name\n if cm.type == 'class' then\n tl.classes_hash[name] = cm if @classes_hash[name]\n else\n tl.modules_hash[name] = cm if @modules_hash[name]\n end\n end\n end\n end\n end", "def ignored_classes\n Set.new [\n # 'Vedeu::API',\n # 'Vedeu::Application',\n # 'Vedeu::Background',\n # 'Vedeu::Buffers',\n # 'Vedeu::Clear',\n 'Vedeu::Coercions',\n 'Vedeu::Colour',\n 'Vedeu::ColourTranslator',\n 'Vedeu::Common',\n # 'Vedeu::Composition',\n # 'Vedeu::Compositor',\n 'Vedeu::Configuration',\n # 'Vedeu::Cursor',\n # 'Vedeu::Cursors',\n 'Vedeu::Esc',\n 'Vedeu::Event',\n # 'Vedeu::Events',\n # 'Vedeu::Focus',\n # 'Vedeu::Foreground',\n 'Vedeu::Geometry',\n # 'Vedeu::Grid',\n # 'Vedeu::Groups',\n # 'Vedeu::Input',\n # 'Vedeu::Interface',\n # 'Vedeu::Interfaces',\n # 'Vedeu::Keymap',\n # 'Vedeu::Keymaps',\n # 'Vedeu::KeymapValidator',\n # 'Vedeu::Launcher',\n # 'Vedeu::Line',\n 'Vedeu::Log',\n # 'Vedeu::Menu',\n # 'Vedeu::Menus',\n 'Vedeu::MonoLogger',\n # 'Vedeu::Offset',\n # 'Vedeu::Offsets',\n 'Vedeu::Position',\n 'Vedeu::Presentation',\n # 'Vedeu::Refresh',\n # 'Vedeu::Registrar',\n # 'Vedeu::Render',\n 'Vedeu::Repository',\n 'Vedeu::Stream',\n 'Vedeu::Style',\n 'Vedeu::Terminal',\n 'Vedeu::Trace',\n # 'Vedeu::View',\n # 'Vedeu::Viewport',\n ]\n end", "def include_modules\n @mods = Devium::PageBuilder.new(page, details).parse_page_modules\n mods.map do |mod|\n extend Object.const_get(\"#{self.class}::#{mod}\")\n end\n end", "def instance_modules(*mods)\n @instance_modules ||= [ \n Command::InstanceMethods, \n Command::Options::InstanceMethods\n ]\n @instance_modules += mods\n end", "def add_all_commands(mode = :verbose )\n \tbegin\n \t @exclude ||= []\t\n \t all = private_methods(false) - %w(initialize method_missing) - @exclude\n \t add_commands all, mode\n \trescue Exception => e\n\t puts_ex e\n\t end \n \tend", "def mark_includes_modules\n check_definition_state\n @includes_modules = true\n self\n end", "def display_klasses(with_modules=false, show_internal=false, klasses=Thor::Base.subclasses)\n klasses -= [Thor, Main, ::Apipie::Client::CliCommand, ::Thor] unless show_internal\n\n show_modules if with_modules && !thor_yaml.empty?\n\n list = Hash.new { |h, k| h[k] = [] }\n groups = []\n\n # Get classes which inherit from Thor\n (klasses - groups).each { |k| list[k.namespace.split(\":\").first] += k.printable_tasks(false) }\n\n # Get classes which inherit from Thor::Base\n groups.map! { |k| k.printable_tasks(false).first }\n list[\"root\"] = groups\n\n # Order namespaces with default coming first\n list = list.sort { |a, b| a[0].sub(/^default/, '') <=> b[0].sub(/^default/, '') }\n list.each { |n, tasks| display_tasks(n, tasks) unless tasks.empty? 
}\n end", "def hide_methods(mod, except_defaults, *stuff)\n options = stuff.last.is_a?(Hash) ? stuff.pop : {}\n include_ancestors = options.fetch(:ancestors){false}\n except = Array(options.fetch(:except){except_defaults})\n protect = Array(options[:protect])\n except_methods = collect_methods(true, *except)\n protect_methods = collect_methods(true, *protect)\n methods_to_hide = collect_methods(include_ancestors, *stuff)\n (methods_to_hide - except_methods).each do |method_name|\n mod.module_eval do\n next unless method_defined?(method_name)\n if protect_methods.include?(method_name)\n protected method_name\n else\n private method_name\n end\n end\n end\n end", "def all_modules_implementing(selector)\n self.all_modules.select { |mod| mod.includes_selector?(selector) }\n end", "def hide!\n singleton_class.send(:define_method, :nodoc) { true }\n end", "def include(*modules)\n modules.reverse_each do |mod|\n if !mod.kind_of?(Module) or mod.kind_of?(Class)\n raise TypeError, \"wrong argument type #{mod.class} (expected Module)\"\n end\n\n Rubinius.privately do\n mod.append_features self\n end\n\n Rubinius.privately do\n mod.included self\n end\n end\n self\n end", "def include(*modules)\n modules.reverse_each do |mod|\n if !mod.kind_of?(Module) or mod.kind_of?(Class)\n raise TypeError, \"wrong argument type #{mod.class} (expected Module)\"\n end\n\n Rubinius.privately do\n mod.append_features self\n end\n\n Rubinius.privately do\n mod.included self\n end\n end\n self\n end", "def record_class_definitions\n extant, novel = [], []\n ObjectSpace.each_object(Class) { |k| extant << k }\n yield\n ObjectSpace.each_object(Class) { |k| novel << k if !extant.include?(k) }\n novel\n end", "def prepended_modules; end", "def included_modules() end", "def add_module_by_normal_module(mod)\n add_class_or_module mod, @modules, @store.modules_hash\n end", "def unique_classes_and_modules\n @unique_classes + @unique_modules\n end", "def uses(*classes)\n class_helpers.push(*classes).uniq!\n end", "def all_methods\n self.all_classes_and_modules.map do |klassmod|\n klassmod.own_methods.as_array\n end.flatten\n end", "def hide(*stuff)\n hide_methods(self, [Object], *stuff)\n end", "def display_klasses(with_modules = false, show_internal = false, klasses = Foreman::Thor::Base.subclasses)\n klasses -= [Foreman::Thor, Foreman::Thor::Runner, Foreman::Thor::Group] unless show_internal\n\n raise Error, \"No Foreman::Thor commands available\" if klasses.empty?\n show_modules if with_modules && !thor_yaml.empty?\n\n list = Hash.new { |h, k| h[k] = [] }\n groups = klasses.select { |k| k.ancestors.include?(Foreman::Thor::Group) }\n\n # Get classes which inherit from Foreman::Thor\n (klasses - groups).each { |k| list[k.namespace.split(\":\").first] += k.printable_commands(false) }\n\n # Get classes which inherit from Foreman::Thor::Base\n groups.map! { |k| k.printable_commands(false).first }\n list[\"root\"] = groups\n\n # Order namespaces with default coming first\n list = list.sort { |a, b| a[0].sub(/^default/, \"\") <=> b[0].sub(/^default/, \"\") }\n list.each { |n, commands| display_commands(n, commands) unless commands.empty? }\n end", "def modules; end", "def modules; end", "def modules; end", "def all_c_methods\n self.all_classes_and_modules.flat_map do |klassmod|\n klassmod.select_c_methods\n end\n end", "def include_hidden!\n @flags |= File::FNM_DOTMATCH\n end", "def hidden_class\n model.is_hidden? ? 
[\"kono_utils-hidden\"] : []\n end", "def each_classmodule(&block) # :yields: module\n classes_and_modules.sort.each(&block)\n end", "def each_classmodule(&block) # :yields: module\n classes_and_modules.sort.each(&block)\n end", "def modules\n @modules = @modules.call if @modules.is_a?(Proc)\n @modules\n end", "def add(klass)\n @known_classes << klass\n end", "def load_modules\n @log.info \"Loading modules...\"\n\n modules.each do |mod|\n require mod[:file]\n klass = eval(mod[:class_name])\n\n self.on_command klass.main_command do |command, from|\n self.send_message from, klass.exec_command(command)\n end\n end\n end", "def modules_for_helpers(args)\n args += all_application_helpers if args.delete(:all)\n super(args)\n end", "def prune_dependencies\n class_names = @classes.map {|klass| klass.name}\n @classes.each do |klass|\n klass.dependencies = klass.dependencies.uniq.keep_if {|dep| class_names.include?(dep)}\n end\n end", "def lookup_classes(met)\n new_klasses = @module_collection.keys\n res = Hash.new { |h, k| h[k] = [] }\n @method_collection.list.select { |method_info| method_info.name == met.to_sym }.map do |method_info|\n m_owner = method_info.owner\n # add method_info myself to res\n res[m_owner] << MethodStat.new(method_info, ModuleInfo.root(m_owner), 0)\n\n owner_module_info = find_module_info(method_info)\n new_klasses.each do |klass|\n next if klass == method_info.owner\n if (idx = klass.ancestors.index(method_info.owner))\n raise ModuleNotFoundError if owner_module_info.nil?\n res[klass] << MethodStat.new(method_info, owner_module_info, idx - self.class.offset(klass))\n end\n end\n end\n res\n end", "def remove_nodoc_children\n prefix = self.full_name + '::'\n\n modules_hash.each_key do |name|\n full_name = prefix + name\n modules_hash.delete name unless @store.modules_hash[full_name]\n end\n\n classes_hash.each_key do |name|\n full_name = prefix + name\n classes_hash.delete name unless @store.classes_hash[full_name]\n end\n end", "def modules\n yield self\n end", "def expose(*meths)\n @exposed ||= []\n meths.each do |meth|\n @exposed << meth unless @exposed.include?(meth)\n end\n @exposed\n end", "def extended_modules; end", "def included_methods\n included_modules.map(&:instance_methods).flatten\n end", "def included_modules; end", "def add_module(class_type, name)\n return @classes[name] if @classes.key? name\n\n add_class_or_module @modules, class_type, name, nil\n end", "def force_load_set\n\t\teach_module { |name, mod|\n\t\t}\n\tend", "def method_missing(method, *args)\n add_module(method, *args)\n end", "def list_modules\n pal.list_modules\n end", "def included_modules\n end", "def including_modules\n if block_given?\n ::Module.instances do |m|\n yield m if m.included_modules.include? 
self\n end\n else\n enum_for :including_modules\n end\n end", "def exclude\n all - methods\n end", "def include_into(*klasses)\n klasses.flatten!\n klasses.each do |klass|\n (@@class_mixins[klass] ||= []) << self\n @@class_mixins[klass].uniq!\n end\n end", "def attach_outer_class_methods!(inherited = false)\n outer_module = Object.const_get(outer_module_name(name))\n outer_module.methods(inherited).each do |m|\n attach_function m\n end\n end", "def force_load_set\n each_module { |name, mod| }\n end", "def reset!\n @modules = []\n end", "def extend(mod)\n @modules << mod\n super(mod)\n end", "def modules\n @modules ||= Array.new\n @modules\nend", "def disgems\n Recommendable.config.ratable_classes.map { |klass| disgemd_for(klass) }.flatten\n end", "def add_classes(*args)\n args.each {|x| self.add_class(x) }\n return self\n end", "def all_rb_methods\n self.all_classes_and_modules.flat_map do |klassmod|\n klassmod.select_rb_methods\n end\n end", "def remove_classes(*args)\n args.each {|x| self.remove_class(x) }\n return self\n end", "def hidden?\n classes.include?('hidden')\n end", "def add_class(new_class)\n @classes << new_class\n end", "def service_modules\n modules = []\n ObjectSpace.each_object(Module) do |clazz|\n if clazz < Thrift::Client\n modules << qualified_const(clazz.name.split('::')[0..-2].join('::')) # i miss activesupport...\n end\n end\n modules.delete(Thrift)\n modules\n end", "def get_class_or_module container, ignore_constants = false\n skip_tkspace\n name_t = get_tk\n given_name = ''.dup\n\n # class ::A -> A is in the top level\n if :on_op == name_t[:kind] and '::' == name_t[:text] then # bug\n name_t = get_tk\n container = @top_level\n given_name << '::'\n end\n\n skip_tkspace_without_nl\n given_name << name_t[:text]\n\n is_self = name_t[:kind] == :on_op && name_t[:text] == '<<'\n new_modules = []\n while !is_self && (tk = peek_tk) and :on_op == tk[:kind] and '::' == tk[:text] do\n prev_container = container\n container = container.find_module_named name_t[:text]\n container ||=\n if ignore_constants then\n c = RDoc::NormalModule.new name_t[:text]\n c.store = @store\n new_modules << [prev_container, c]\n c\n else\n c = prev_container.add_module RDoc::NormalModule, name_t[:text]\n c.ignore unless prev_container.document_children\n @top_level.add_to_classes_or_modules c\n c\n end\n\n record_location container\n\n get_tk\n skip_tkspace\n if :on_lparen == peek_tk[:kind] # ProcObjectInConstant::()\n parse_method_or_yield_parameters\n break\n end\n name_t = get_tk\n unless :on_const == name_t[:kind] || :on_ident == name_t[:kind]\n raise RDoc::Error, \"Invalid class or module definition: #{given_name}\"\n end\n if prev_container == container and !ignore_constants\n given_name = name_t[:text]\n else\n given_name << '::' + name_t[:text]\n end\n end\n\n skip_tkspace_without_nl\n\n return [container, name_t, given_name, new_modules]\n end", "def private_visibility!\n super\n set_discover_groups([], discover_groups)\n end", "def inspect_modules\n modules = []\n\n if constant.respond_to?(:ancestors)\n parent = inspect_superclass\n\n # Take all the modules included *directly* into the constant.\n modules = constant.ancestors.take_while do |ancestor|\n parent && ancestor != parent\n end\n\n # Get rid of non Module instances and modules that don't have a name.\n modules = modules.select do |mod|\n mod.instance_of?(Module) && mod.name\n end\n end\n\n return modules\n end", "def class_methods\n all_methods().find_all{|m| m.singleton && (@options.show_all || m.visibility == :public || 
m.visibility == :protected)}.collect{|m| method_hash(m)}\n end", "def get_all_unused_methods(access_control = nil)\n @methods.inject([]) do |unused_methods, (_class_name, methods)|\n unused_methods +=\n if access_control\n methods.select { |method| method.access_control == access_control && !method.used }\n else\n methods.reject(&:used)\n end\n end.reject { |method| method.access_control == 'public' && @possible_methods[method.method_name] }\n end", "def carries_rygsaek(*modules)\n options = modules.extract_options!.dup\n\n selected_modules = modules.map(&:to_sym).uniq.sort_by do |s|\n Rygsaek::ALL.index(s) || -1 # follow Rygsaek::ALL order\n end\n\n rygsaek_modules_hook! do\n\n selected_modules.each do |m|\n mod = Rygsaek::Models.const_get(m.to_s.classify)\n\n if mod.const_defined?(\"ClassMethods\")\n class_mod = mod.const_get(\"ClassMethods\")\n extend class_mod\n\n if class_mod.respond_to?(:available_configs)\n available_configs = class_mod.available_configs\n available_configs.each do |config|\n next unless options.key?(config)\n send(:\"#{config}=\", options.delete(config))\n end\n end\n end\n\n include mod\n end\n\n self.rygsaek_modules |= selected_modules\n options.each { |key, value| send(:\"#{key}=\", value) }\n end\n end", "def add(classes)\n classes = classes.to_s.split(' ')\n classes.each { |cls| super cls }\n self\n end", "def modules(&block)\r\n if block_given?\r\n Ragweed::Wrap32::list_modules(@pid, &block)\r\n else\r\n ret = []\r\n Ragweed::Wrap32::list_modules(@pid) {|x| ret << x}\r\n return ret\r\n end\r\n end", "def do_includes\n @body.scan(/rb_include_module\\s*\\(\\s*(\\w+?),\\s*(\\w+?)\\s*\\)/) do |c,m|\n if cls = @classes[c]\n m = @known_classes[m] || m\n cls.add_include(Include.new(m, \"\"))\n end\n end\n end", "def with_actions(mod, exclude: [])\n\t\t\t\tModule.new do\n\t\t\t\t\t@mod = mod\n\t\t\t\t\t@exclude = exclude\n\n\t\t\t\t\tdef self.included(ctrl)\n\t\t\t\t\t\tctrl.include @mod\n\n\t\t\t\t\t\t(@mod.public_instance_methods - @exclude).each do |meth|\n\t\t\t\t\t\t\tctrl.send :define_method, meth, @mod.public_instance_method(meth)\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend", "def define_missing_methods(klass, hash)\n methods = find_methods(hash)\n klass.include(Module.new do\n instance_exec do\n methods.each do |method|\n define_method method do\n if (object = instance_variable_get('@object'))\n object.public_send(method)\n end\n end\n end\n end\n end)\n end", "def hiddens\n @hiddens ||= fields.select { |f| f.class == Hidden }\n end", "def initialize_classes_and_modules\n @classes = {}\n @modules = {}\n end", "def create_class_tree(classtree = ClassTreeNode.new(Kernel),\n ignore = [ClassTreeNode, ObjectDescriber, ObjectBrowser, ObjectBrowser::UI, ObjectBrowser::UI::DescriptionFactory])\n ObjectSpace.each_object do | x |\n classnode = classtree\n x.class.ancestors.reverse[1..-1].inject(classtree){ | classnode, klass | classnode.add_class(klass) }.add_object(x)\n end \n classtree\n end", "def class_list\n array = []\n ObjectSpace.each_object(Class) {|m| array << m }\n array\n end", "def load_classes class_path_list\n class_path_list.each do |path|\n add_class path\n end\n end", "def filter(options = {})\n @options = options\n @exclude = @options[:exclude] || []\n @include = @options[:include] || []\n @picked =\n @ancestors -\n (@exclude - included_ancestors) -\n (modules_under_excluded_classes - included_ancestors)\n end" ]
[ "0.66178924", "0.6531296", "0.6465021", "0.6325448", "0.5983741", "0.5918271", "0.5918271", "0.5878157", "0.5870297", "0.5831609", "0.5808009", "0.5750044", "0.5750044", "0.57357603", "0.568527", "0.5675122", "0.56576324", "0.5629552", "0.55808187", "0.5576997", "0.554552", "0.5542215", "0.5536025", "0.5424605", "0.5398248", "0.5390446", "0.53813636", "0.53740585", "0.5372259", "0.5362454", "0.5337904", "0.5331651", "0.5322181", "0.5322181", "0.53135407", "0.53101254", "0.5276516", "0.5267371", "0.52638614", "0.5262475", "0.5256019", "0.5253924", "0.52078134", "0.52073604", "0.52073604", "0.52073604", "0.520734", "0.5198741", "0.5187771", "0.51710796", "0.51710796", "0.515892", "0.515233", "0.5146559", "0.5137849", "0.51241904", "0.51177144", "0.5099903", "0.5066131", "0.5061444", "0.50508493", "0.5026596", "0.50156236", "0.50001496", "0.49988702", "0.49977222", "0.4988488", "0.49734244", "0.49668536", "0.49609795", "0.49509108", "0.4936193", "0.49289235", "0.49274775", "0.49253818", "0.49222752", "0.49198854", "0.49146697", "0.49144003", "0.4909892", "0.49073398", "0.49032804", "0.49015486", "0.49013942", "0.48936895", "0.48893952", "0.48823643", "0.48653352", "0.48569608", "0.48531002", "0.48521516", "0.4851873", "0.4850581", "0.48498783", "0.4818124", "0.48152632", "0.48073974", "0.4806586", "0.48050058", "0.48006618" ]
0.7061887
0
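The positive document for the query above only fills an @ignored hash; the surrounding class is left implicit. A minimal, hypothetical sketch of how such a method could be hosted and called — the ClassGraph name, the initializer, and the ignored? helper are assumptions added for illustration, not part of the dataset row:

# Hypothetical host class for the `hide` method shown in the row above.
class ClassGraph
  def initialize
    @ignored = {}   # classes/modules to leave out of the diagram
  end

  # Adds all specified classes/modules to the hidden list.
  def hide(*class_or_modules)
    class_or_modules.each { |c| @ignored[c] = true }
  end

  def ignored?(klass)
    @ignored.key?(klass)
  end
end

graph = ClassGraph.new
graph.hide(Kernel, Comparable)   # Kernel is mixed into almost everything
graph.ignored?(Kernel)           # => true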
Hides all specified classes/modules and their subclasses. E.g. useful to hide the +SWIG+ classes for FXRuby.
def hide_tree(*classOrModules) classOrModules.each do |classOrModule| @ignored_tree[classOrModule] = true end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hide(*classOrModules)\n\t\tclassOrModules.each do |classOrModule|\n\t\t\t@ignored[classOrModule] = true\n\t\tend\n\tend", "def hide(*stuff)\n hide_methods(self, [Object], *stuff)\n end", "def hide_singletons(*stuff)\n hide_methods(singleton_class, [Class], *stuff)\n end", "def hide_methods(mod, except_defaults, *stuff)\n options = stuff.last.is_a?(Hash) ? stuff.pop : {}\n include_ancestors = options.fetch(:ancestors){false}\n except = Array(options.fetch(:except){except_defaults})\n protect = Array(options[:protect])\n except_methods = collect_methods(true, *except)\n protect_methods = collect_methods(true, *protect)\n methods_to_hide = collect_methods(include_ancestors, *stuff)\n (methods_to_hide - except_methods).each do |method_name|\n mod.module_eval do\n next unless method_defined?(method_name)\n if protect_methods.include?(method_name)\n protected method_name\n else\n private method_name\n end\n end\n end\n end", "def hide_current_state\n\t\t@classes = Array.new\n\t\tObjectSpace.each_object(Class) do |klass|\n\t\t\[email protected] klass\n\t\tend\n\t\t@modules = Hash.new\n\t\tObjectSpace.each_object(Module) do |mod|\n\t\t\t@modules[mod] = true\n\t\tend\n\tend", "def remove_invisible min_visibility\n return if [:private, :nodoc].include? min_visibility\n remove_invisible_in @method_list, min_visibility\n remove_invisible_in @attributes, min_visibility\n remove_invisible_in @constants, min_visibility\n end", "def hide; end", "def hide; end", "def hide!\n singleton_class.send(:define_method, :nodoc) { true }\n end", "def slideClasses\n blacklist = ['bigtext']\n @classes.reject { |klass| blacklist.include? klass }\n end", "def unhide app\n app.perform :unhide\n end", "def hide app\n app.perform :hide\n end", "def without_modules(*modules); end", "def without_modules(*modules); end", "def hideElements \n @hideElements.each do |element|\n element.hide\n end\n end", "def remove_nodoc_children\n prefix = self.full_name + '::'\n\n modules_hash.each_key do |name|\n full_name = prefix + name\n modules_hash.delete name unless @store.modules_hash[full_name]\n end\n\n classes_hash.each_key do |name|\n full_name = prefix + name\n classes_hash.delete name unless @store.classes_hash[full_name]\n end\n end", "def remove_classes(*args)\n args.each {|x| self.remove_class(x) }\n return self\n end", "def remove_classes_and_modules\n initialize_classes_and_modules\n end", "def hide()\n @view__.hide\n end", "def exclude\n all - methods\n end", "def remove_subclasses\n\t\tself.subclasses.each do |klass|\n\t\t\tfront = klass.name\n\t\t\tif /::/.match(front)\n\t\t\t\tfront,back = parts(klass.name)\n\t\t\t\tfront_class = front.split('::').inject(Object) { |o,n| o.const_get n }\n\t\t\t\tfront_class.__send__(:remove_const, back)\n\t\t\telse\n\t\t\t\tObject.__send__(:remove_const, front)\n\t\t\tend\n\t\tend\n\t\tnil\n\tend", "def trim_classes\n deletions = 1\n while deletions > 0 do\n deletions = 0\n @classes.each do |cls, value|\n next unless value.fetch(:sub_classes, {}).empty? 
&& !value.has_key?(:examples)\n deletions += 1\n @classes.delete(cls)\n sc = value[:super_class]\n next unless sc\n puts \"trim class #{cls}, super-class #{sc}\"\n @classes[sc][:sub_classes].delete(cls) if @classes.fetch(sc, {})[:sub_classes]\n end\n end\n end", "def hideObject _obj, _args\n \"_obj hideObject _args;\" \n end", "def ignored_classes\n Set.new [\n # 'Vedeu::API',\n # 'Vedeu::Application',\n # 'Vedeu::Background',\n # 'Vedeu::Buffers',\n # 'Vedeu::Clear',\n 'Vedeu::Coercions',\n 'Vedeu::Colour',\n 'Vedeu::ColourTranslator',\n 'Vedeu::Common',\n # 'Vedeu::Composition',\n # 'Vedeu::Compositor',\n 'Vedeu::Configuration',\n # 'Vedeu::Cursor',\n # 'Vedeu::Cursors',\n 'Vedeu::Esc',\n 'Vedeu::Event',\n # 'Vedeu::Events',\n # 'Vedeu::Focus',\n # 'Vedeu::Foreground',\n 'Vedeu::Geometry',\n # 'Vedeu::Grid',\n # 'Vedeu::Groups',\n # 'Vedeu::Input',\n # 'Vedeu::Interface',\n # 'Vedeu::Interfaces',\n # 'Vedeu::Keymap',\n # 'Vedeu::Keymaps',\n # 'Vedeu::KeymapValidator',\n # 'Vedeu::Launcher',\n # 'Vedeu::Line',\n 'Vedeu::Log',\n # 'Vedeu::Menu',\n # 'Vedeu::Menus',\n 'Vedeu::MonoLogger',\n # 'Vedeu::Offset',\n # 'Vedeu::Offsets',\n 'Vedeu::Position',\n 'Vedeu::Presentation',\n # 'Vedeu::Refresh',\n # 'Vedeu::Registrar',\n # 'Vedeu::Render',\n 'Vedeu::Repository',\n 'Vedeu::Stream',\n 'Vedeu::Style',\n 'Vedeu::Terminal',\n 'Vedeu::Trace',\n # 'Vedeu::View',\n # 'Vedeu::Viewport',\n ]\n end", "def remove_invisible_in array, min_visibility # :nodoc:\n if min_visibility == :public then\n array.reject! { |e|\n e.visibility != :public and not e.force_documentation\n }\n else\n array.reject! { |e|\n e.visibility == :private and not e.force_documentation\n }\n end\n end", "def hide_elements\n true\n end", "def hide_action(*names)\n write_inheritable_attribute(:hidden_actions, hidden_actions | names.collect { |n| n.to_s })\n end", "def complete min_visibility\n fix_basic_object_inheritance\n\n # cache included modules before they are removed from the documentation\n all_classes_and_modules.each { |cm| cm.ancestors }\n\n unless min_visibility == :nodoc then\n remove_nodoc @classes_hash\n remove_nodoc @modules_hash\n end\n\n @unique_classes = find_unique @classes_hash\n @unique_modules = find_unique @modules_hash\n\n unique_classes_and_modules.each do |cm|\n cm.complete min_visibility\n end\n\n @files_hash.each_key do |file_name|\n tl = @files_hash[file_name]\n\n unless tl.text? 
then\n tl.modules_hash.clear\n tl.classes_hash.clear\n\n tl.classes_or_modules.each do |cm|\n name = cm.full_name\n if cm.type == 'class' then\n tl.classes_hash[name] = cm if @classes_hash[name]\n else\n tl.modules_hash[name] = cm if @modules_hash[name]\n end\n end\n end\n end\n end", "def hide(symbols)\n RegLang.new(fa.hide(symbols))\n end", "def all_modules klass, trash = []\n trash = [*trash]\n trash += [klass]\n children = shallow(klass, trash)\n all = [klass, children.\n map { |x| all_modules x, trash + children }].\n flatten.select { |x| Module === x }\n all\n end", "def remove(all = false)\n\t\tremove_subclasses if all\n\t\tObject.__send__(:remove_const, self.name)\n\t\tnil\n\tend", "def hide(*values)\n values.inject(self) { |res, val| res._hide(val) or fail ArgumentError, \"Unknown value for hide: #{val}\" }\n end", "def hide_windows\n end", "def hide_frame_state(graph)\n graph.nodes.each_value do |node|\n if FRAME_STATE_NODES.include?(node.props.dig(:node_class, :node_class))\n node.props[:hidden] = true\n end\n end\n end", "def hide_unless(condition)\n hide(:unless => condition)\n end", "def getHideFrom _obj, _args\n \"_obj getHideFrom _args;\" \n end", "def hide!\n self[:hidden] = true\n self\n end", "def remove_class(names = T.unsafe(nil)); end", "def exclude_common_methods!\r\n ExcludeCommonMethods.apply!(self)\r\n end", "def remove_class(*names)\n classes = class_names - names\n\n if classes.empty?\n `#@native.removeAttribute('class')`\n else\n `#@native.className = #{classes.join ' '}`\n end\n\n self\n end", "def hidden_class\n model.is_hidden? ? [\"kono_utils-hidden\"] : []\n end", "def display_klasses(with_modules=false, show_internal=false, klasses=Thor::Base.subclasses)\n klasses -= [Thor, Main, ::Apipie::Client::CliCommand, ::Thor] unless show_internal\n\n show_modules if with_modules && !thor_yaml.empty?\n\n list = Hash.new { |h, k| h[k] = [] }\n groups = []\n\n # Get classes which inherit from Thor\n (klasses - groups).each { |k| list[k.namespace.split(\":\").first] += k.printable_tasks(false) }\n\n # Get classes which inherit from Thor::Base\n groups.map! { |k| k.printable_tasks(false).first }\n list[\"root\"] = groups\n\n # Order namespaces with default coming first\n list = list.sort { |a, b| a[0].sub(/^default/, '') <=> b[0].sub(/^default/, '') }\n list.each { |n, tasks| display_tasks(n, tasks) unless tasks.empty? 
}\n end", "def hide_action(*names)\n self._hidden_actions = self._hidden_actions | names.map { |n| n.to_s }\n end", "def prune_dependencies\n class_names = @classes.map {|klass| klass.name}\n @classes.each do |klass|\n klass.dependencies = klass.dependencies.uniq.keep_if {|dep| class_names.include?(dep)}\n end\n end", "def hide_secret\n @is_showing_secret = false\n # TODO secret should be using a JPanel or something, not a dialog.\n end", "def remove_constant_hierarchy(*names)\n names.each do |name|\n pieces = name.split(\"::\")\n pieces.size.downto(1).each do |size|\n to_remove = pieces.pop\n const_name = pieces.join(\"::\")\n parent = Object.class_eval(const_name) || Object\n parent.send(:remove_const, to_remove)\n end\n end\nend", "def remove_class(names = nil)\n kwattr_remove(\"class\", names)\n end", "def hide!\n visible(false)\n end", "def hide!\n visible(false)\n end", "def unmodulize_modules(*mod_consts)\n mod_consts.each { |mod_const| unmodulize *(mod_const.instance_methods) }\n end", "def boring_classes\n return [::Class, *::Class.included_modules,\n ::Module, *::Module.included_modules,\n ::Kernel, *::Kernel.included_modules,\n ::Object, *::Object.included_modules,\n ::BasicObject, *::BasicObject.included_modules].uniq\n end", "def not_sandboxed_methods(include_superclasses = false, allowed_mixins=[], *disallowed_methods)\n\n __the_methods_to_check = public_instance_methods(false)\n puts \"#{self.name}: direct: #{__the_methods_to_check.inspect}\" if $DEBUG\n if include_superclasses\n clz = self.superclass\n while !clz.nil?\n unless clz == Object || (defined? BasicObject && clz == BasicObject)\n puts \"#{self.name}: #{clz.name}: #{clz.public_instance_methods(false).inspect}\" if $DEBUG\n __the_methods_to_check += clz.public_instance_methods(false)\n end\n clz = clz.superclass\n end\n \n if allowed_mixins.length > 0\n #we include any mixins\n for m in self.included_modules\n if allowed_mixins.include?(m)\n puts \"#{self.name}: #{m.name}: #{m.public_instance_methods(false).inspect}\" if $DEBUG\n __the_methods_to_check += m.public_instance_methods(false)\n end\n end\n end\n end\n \n __the_methods_to_check << \"nil?\".intern\n \n __the_methods_to_check.uniq!\n \n unless disallowed_methods.nil? || disallowed_methods.length == 0\n not_bang = false\n if disallowed_methods.include?(:bang_methods) #just remove all xxx! methods that modify in place\n __the_methods_to_check.reject! { |meth| meth.to_s[-1, 1] == \"!\"}\n not_bang = true\n end\n unless not_bang || disallowed_methods.length > 1\n __the_methods_to_check.reject! { |meth| disallowed_methods.include?(meth)}\n end\n end\n \n puts \"#{self.name}: #{__the_methods_to_check.inspect}\" if $DEBUG\n \n sandboxed_methods(*__the_methods_to_check)\n \n \n \n end", "def all_hidden(&block)\n @all_hidden = true\n yield\n ensure\n @all_hidden = nil\n end", "def hide(&block)\n visible block ? proc { false == (instance_eval &block) } : false\n end", "def hide\n super\n\n Vedeu.buffers.by_name(name).hide\n end", "def hide_columns_for_model(klass, klass_ui, controller)\n return klass_ui.all_columns.reject {|c| current_show_columns(klass, klass_ui, controller).include?(c.name)}\n end", "def remove_unwanted_views\n blacklight_config.view.delete(:gallery)\n blacklight_config.view.delete(:masonry)\n blacklight_config.view.delete(:slideshow)\n end", "def hide\n ShowWindow.call(@handle, SW_HIDE)\n end", "def hideObjectGlobal _args\n \"hideObjectGlobal _args;\" \n end", "def hide_floating(graph)\n graph.nodes.each_value do |node|\n if node.edges.none? 
{ |e| e.props[:kind] == 'control' }\n node.props[:hidden] = true\n end\n end\n end", "def hide\n @hidden = 1\n\n # A hidden worksheet shouldn't be active or selected.\n @selected = 0\n set_activesheet(0)\n set_firstsheet(0)\n end", "def hide_active_record\n Object.const_set(:ActiveRecordHidden, ::ActiveRecord)\n Object.send(:remove_const, :ActiveRecord)\nend", "def remove_class(sym)\n `var el=this.__native__,klass=sym.__value__`\n `el.className=el.className.replace(new(RegExp)('(^|\\\\\\\\s)'+klass+'(?:\\\\\\\\s|$)'),'$1')`\n return self\n end", "def hidden?\n classes.include?('hidden')\n end", "def get_sorted_module_list classes\n classes.select do |klass|\n klass.display?\n end.sort\n end", "def hide_docs(command)\n def command.nodoc; true end\n end", "def hide_loader\n end", "def without_class?(name)\n a = []\n \n each do |e|\n if !e.get(\"className\").split(\" \").index(name)\n a << e\n end\n end\n \n JS::Collection.new(a)\n end", "def hide\n Control.functions[__method__] ||= AU3_Function.new(\"ControlHide\", 'SSS', 'L')\n res = Control.functions[__method__].call(@title.wide, @text.wide, @c_id.wide)\n raise_unfound if res == 0\n nil\n end", "def hide_if(condition)\n hide(:if => condition)\n end", "def hideBody _args\n \"hideBody _args;\" \n end", "def display_klasses(with_modules = false, show_internal = false, klasses = Foreman::Thor::Base.subclasses)\n klasses -= [Foreman::Thor, Foreman::Thor::Runner, Foreman::Thor::Group] unless show_internal\n\n raise Error, \"No Foreman::Thor commands available\" if klasses.empty?\n show_modules if with_modules && !thor_yaml.empty?\n\n list = Hash.new { |h, k| h[k] = [] }\n groups = klasses.select { |k| k.ancestors.include?(Foreman::Thor::Group) }\n\n # Get classes which inherit from Foreman::Thor\n (klasses - groups).each { |k| list[k.namespace.split(\":\").first] += k.printable_commands(false) }\n\n # Get classes which inherit from Foreman::Thor::Base\n groups.map! { |k| k.printable_commands(false).first }\n list[\"root\"] = groups\n\n # Order namespaces with default coming first\n list = list.sort { |a, b| a[0].sub(/^default/, \"\") <=> b[0].sub(/^default/, \"\") }\n list.each { |n, commands| display_commands(n, commands) unless commands.empty? }\n end", "def non_aliases\n @non_aliases ||= classes_and_modules.reject { |cm| cm.is_alias_for }\n end", "def hideToolBars()\n @ie.AddressBar = true\n @ie.StatusBar = true\n @ie.ToolBar= false\n @ie.MenuBar = false\n end", "def remove_all_methods!\n instance_methods.each do |method_name|\n # Important -- we use Class#remove_method, not Class#undef_method, which does something that's different in\n # some important ways.\n remove_method(method_name) if @methods_defined[method_name.to_sym]\n end\n\n @class_methods_module.instance_methods.each do |method_name|\n @class_methods_module.send(:remove_method, method_name) if @class_methods_defined[method_name]\n end\n end", "def filter(options = {})\n @options = options\n @exclude = @options[:exclude] || []\n @include = @options[:include] || []\n @picked =\n @ancestors -\n (@exclude - included_ancestors) -\n (modules_under_excluded_classes - included_ancestors)\n end", "def remove_classes_from_top_layer(*class_list)\n @lines.reject! 
{|l| l.is_a?(GLMObject) && class_list.include?(l[:class])}\n end", "def hide_github_shell\n %w(header repo_menu repo_sub_menu repos footer triangle).each do |element|\n element = @document.getElementById(element)\n element.style.setProperty_value_priority(\"display\", \"none\", nil) if element\n end\n end", "def hide_parts(device)\n action_parts(device, '-d')\n end", "def ignore_exts\n @ext_rules.reject\n end", "def hide_all()\n\t#images\n\tfor i in 0..3\n\t\tfor j in 0..4\n\t\t\t@computerimages[i][j].path = \"images/54.png\"\n\t\tend\n\tend\n\t#ranks\n\tfor i in 0..3\n\t\t@computerranks[i].text = \"-----\"\n\tend\t\nend", "def include_hidden!\n @flags |= File::FNM_DOTMATCH\n end", "def hide_scrollbars\n set_overflow(OVERFLOW_HIDDEN)\n end", "def hide_unused_nodes(graph)\n loop do\n modified = false\n graph.nodes.each_value do |node|\n next unless node.outputs.all? { |edge| edge.to.props[:hidden] } &&\n node.inputs.none? { |edge| edge.props[:kind] == 'control' } &&\n node.inputs.none? { |edge| edge.props[:name] == 'anchor' }\n\n unless node.props[:hidden]\n node.props[:hidden] = true\n modified = true\n end\n end\n break unless modified\n end\n end", "def record_class_definitions\n extant, novel = [], []\n ObjectSpace.each_object(Class) { |k| extant << k }\n yield\n ObjectSpace.each_object(Class) { |k| novel << k if !extant.include?(k) }\n novel\n end", "def hide\n self.visible = false\n clear_dmg_preview\n end", "def list_known_classes names = []\n classes = []\n\n stores.each do |store|\n classes << store.module_names\n end\n\n classes = classes.flatten.uniq.sort\n\n unless names.empty? then\n filter = Regexp.union names.map { |name| /^#{name}/ }\n\n classes = classes.grep filter\n end\n\n page do |io|\n if paging? or io.tty? then\n if names.empty? then\n io.puts \"Classes and Modules known to ri:\"\n else\n io.puts \"Classes and Modules starting with #{names.join ', '}:\"\n end\n io.puts\n end\n\n io.puts classes.join(\"\\n\")\n end\n end", "def remove(klass)\n Array(klass).each do |k|\n if k.instance_of?(Module)\n @module_formatters.delete(k)\n else\n k = k.name if k.is_a?(Class)\n @class_formatters.delete(k)\n end\n end\n self\n end", "def hide\n @visible = false\n self\n end", "def hide\n @hidd = 1\n\n # A hidden worksheet shouldn't be active or selected.\n @selecte = 0\n @activesheet = 0\n @firstsheet = 0\n end", "def invisible(stream = $stdout)\n stream.print(hide)\n yield\n ensure\n stream.print(show)\n end", "def unboring_methods\n if [::Class,::Module].include? self\n # Only those instance methods that we have not by virtue of being an instance of ourself\n self.methods - (self.instance_methods - self.singleton_methods)\n elsif self.is_a? ::Class\n # Only those instance methods that we have not by virtue of being a Class, unless we have overridden them\n self.methods - (::Class.instance_methods - self.singleton_methods)\n else\n # Only those instance methods that we have not by virtue of being a Module, unless we have overridden them\n self.methods - (::Module.instance_methods - self.singleton_methods)\n end\n end", "def hidden_namespaces; end", "def restricted_types(*types)\n @_restricted_types ||= []\n types.each do |type|\n raise UnexpectedTypeException.new([Class],type.class) unless type.is_a? Class\n @_restricted_types << type unless @_restricted_types.include? 
type\n _subclasses.each do |subclass|\n subclass.restricted_types type\n end\n end\n @_restricted_types\n end", "def classes\n @_classes ||= vedeu_classes - vedeu_exceptions - ignored_classes\n end", "def make_hidden\n @linkage_vis = :hidden\n end", "def unhide\n post(\"/api/unhide\", id: fullname)\n end", "def exclude; end", "def hide_window\n end", "def class_unmock(*methods)\n return self.class.class_unmock(*methods) if !self.is_a?(Module)\n do_unmock(methods, :instance, self)\n end" ]
[ "0.80537355", "0.7160505", "0.69249374", "0.6764828", "0.66643965", "0.65248203", "0.62142605", "0.62142605", "0.62083936", "0.6201472", "0.60961556", "0.607149", "0.5970846", "0.5970846", "0.58691376", "0.58258575", "0.5780179", "0.5779363", "0.5761028", "0.57156724", "0.5696485", "0.56107646", "0.5517024", "0.551106", "0.5496217", "0.54425305", "0.544175", "0.5407206", "0.54068834", "0.5379122", "0.5376915", "0.53664076", "0.53444624", "0.53392327", "0.5325684", "0.5308352", "0.5293465", "0.52648723", "0.52534676", "0.5237701", "0.5235363", "0.5215737", "0.51981413", "0.51966125", "0.51781183", "0.5176563", "0.51723284", "0.5163091", "0.5163091", "0.5160395", "0.51393205", "0.5137053", "0.5130924", "0.51050776", "0.5103398", "0.5102006", "0.50952226", "0.508839", "0.5063559", "0.50532", "0.50513184", "0.50490594", "0.5022184", "0.5020489", "0.50197065", "0.5010752", "0.50087905", "0.500654", "0.50028276", "0.5002084", "0.4985326", "0.49834937", "0.49816862", "0.49816456", "0.49772027", "0.49746943", "0.4974255", "0.49574086", "0.4951566", "0.49416316", "0.49342644", "0.49318144", "0.49180508", "0.49170172", "0.49159825", "0.4904036", "0.48905572", "0.4887756", "0.4873019", "0.48663348", "0.4861749", "0.4858139", "0.48529658", "0.48434785", "0.4841628", "0.4840077", "0.48287222", "0.48210314", "0.48176983", "0.48097557" ]
0.710222
2
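The hide_tree document above only records the given roots in @ignored_tree; the actual subtree test happens later by walking a class's superclass chain, which is exactly what the generate method in the next row does when it filters its todo list. A hedged sketch of that lookup — the ClassGraph name and the ignored_by_tree? helper are assumptions for illustration:

# Sketch: a class counts as hidden if it, or anything above it in the
# superclass chain, was registered via hide_tree.
class ClassGraph
  def initialize
    @ignored_tree = {}
  end

  # Hides all specified classes/modules and their subclasses.
  def hide_tree(*class_or_modules)
    class_or_modules.each { |c| @ignored_tree[c] = true }
  end

  def ignored_by_tree?(klass)
    while klass
      return true if @ignored_tree[klass]
      klass = klass.superclass
    end
    false
  end
end

g = ClassGraph.new
g.hide_tree(StandardError)
g.ignored_by_tree?(ArgumentError)   # => true  (subclass of StandardError)
g.ignored_by_tree?(String)          # => false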
Generates the dot graph, and returns a string of the graph.
def generate(with_modules=true) str = %Q| digraph #{@graph_name} { #{"rankdir=LR;\n" if @left_to_right} #{"ordering=out;\n" if @sort_alphabetic} edge [color="#{@color_edge}",fontname="#{@font}",fontsize=#{font_size_edge}]; node [color="#{@color_hidden}",fontcolor="#{@color_hidden}",fontname="#{@font}",fontsize=#{font_size},shape=#{shape_class},height=#{@height},width=#{@width}]; | # get classes current = Array.new ObjectSpace.each_object(Class) do |klass| current.push klass end todo = current - @classes - @ignored.keys # remove all classes from ignore_tree todo.delete_if do |klass| klass = klass.superclass while klass && !@ignored_tree[klass] klass end todo = todo.sort_by { |klass| klass.to_s } todo.each do |klass| # all classes black str << %Q| "#{klass}" [height=#{@height},width=#{@width},color="#{@color}",fontcolor="#{@color}"];\n| end con = Hash.new # connections todo.each do |klass| while superclass = klass.superclass break if @ignored[superclass] con[ [superclass, klass] ] = true klass = superclass end end con.each_key do |superclass, klass| str << %Q{\t"#{superclass}" -> "#{klass}";\n} end str << "\n" gen_modules(str, todo) if with_modules str << "}" str end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dot\n RGL::DOT::Graph.new('elements' => dot_edges).to_s\n end", "def to_dot (params={}) to_dot_graph(params).to_s; end", "def to_dot (params={}) to_dot_graph(params).to_s; end", "def to_dot\n i = 0\n dot = [\"digraph G {\", \"#{i} [label=\\\"self\\\"];\"]\n _to_dot(dot, \"\", i, self, i)\n dot << \"}\"\n dot.join(\"\\n\")\n end", "def dependency_graph_dot\n # Importing this here as Chef may not have installed the Gem\n require 'graphviz'\n ::GraphViz.new(:G, type: :digraph) do |g|\n dependency_graph.each do |fragment, dependencies|\n g.add_nodes(fragment)\n\n dependencies.each do |dep|\n g.add_nodes(dep)\n g.add_edges(fragment, dep)\n end\n end\n end.output(dot: String)\n end", "def to_dot(*attrs, &block)\n d = \"#{'di' if @directed}graph G {\\n\"\n d << attrs.collect { |a| \" #{a};\\n\"}.join\n @nodes.each { |n| \n d << \" \\\"#{nid(n)}\\\" [#{nodeparams(n)}];\\n\"}\n @links.sort_by { |k,l| l.weight }.each { |k,l| d << l.to_dot(&block) }\n d << \"}\\n\"\n end", "def to_dot graph\n graph.node(self, :label => production)\n child_nodes.each do |child|\n graph.edge(self, child)\n child.to_dot(graph)\n end\n end", "def graph_to_s\n string = \"\"\n @nodes.each do |name, node|\n string +=\"#{name}:\\n\\t(#{node.name}, #{node.country}, #{node.continent} #{node.timezone}, #{node.coords}, #{node.pop}, #{node.region}) => #{@edges[name]} \\n\"\n end\n string\n end", "def to_dot\n template = <<-eos\ndigraph \"<%= uid || 'graph' %>\" {\n node[ style = \"filled, solid\"];\n<% each_task do |task| -%>\n<% next unless task.name =~ dot_task_filter if dot_task_filter -%>\n<% next unless task.node.name =~ dot_node_filter if dot_node_filter and task.node -%>\n \"<%= task %>\" [label = \"<%= task %>\", fillcolor = \"<%= task.color %>\"];\n<% end -%>\n\n<% each_task do |task| -%>\n<% task.each_forward_dependency do |forward_task| -%>\n<% next unless task.name =~ dot_task_filter if dot_task_filter -%>\n<% next unless task.node.name =~ dot_node_filter if dot_node_filter and task.node -%>\n<% next unless forward_task.name =~ dot_task_filter if dot_task_filter -%>\n<% next unless forward_task.node.name =~ dot_node_filter if dot_node_filter and forward_task.node -%>\n \"<%= task %>\" -> \"<%= forward_task %>\";\n<% end -%>\n<% end -%>\n}\n eos\n ERB.new(template, nil, '-').result(binding)\n end", "def to_dot_graph (params = {})\n params['name'] ||= self.class.name.gsub(/:/,'_')\n fontsize = params['fontsize'] ? params['fontsize'] : '8'\n graph = (directed? ? DOT::DOTDigraph : DOT::DOTSubgraph).new(params)\n edge_klass = directed? ? DOT::DOTDirectedEdge : DOT::DOTEdge\n vertices.each do |v|\n name = v.to_s\n params = {'name' => '\"'+name+'\"',\n 'fontsize' => fontsize,\n 'label' => name}\n v_label = v.to_s\n params.merge!(v_label) if v_label and v_label.kind_of? Hash\n graph << DOT::DOTNode.new(params)\n end\n edges.each do |e|\n params = {'from' => '\"'+ e.source.to_s + '\"',\n 'to' => '\"'+ e.target.to_s + '\"',\n 'fontsize' => fontsize }\n e_label = e.to_s\n params.merge!(e_label) if e_label and e_label.kind_of? 
Hash\n graph << edge_klass.new(params)\n end\n graph\n end", "def dot\n s = \"digraph query {\\n\"\n @operators.each { |op| \n desc = op.attributes.inspect.gsub(\"\\\"\", \"\\\\\\\"\")\n label = \"[#{op.index}] #{op.name}\"\n if op.performanceData\n duration = (op.performanceData[\"endTime\"] - op.performanceData[\"startTime\"]).to_f\n label += \" (#{duration} ms)\"\n end\n s += \" #{op.index} [label=\\\"#{label}\\n #{desc}\\\", shape=box];\\n\"\n op.dependencies.each { |dep|\n s += \" #{dep.index} -> #{op.index};\\n\"\n }\n } \n s += \"}\\n\"\n end", "def to_dot(opts={})\n\n s = [ \"digraph \\\"process wfid #{wfid}\\\" {\" ]\n @expressions.each { |e| s.push(*e.send(:to_dot, opts)) }\n @errors.each { |e| s.push(*e.send(:to_dot, opts)) }\n s << '}'\n\n s.join(\"\\n\")\n end", "def to_dot\n Dot::Digraph.new(:fontsize => 10, :shape => :box, :size => 5) do |g|\n parse_tree.to_dot(g)\n end.to_dot\n end", "def to_dot(include_starred_states = false)\n all_states = transitions.values.map(&:to_a).flatten.uniq.reject { |t| t == \"*\" }\n\n \"\".tap do |s|\n s << \"digraph #{self.class.name.inspect} {\\n\"\n transitions.each do |signal, signal_transitions|\n signal_transitions.each do |from, to|\n next if !include_starred_states && (from == \"*\" || to == \"*\")\n\n from = from == \"*\" ? all_states : [from]\n to = to == \"*\" ? all_states : [to]\n from.product(to).each { |f, t| s << \" #{f} -> #{t} [ label=\\\"#{signal}\\\" ]\\n\" }\n end\n end\n s << \"}\\n\"\n end\n end", "def dot(io)\n log \"drawing graph \"\n io << <<END\ndigraph test_definition {\ngraph [ rankdir=TB ]\nnode [shape=record,fontname=helvetica]\nedge [fontname=helvetica, arrowsize=0.5]\nEND\n # nodes,transitions = collect_nodes\n # transitions.to_a.flatten.to_set.each do |n|\n @nodes.each do |n|\n next if n.record # if we are a port, skip, record handles drawing\n if n.is_record? # if we are a record, call specialized function\n n.dot_code(io)\n else\n label_sym = (n.is_a?(JoinNode) ? :xlabel : :label)\n attrs={label_sym => '\"'+n.dot_label+'\"'}\n # if n.symbol\n # attrs[:xlabel]=n.symbol\n # end\n if n.respond_to? 
:dot_node_shape\n attrs[:shape]='\"'+n.dot_node_shape+'\"'\n end\n attr_string=attrs.map do |k,v|\n \"#{k.to_s}=#{v}\"\n end.join(\", \")\n io.puts \"#{n.node_name} [#{attr_string}]\"\n end\n end\n @control_edges.each do |s,d,label|\n log \"adding control edge: [#{s},#{d},#{label}]\"\n attrs={color:\"red\",fontcolor:\"red\"}\n attrs[:label] = '\"'+label.to_s+'\"' if label\n draw_transition(s,d,io,attrs)\n end\n @data_edges.each do |s,d|\n log \"adding data edge\"\n draw_transition(s,d,io,{color: \"green\"})\n end\n io.puts \"}\"\n self\n end", "def to_dot(graph)\n graph.node(self, :label => token)\n end", "def to_dot\n require 'graphviz_r'\n gvr = GraphvizR.new('mind_map')\n gvr.graph[:bgcolor => 'transparent']\n edges = cull()\n\n edges.flatten.uniq.each do |post|\n def post.dot_id; \"post_#{ id }\"; end\n gvr[post.dot_id][ style_node(post) ]\n end\n\n edges.each{ |from, to| gvr[from.dot_id] - gvr[to.dot_id] }\n return gvr.to_dot # that ^ declares a GraphViz edge!\n end", "def render_dot_generic2(graph_name,file_type, opts )\n @graph_data ||= RicGraph.get_dot(graph_name,opts)\n gv=IO.popen(\"/usr/bin/dot -q -T#{file_type}\",\"w+\")\n #gv.puts( Gr aph.to_dot(graph_name,opts) )\n gv.puts( @graph_data )\n gv.close_write\n @generic_svg = {\n :name => graph_name,\n :data => gv.read,\n }\n return @generic_svg[:data]\n end", "def to_dot(options = {})\r\n Dot::Digraph.new(:fontsize => 10, :shape => :box, :size => 5) do |g|\r\n states.each do |state|\r\n g.node(state, :label => state.items.values.collect{|item| item.to_s(options)}.join(\"\\n\"))\r\n @transitions[state].each do |symbol, dest_state|\r\n g.edge(state, dest_state, :label => symbol.name)\r\n end\r\n end\r\n end.to_dot\r\n end", "def generate_graph\n end", "def dependency_in_dot\n result = \"\"\n result << \"digraph recipes {\\n\"\n result << \" rankdir=LR;\\n\"\n result << _dependency_in_dot\n result << \"}\"\n\n result\n end", "def get_graphviz\n handle_graphviz_setup\n @file_name = build_graphviz_restfulpath(params, @output_format)\n # @file_name = get_graph_dir(\"#{@graph_path}#{params[:program]}.#{@output_format}\")\n \n render :layout=>false\n end", "def to_dot(*attrs)\n s = \" \\\"#{nid(@src)}\\\" #{edgesymbol} \\\"#{nid(@dest)}\\\" \"\n s << \"[#{@attrs.join(',')}]\" unless @attrs.empty?\n s << \"[#{attrs.join(',')}]\" unless attrs.empty?\n if block_given?\n wl = yield(@weight)\n s << '[' << case wl\n when String, Symbol\n \"label=\\\"#{wl.tr('\"',\"'\")}\\\"\"\n when Enumerable\n wl.join(', ')\n end << ']'\n else\n s << weightlabel(@weight) if linkcount\n end\n s << \";\\n\"\n s\n end", "def dot_code(io)\n # puts \"drawing record\"\n props\n portinfos = get_port_nodes.map do |n|\n OpenStruct.new(name: n.node_name,\n label: MFactor::dot_escape(n.dot_label)+(n.symbol ? 
\"(#{n.symbol})\":\"\"))\n end\n label= '\"{{'\n label << (portinfos.map do |p|\n \"<#{p.name}> #{p.label}\"\n end.join(\" | \"))\n label << '}}\"'\n attrs={:label => label}\n attr_string=attrs.map do |k,v|\n \"#{k.to_s}=#{v}\"\n end.join(\", \")\n io << node_name << ' ' << '[' << attr_string << \"]\\n\"\n end", "def to_s\n s = \"\"\n @nodes.each do |identifier, node|\n s += \"#{identifier} (#{node.contigs.join(\",\")}) => #{@edges[identifier]} \\n\"\n end\n s\n end", "def to_s\n res = \"\"\n @graph.each_pair do |key, value|\n res += key.to_s + \": \" + value.join(', ') + \"\\n\"\n end\n res\n end", "def graph\n \n \n @graphml = \"<graphml><key id='label' for='all' attr.name='label' attr.type='string'/><key id='link' for='all' attr.name='link' attr.type='string'/><key id='weight' for='all' attr.name='weight' attr.type='double'/><key id='edgetype' for='edge' attr.name='edgetype' attr.type='string'/><key id='nodetype' for='node' attr.name='nodetype' attr.type='string'/><graph edgedefault='directed'>\" \n @sif = ''\n \n @max = 2\n @log = Array.new\n @additional = {'cleavage' => {}, 'inverse_cleavage' => {}, 'inhibition' => {}, 'inverse_inhibition' => {}, 'interaction' => {}}\n add_nodes(nil,self.protein,nil,0,nil)\n add_additional_nodes(@additional['cleavage'],'cleavage')\n add_additional_nodes(@additional['inverse_cleavage'],'inverse_cleavage')\n add_additional_nodes(@additional['inhibition'],'inhibition')\n add_additional_nodes(@additional['inverse_inhibition'],'inverse_inhibition')\n \n @graphml << \"</graph></graphml>\"\n\n \n # f = File.open(\"#{RAILS_ROOT}/public/images/dynamic/#{self.protein.name}-#{'ppi' if ppi}network.graphml\", 'w')\n # f << @graphml\n # f.close\n\n return @graphml\n end", "def outputDot(podhash)\n # Random set of 16 colors ...\n colors = [0xFF00FF,0x9900FF,0x99CCFF,0x00CC99,\n 0x0000FF,0xFFCC00,0xFF9900,0xFF0000,\n 0xCC00CC,0x6666FF,0xFF99FF,0x6699FF,\n 0x993399,0xFFCCFF,0x6600FF,0xCC00FF,\n 0x00FF00,0xFF0033,0xFF0033,0xCCCCCC];\n\n puts \"digraph PodDeps {\"\n puts \"\\tsize=\\\"8,6\\\";\"\n puts \"\\tnode[fontsize=10];\"\n\n count = 0\n podhash.each do |k,v|\n # Only color if there are more than 2 edges from this node.\n if v.deps.length > 2\n colorstring = sprintf(\"\\\"\\#%06x\\\"\",colors[count%16])\n puts \"\\tedge [color=#{colorstring}];\"\n count = count + 1\n else\n colorstring = sprintf(\"\\\"\\#000000\\\"\")\n puts \"\\tedge [color=black];\"\n end\n v.deps.each do |d|\n puts \"\\t\\\"#{k} #{v.version}\\\" -> \\\"#{d.name} #{d.version}\\\";\"\n end\n puts \"\\t\\\"#{k} #{v.version}\\\" [color=#{colorstring}];\";\n end\n puts \"}\"\nend", "def get_dot(start_state)\n if ((start_state).nil?)\n return nil\n end\n # The output DOT graph for visualization\n dot = nil\n @marked_states = HashSet.new\n if (start_state.is_a?(DFAState))\n dot = self.attr_stlib.get_instance_of(\"org/antlr/tool/templates/dot/dfa\")\n dot.set_attribute(\"startState\", Utils.integer(start_state.attr_state_number))\n dot.set_attribute(\"useBox\", Boolean.value_of(Tool.attr_internal_option_show_nfaconfigs_in_dfa))\n walk_creating_dfadot(dot, start_state)\n else\n dot = self.attr_stlib.get_instance_of(\"org/antlr/tool/templates/dot/nfa\")\n dot.set_attribute(\"startState\", Utils.integer(start_state.attr_state_number))\n walk_rule_nfacreating_dot(dot, start_state)\n end\n dot.set_attribute(\"rankdir\", @rankdir)\n return dot.to_s\n end", "def to_gv\n # General graph options\n str = \"digraph #{@name} {\\n\"\n str += \"\\t// General graph options\\n\"\n str += \"\\trankdir = LR;\\n\"\n str 
+= \"\\tsize = \\\"10.5,7.5\\\";\\n\"\n str += \"\\tnode [ style = filled, fillcolor = white, fontsize = 8.0 ]\\n\"\n str += \"\\tedge [ arrowhead = vee, arrowsize = 0.5, fontsize = 8.0 ]\\n\"\n str += \"\\n\"\n\n str += \"\\t// Places\\n\"\n str += \"\\tnode [ shape = circle ];\\n\"\n @places.each_value {|id| str += @objects[id].to_gv }\n str += \"\\n\"\n\n str += \"\\t// Transitions\\n\"\n str += \"\\tnode [ shape = box, fillcolor = grey90 ];\\n\"\n @transitions.each_value {|id| str += @objects[id].to_gv }\n str += \"\\n\"\n\n str += \"\\t// Arcs\\n\"\n @arcs.each_value {|id| str += @objects[id].to_gv }\n str += \"}\\n\" # Graph closure\n\n return str\n end", "def to_graphviz_cmd(filename, cmd, lang, *attrs, &block)\n case cmd.to_sym\n when :nop, :nop1\n cmd = 'neato -n'\n when :nop2\n cmd = 'neato -n2'\n end\n\n if GRAPHVIZ_PATH\n cmd = File.join(GRAPHVIZ_PATH, cmd)\n end\n\n if lang == :pspdf\n epstopdf(filename) do |tmpfile|\n IO.popen(\"#{cmd} -Tps -o \\\"#{tmpfile}\\\"\",\"w\") do |io| \n io << to_dot(*attrs, &block)\n end\n end\n else\n IO.popen(\"#{cmd} -T#{lang} -o \\\"#{filename}\\\"\",\"w\") do |io| \n io << to_dot(*attrs, &block)\n end\n end\n end", "def draw_dependency_graph(theDOTfile, isVerbose = false)\n puts \" #{theDOTfile}\" if isVerbose\n dot_file = File.open(theDOTfile, 'w')\n emit_heading(dot_file)\n emit_body(dot_file)\n emit_trailing(dot_file)\n end", "def create_dot_graph\r\n\r\n \t# Create the base object, then add edges/nodes later etc\r\n \tmy_graph = Graph.new\r\n \tmy_graph.name= \"State_Model\"\r\n \tmy_graph.node_style= :ellipse\r\n \tmy_graph.type = :digraph\r\n\r\n \t# For each entry in the Adjacency matrix extract the relationships and add the graph edges.\r\n \tself.adjacency_matrix.each_key do |table_key|\r\n \t\ttransition_list=self.adjacency_matrix[table_key]\r\n \t\ttransition_list.each do |transition|\r\n \t\t # is the action guarded?\r\n \t\t if self.guarded_actions !=nil\r\n \t\t guarded=self.guarded_actions.include? 
transition.action\r\n \t\t\tend # end if \r\n \t\t\t# add the edge...\r\n \t\t\tmy_graph.add_edge(transition.start_state, transition.end_state, \" #{transition.action} \", guarded)\r\n \t\tend # end add transitions\r\n \tend # end add nodes\r\n\r\n \treturn my_graph\r\n end", "def test_to_s\n graph = DirectedGraph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n graph.add_edge('a','b').add_edge('c','b')\n\n assert(graph.to_s == 'a=>b,b=>,c=>b')\n end", "def to_graph(indent=nil)\n out = \"Parents:\\n\"\n self.parents.each { |parent| out << \" #{parent}\" }\n out << \"\\nObject: #{self.to_s}\\n\"\n out << \"Children:\\n\"\n self.children.each { |child| out << \" #{child}\" }\n out\n end", "def dotty (params = {}, dotfile = 'graph.dot')\n File.open(dotfile, 'w') {|f| f << to_dot(params) }\n system('dotty', dotfile)\n end", "def dotty (params = {}, dotfile = 'graph.dot')\n File.open(dotfile, 'w') {|f| f << to_dot(params) }\n system('dotty', dotfile)\n end", "def map\n shape = @opts.fetch :shape, 'record'\n rslt = StringIO.new\n rslt.puts \"digraph #{@model[:name]} {\"\n rslt.puts ' graph [rankdir=\"LR\"]'\n rslt.puts ''\n\n @model[:tables].each do |table|\n # nodes\n rslt.puts \" \\\"#{table[:name]}\\\" [label=\\\"#{table[:name]}\\\", shape=\\\"#{shape}\\\"]\"\n # @nodes.puts \" \\\"#{table[:name]}\\\" [label=\\\"<f0> #{table[:name]}|<f1> #{table[:desc]}\\\", shape=\\\"#{shape}\\\"]\"\n # edges\n table.fetch(:fks, []).each do |fk|\n rslt.puts \" \\\"#{table[:name]}\\\"->\\\"#{fk[:table]}\\\"\"\n end\n # graphviz\n table.fetch(:graphvizs, []).each do |text|\n rslt.puts \" #{text}\"\n end\n end\n\n rslt.puts '}'\n rslt.string\n end", "def write_graph(name)\n return unless Puppet[:graph]\n\n Puppet.settings.use(:graphing)\n\n file = File.join(Puppet[:graphdir], \"%s.dot\" % name.to_s)\n File.open(file, \"w\") { |f|\n f.puts to_dot(\"name\" => name.to_s.capitalize)\n }\n end", "def write_to_graphic_file (fmt='png', dotfile='graph')\n src = dotfile + '.dot'\n dot = dotfile + '.' + fmt\n \n File.open(src, 'w') {|f| f << self.to_dot << \"\\n\"}\n \n system( \"dot -T#{fmt} #{src} -o #{dot}\" )\n dot\n end", "def generate\n graph = crate.entities.map(&:properties).reject(&:empty?)\n JSON.pretty_generate('@context' => context, '@graph' => graph)\n end", "def test_to_s\n graph = Graph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n graph.add_edge('a','b').add_edge('c','b')\n\n assert(graph.to_s == 'a=>b,b=>a,b=>c,c=>b')\n end", "def write_to_graphic_file (fmt='png', dotfile='graph')\n src = dotfile + '.dot'\n dot = dotfile + '.' 
+ fmt\n\n File.open(src, 'w') {|f| f << self.to_dot << \"\\n\"}\n\n system( \"dot -T#{fmt} #{src} -o #{dot}\" )\n dot\n end", "def graph\n @g ||= GraphViz.new(:G, :type => :digraph)\n end", "def to_dot(**options) = convert_to('dot', **options)", "def generate_graph_image(filename)\n # Create a new graph\n g = GraphViz.new( :G, :type => :digraph )\n\n # Create nodes\n graphviz_nodes = {}\n @nodes_by_color.each do |color, node| \n graphviz_nodes[color] = g.add_nodes(color) \n end\n\n # Add edges\n @nodes_by_color.each do |color, node| \n from_gnode = graphviz_nodes[color]\n node.edges.each do |edge|\n to_gnode = graphviz_nodes[edge.node.color]\n\n edge_attrs = { weight: edge.weight, label: edge.weight }.compact\n\n g.add_edges(from_gnode, to_gnode, edge_attrs)\n end\n end\n\n # Generate output image\n g.output(png: filename)\n end", "def to_s(symbols = {})\n return \"\" if @nodes.empty?\n @nodes.map {|node| node.to_s(symbols) }.join(self.class.format)\n end", "def get_graph\n @graph = Graph.new\n @tparses.each do |p|\n if p[:args]\n p[:args].each do |type, arg|\n @graph.add_edge(p[:idx], arg, 1) if arg >= 0\n end\n end\n end\n\n g = GraphViz.new(:G, :type => :digraph)\n g.node[:shape] = \"box\"\n g.node[:fontsize] = 11\n g.edge[:fontsize] = 9\n\n n = []\n @tparses.each do |p|\n n[p[:idx]] = g.add_nodes(p[:idx].to_s, :label => \"#{p[:word]}/#{p[:pos]}/#{p[:cat]}\")\n end\n\n @tparses.each do |p|\n if p[:args]\n p[:args].each do |type, arg|\n if arg >= 0 then g.add_edges(n[p[:idx]], n[arg], :label => type) end\n end\n end\n end\n\n g.get_node(@root.to_s).set {|_n| _n.color = \"blue\"} if @root >= 0\n g.get_node(@focus.to_s).set {|_n| _n.color = \"red\"} if @focus >= 0\n\n @graph_rendering = g.output(:svg => String)\n end", "def dfa2dot(dfa)\n str = \"digraph {\\n\"\n str += \"size=\\\"2,2\\\"\\nratio=1.0\\n\"\n str += \"node [shape=circle]\\n\"\n str += \"preinit [shape=plaintext, label=\\\"\\\"]\\n\"\n (dfa.states - dfa.final_states).each do |state|\n str += state + \"\\n\"\n end\n str += \"node [shape=doublecircle]\\n\"\n dfa.final_states.each do |state|\n str += state + \"\\n\"\n end\n str += \"preinit -> #{dfa.initial_state}\\n\"\n dfa.states.each do |s1|\n dfa.states.each do |s2|\n res = dfa.transitions.find_all { |tr| tr[0] == s1 and tr[1] == s2 }\n unless res.empty?\n label = res.map { |tr| tr[2] }.join(',')\n str += s1 + \"->\" + s2 + \"[label=\\\"#{label}\\\"]\\n\"\n end\n end\n end\n \n str + \"}\"\n end", "def builder(graph)\n @dot = self.class.builder(graph, @options[:layout])\n end", "def get_graph_rendering(parse)\n return '' if parse.nil? 
|| parse[:root].nil?\n\n tokens = parse[:tokens]\n root = parse[:root]\n focus = parse[:focus]\n\n g = GraphViz.new(:G, :type => :digraph)\n g.node[:shape] = \"box\"\n g.node[:fontsize] = 10\n g.edge[:fontsize] = 9\n\n n = []\n tokens.each do |p|\n n[p[:idx]] = g.add_nodes(p[:idx].to_s, :label => \"#{p[:lex]}/#{p[:pos]}/#{p[:cat]}\")\n end\n\n tokens.each do |p|\n if p[:args]\n p[:args].each do |type, arg|\n if arg >= 0 then g.add_edges(n[p[:idx]], n[arg], :label => type) end\n end\n end\n end\n\n g.get_node(root.to_s).set {|_n| _n.color = \"blue\"} if root >= 0\n g.get_node(focus.to_s).set {|_n| _n.color = \"red\"} if focus >= 0\n g.output(:svg => String)\n end", "def dump_list\n list = \"\"\n @graph.each do |from, hash|\n\tlist << \"#{from} => \"\n\ta = []\n\thash.each do |to, relation|\n\t a.push(\"#{to} (#{relation})\")\n\tend\n\tlist << a.join(\", \") + \"\\n\"\n end\n list\n end", "def to_s\n result = ''\n\n # Return an empty string for an empty graph\n return result unless @vertices.length > 0\n\n @vertices.each do |vertex|\n added = false\n vertex.neighbours.each_with_index do |value, neighbour_index|\n if (value == true)\n added = true\n result << vertex.name << '=>' << @vertices[neighbour_index].name << ','\n end\n end\n # if there has been no edges for the vertex\n result << vertex.name << '=>,' unless added\n end\n\n # remove trailing comma\n result.chop\n end", "def to_s\n \"<SPLATS::Generator @traversal=#{@traversal} @class=#{@class}>\"\n end", "def print_graph\n @vertex_array.each do |vertex|\n print \"#{vertex.name} points to: \"\n vertex.print_outgoing\n puts ''\n end\n end", "def draw_graph(graph)\n type = graph.directed? ? :digraph : :graph\n g = GraphViz.new(:G, :type => type)\n graph.vertices().each do |v|\n g.add_node(v.id)\n end\n\n added_edges = Set.new\n\n graph.edges_by_vertices().each do |x, edges|\n from = g.get_node(x.id)\n edges.each do |y|\n to = g.get_node(y.id)\n\n if graph.directed?\n # draw each individual 'directed' edge\n g.add_edge(from, to)\n else\n # draw only one edge to represent 'undirected' edge\n g.add_edge(from, to) unless added_edges.include?([to, from])\n added_edges.add([from, to])\n end\n\n\n end\n end\n\n\n filename = \"graph.png\"\n g.output( :png => filename )\n system(\"open #{filename}\")\nend", "def nodes_to_s\n string = \"\"\n @nodes.each do |name, node|\n string +=\"#{name}:\\n\\t(#{node.name})\\n\"\n end\n string\n end", "def to_dot\n return unless gv_object\n gv_object.to_s\n end", "def test_graph_to_s\n sut_graph = Graph.new\n sut_graph.name=\"test_graph\" \n sut_graph.type=:digraph\n sut_graph.node_style=:ellipse\n #sut_graph.add_node \"TEST1\"\n #sut_graph.add_node \"TEST2\"\n sut_graph.add_edge(\"TEST1\" , \"TEST2\" , \"take_me_to_test_2\")\n \n \n returned_obj = sut_graph.to_s\n assert( returned_obj.instance_of?(String) , \"Check to_s returns String, returns: #{returned_obj.class}\" )\n assert(returned_obj.scan(/test_graph/).length==1 , \"Check once occurence of graph name in dot to_s.\")\n assert(returned_obj.scan(/digraph test_graph/).length==1 , \"Check graph type and name in dot to_s.\") \n assert(returned_obj.scan(/shape = ellipse/).length==1 , \"Check graph node style in dot to_s.\") \n #assert(returned_obj.scan(/TEST1\\;/).length==1 , \"Check that Node definition is included: TEST1;\")\n #assert(returned_obj.scan(/TEST2\\;/).length==1 , \"Check that Node definition is included: TEST2}\")\n assert(returned_obj.scan(/label = \\\"take_me_to_test_2\"/).length==1 , \"Check that arc label is included\")\n \n end", "def 
make_image(options={})\n file = options.fetch :file, nil\n suffix = options.fetch :suffix, nil\n type = options.fetch :type, 'svg'\n\n unless file\n unless suffix\n suffix = dot_plot_number\n self.dot_plot_number += 1\n end\n if suffix.is_a? Integer\n suffix = suffix.to_s.rjust 5, '0'\n end\n graph_name = uid || 'graph'\n file = \"#{graph_name}-#{suffix}.#{type}\"\n end\n info \"Writing the graph image: '#{suffix}' to the file: '#{file}'\"\n command = ['dot', '-T', type, '-o', file]\n Open3.popen2e(*command) do |stdin, out, process|\n stdin.puts to_dot\n stdin.close\n output = out.read\n debug output unless output.empty?\n process.value.exitstatus == 0\n end\n end", "def export_graphviz(export_filename)\n # create GraphViz object from ruby-graphviz package\n graph_viz_output = GraphViz.new( :G, \n \t\t\t\t\t\t\t\t use: :neato, \n\t\t truecolor: true,\n \t\t inputscale: @scale,\n \t\t margin: 0,\n \t\t bb: \"#{@bounds[:minlon]},#{@bounds[:minlat]},\n \t\t #{@bounds[:maxlon]},#{@bounds[:maxlat]}\",\n \t\t outputorder: :nodesfirst)\n\n # append all vertices\n @visual_vertices.each { |k,v|\n graph_viz_output.add_nodes( v.id , :shape => 'point',\n :comment => \"#{v.lat},#{v.lon}!!\",\n :pos => \"#{v.y},#{v.x}!\")\n }\n\n # append all edges\n @visual_edges.each { |edge|\n graph_viz_output.add_edges( edge.v1.id, edge.v2.id, 'arrowhead' => 'none' )\n }\n\n # export to a given format\n format_sym = export_filename.slice(export_filename.rindex('.')+1,export_filename.size).to_sym\n graph_viz_output.output( format_sym => export_filename )\n end", "def to_xml\n \n text = \"<node id=\\\"#{self.id}\\\" label=\\\"#{self.label}\\\">\\n\"\n \n unless self.attributes.nil?\n text << \"\\t<attvalues>\\n\"\n self.attributes.each do |key, value|\n text << \"\\t\\t<attvalue for=\\\"#{key}\\\" value=\\\"#{value}\\\"></attvalue>\\n\"\n end\n text << \"\\t</attvalues>\\n\"\n end\n \n unless self.viz_size.nil?\n text << \"\\t<viz:size value=\\\"#{self.viz_size}\\\"/>\\n\"\n end\n \n unless self.viz_color.nil?\n text << \"\\t<viz:color b=\\\"#{self.viz_color[:b]}\\\" g=\\\"#{self.viz_color[:g]}\\\" r=\\\"#{self.viz_color[:r]}\\\"/>\\n\"\n end\n \n unless self.viz_position.nil?\n text << \"\\t<viz:position x=\\\"#{self.viz_position[:x]}\\\" y=\\\"#{self.viz_position[:z]}\\\"/>\\n\"\n end\n \n text << \"</node>\\n\"\n text \n end", "def to_dot(opts)\n\n i = fei()\n\n label = \"#{[ i.wfid, i.subid, i.expid].join(' ')} #{tree.first}\"\n label += \" (#{h.state})\" if h.state\n\n a = []\n a << \"\\\"#{i.to_storage_id}\\\" [ label=\\\"#{label}\\\" ];\"\n\n # parent\n\n if h.parent_id\n a << \"\\\"#{i.to_storage_id}\\\" -> \\\"#{parent_id.to_storage_id}\\\";\"\n end\n\n # children\n\n h.children.each do |cfei|\n a << \"\\\"#{i.to_storage_id}\\\" -> \\\"#{Ruote.to_storage_id(cfei)}\\\";\"\n end\n\n a\n end", "def export_graphviz(export_filename)\n # create GraphViz object from ruby-graphviz package\n graph_viz_output = GraphViz.new(:G,\n use: :neato,\n truecolor: true,\n inputscale: @scale,\n margin: 0,\n bb: \"#{@bounds[:minlon]},#{@bounds[:minlat]},\n \t\t #{@bounds[:maxlon]},#{@bounds[:maxlat]}\",\n outputorder: :nodesfirst)\n\n # append all vertices\n @visual_vertices.each { |k, v|\n node = graph_viz_output.add_nodes(v.id, :shape => 'point',\n :comment => \"#{v.lat},#{v.lon}!\",\n :pos => \"#{v.y},#{v.x}!\")\n if @showed_vertices.include? 
node.id\n node.set { |node|\n node.color = 'red'\n node.height = 0.4\n }\n end\n }\n\n @visual_edges.each do |edge|\n direction_constr(edge, graph_viz_output)\n end\n\n # export to a given format\n format_sym = export_filename.slice(export_filename.rindex('.') + 1, export_filename.size).to_sym\n graph_viz_output.output(format_sym => export_filename)\n end", "def to_s\n str = \n%{Petri Net [#{@name}]\n----------------------------\nDescription: #{@description}\nFilename: #{@filename}\n\nPlaces\n----------------------------\n#{str = ''; @places.each_value {|p| str += @objects[p].to_s + \"\\n\"}; str }\nTransitions\n----------------------------\n#{str = ''; @transitions.each_value {|t| str += @objects[t].to_s + \"\\n\" }; str }\nArcs\n----------------------------\n#{str = ''; @arcs.each_value {|a| str += @objects[a].to_s + \"\\n\" }; str}\n}\n return str\n end", "def to_s\n if ((@p).equal?(-1))\n fill_buffer\n end\n buf = StringBuffer.new\n i = 0\n while i < @nodes.size\n t = @nodes.get(i)\n buf.append(\" \")\n buf.append(@adaptor.get_type(t))\n i += 1\n end\n return buf.to_s\n end", "def to_graph(indent_level: 0, show_attr: true, out: [])\n\t\t\tmargin = ''\n\t\t\t0.upto(indent_level/STEP-1) { |p| margin += (p==0 ? ' ' : '|') + ' '*(STEP - 1) }\n\t\t\tmargin += '|' + '-'*(STEP - 2)\n\t\t\tout << margin + \"#{to_s(show_attr: show_attr)}\"\n\t\t\[email protected] do |child|\n\t\t\t\tchild.to_graph(indent_level: indent_level+STEP, show_attr: show_attr, out: out)\n\t\t\tend\n\t\t\treturn out\n\t\tend", "def to_svg\n draw\n invert_coords\n \n polygon = @coords.values.map { |c| c.join(',') }.join(' ')\n labels = ''\n \n t.each { |v| labels << vertex_label(v) << vertex_arc(v) << vertex_value(v) }\n t.each { |v| labels << edge_label(v) } # Needs to be drawn last in order to make ImageMagick render it correctly.\n \n <<-EOT\n<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>\n<svg xmlns=\"http://www.w3.org/2000/svg\" version=\"1.1\" width=\"#{canvas_size}\" height=\"#{canvas_size}\">\n<polygon fill=\"#f5eae5\" stroke=\"#993300\" stroke-width=\"1\" points=\"#{polygon}\"/>\n#{labels}</svg>\nEOT\n end", "def to_s\n nodes.map do |_key, node|\n node.to_s\n end.join(\"\\n\")\n end", "def to_s\n nodes.map(&:to_s).join(';')\n end", "def visualize\n require 'open3'\n Open3.popen3('graph-easy') do |i,o,_|\n i.write to_graph_easy\n i.close\n puts o.read\n end\n end", "def inspect\n result = +\"#<#{self.class.name}:#{object_id}\"\n result << ' @vertices=['\n list = vertices.map { |v| \"#<#{v.selfie}>\" }\n result << list.join(', ')\n result << '] '\n edges = []\n vertices.each do |v|\n edges << v.edges do |e|\n result << \"#{v.object_id} #{e.inspect}\"\n end\n end\n result << \"edges=[#{edges.join(\",\\n \")}]>\"\n result\n end", "def printGraph\n\t\tp @g.graphSet\n\t\treturn @g.graphSet\n\tend", "def print\r\n puts \"------ AlphabetGraph is ------\"\r\n edges = \"\"\r\n @edges.each do |edge|\r\n edges << edge.to_s << \", \"\r\n end\r\n puts \"edges : #{edges}\"\r\n vertices = \"\"\r\n @charNodes.each do |node|\r\n vertices << node.to_s << \", \"\r\n end\r\n puts \"vertices : #{vertices}\"\r\n end", "def graphviz(oGraph, c0, c1, c2, c3, c4)\n return if @src_id.nil? 
|| @dst_id.nil?\n return if @count > 1 # TODO: eliminate dupulicate trans\n\n guard = @block.condition_success unless @block.nil?\n guard = \"unknown\" if guard.nil?\n\n if @title.nil?\n title = ''\n else\n title = \"#{@title}\"\n end\n @label = @type + '(' + title + ')\\n' + guard + '\\n' + @block.id\n\n if (@src_id =~ /^V_/) && (@dst_id =~ /^V_/)\n # V to V form\n src_id = @src_id + '_inbound'\n dst_id = @dst_id + '_inbound'\n\n elsif (@src_id =~ /^V_/) && (@dst_id =~ /^C_/)\n src_id = @src_id + '_inbound'\n dst_id = @dst_id\n\n c2.add_node(dst_id) if $graphviz_with_rank\n elsif (@src_id =~ /^C_/) && (@dst_id =~ /^V_/)\n src_id = @src_id\n dst_id = @dst_id + '_outbound'\n c2.add_node(src_id) if $graphviz_with_rank\n c4.add_node(dst_id) if $graphviz_with_rank\n elsif (@src_id =~ /^C_/) && (@dst_id =~ /^C_/)\n # redirect\n src_id = @src_id\n dst_id = @dst_id\n c2.add_node(src_id) if $graphviz_with_rank\n c3.add_node(dst_id) if $graphviz_with_rank\n else\n src_id = @src_id\n dst_id = @dst_id\n end\n\n # draw\n e = oGraph.add_edge(src_id, dst_id, label: @label)\n end", "def graph_new(program, gopts={}, nopts={}, eopts={})\n # initialize new Graphviz graph\n # g = GraphViz::new( \"G\" )\n # program should be one of dot / neato / twopi / circo / fdp\n g = GraphViz::new( :G, :type => :graph, :use=>program)\n g[:overlap] = gopts[:overlap] || \"orthoxy\"\n g[:rankdir] = gopts[:rankdir] || \"LR\"\n \n # set global node options\n g.node[:color] = nopts[:color] || \"#3d0d4c\"\n g.node[:style] = nopts[:style] || \"filled\"\n g.node[:shape] = nopts[:shape] || \"box\"\n g.node[:penwidth] = nopts[:penwidth] || \"1\"\n g.node[:fontname] = nopts[:fontname] || \"Arial\" # \"Trebuchet MS\"\n g.node[:fontsize] = nopts[:fontsize] || \"8\"\n g.node[:fillcolor]= nopts[:fillcolor] || LatticeGridHelper.default_fill_color\n g.node[:fontcolor]= nopts[:fontcolor] || \"#474724\"\n g.node[:margin] = nopts[:margin] || \"0.0\"\n g.node[:width] = nopts[:width] || \"0.2\"\n g.node[:height] = nopts[:height] || \"0.1\"\n g.node[:shape] = nopts[:shape] || \"ellipse\"\n g.node[:margin] = nopts[:margin] || \"0.05\"\n \n # set global edge options\n g.edge[:color] = eopts[:color] || \"#999999\"\n g.edge[:len] = eopts[:len] || \"1\"\n g.edge[:fontsize] = eopts[:fontsize] || \"6\"\n g.edge[:fontcolor] = eopts[:fontcolor] || \"#444444\"\n g.edge[:fontname] = eopts[:fontname] || \"Verdana\"\n g.edge[:dir] = eopts[:dir] || \"forward\"\n g.edge[:arrowsize] = eopts[:arrowsize] || \"0.0\"\n \n return g\nend", "def to_s\n\t\treturn self.stringify_nodes( @output )\n\tend", "def show\n @workflow = Workflow.find(params[:id])\n \n default_graph_settings = \"node [shape=box,style=filled];\\n\"\n \n @processed_graph = default_graph_settings + @workflow.graph + process_graph(@workflow)\n \n gv = IO.popen(\"/usr/local/bin/dot -q -Tpng\", \"w+\")\n gv.puts \"digraph G{\", @processed_graph, \"}\"\n gv.close_write\n @gvpng = gv.read\n\n\n gv = IO.popen(\"/usr/local/bin/dot -q -Tcmapx\", \"w+\")\n gv.puts \"digraph G{\", @processed_graph, \"}\"\n gv.close_write\n @gvmap = gv.read\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @workflow }\n end\n end", "def test_to_s_only_vertices\n graph = Graph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n\n assert(graph.to_s == 'a=>,b=>,c=>')\n end", "def process_graphviz(data)\n data.gsub(/\\<graphviz\\>\\s*([\\s\\S]*?)\\s*\\<\\/graphviz\\>/m) do\n id = 
Digest::SHA1.hexdigest($1)\n\n # Write graphviz graph to temp file\n tmp = Tempfile.new ''\n tmp.write $1\n tmp.close\n\n out_path_dir = ::File.expand_path ::File.join(@wiki.path, 'tmp')\n Dir.mkdir out_path_dir unless ::File.exists? out_path_dir\n out_path = ::File.join(out_path_dir, id)\n\n system \"#{@wiki.dot} -Tpng -o #{out_path}.png #{tmp.path}\"\n\n # Clean up tmp file\n tmp.delete\n\n # Replace graph with img link\n %Q(<img alt=\"Graphviz image\" src=\"/tmp/#{id}.png\">)\n end\n end", "def to_gql\n result = ' ' * __depth + __name\n result += __params_to_s(__params, true) unless __params.empty?\n unless __nodes.empty?\n result += \" {\\n\"\n result += __nodes.map(&:to_gql).join(\"\\n\")\n result += \"\\n#{' ' * __depth}}\"\n end\n\n result\n end", "def repr\n @nodes.values_at(*@nodes.each_index.reject {|i| @index_nodes.include? i}).join('/')\n end", "def list2dot(list, options = {})\n return unless list\n\n entries = list.split(\"\\n\")\n return unless entries.count > 0\n\n if options[:leading]\n options[:nodes], options[:edges] = entries.shift.split(' ').map(&:to_i)\n end\n\n output = options[:directed] ? \"digraph {\\n\" : \"graph {\\n\"\n\n list_output, options = make_dot_entries(entries, options)\n\n output += list_output + '}'\n\n check_nodes(options)\n check_edges(options)\n\n output\nend", "def to_s\n edges = @adjacent_nodes.map do |node, payload|\n \"#{node} (lw: #{payload[:weight]})\"\n end.join(', ')\n \"Node #{value} has adjacent nodes: #{edges}\"\n end", "def show\n graph = GraphViz.new(:G, type: :digraph)\n competency = Competency.find(params[:id])\n graphviz(graph, competency)\n @file = Tempfile.new(['foo', '.png'], \"#{Rails.root}/public/images\")\n graph.output(png: @file.path)\n end", "def x_visualize\n require 'open3'\n Open3.popen3('dot -T svg | display') do |i,o,_|\n i.write to_dot\n i.close\n end\n end", "def gv_graph_name\n [:name, :id].each do |method|\n return send method if respond_to? 
method and send method\n end\n 'graph'\n end", "def show\n\t\tputs @graph\n\tend", "def export_graphviz_path(export_filename, lat_start, lon_start, lat_end, lon_end, shortestPath)\n # create GraphViz object from ruby-graphviz package\n graph_viz_output = GraphViz.new( :G,\n use: :neato,\n truecolor: true,\n inputscale: @scale,\n margin: 0,\n bb: \"#{@bounds[:minlon]},#{@bounds[:minlat]},\n \t\t #{@bounds[:maxlon]},#{@bounds[:maxlat]}\",\n outputorder: :nodesfirst)\n\n # append all vertices\n @visual_vertices.each { |k,v|\n # lat_start, lon_start, lat_end, lon_end,\n if (lat_start == v.lat && lon_start == v.lon || lat_end == v.lat && lon_end == v.lon)\n graph_viz_output.add_nodes( v.id ,\n :shape => 'point',\n :color => 'red',\n :width => '0.2',\n :comment => \"#{v.lat},#{v.lon}!!\",\n :pos => \"#{v.y},#{v.x}!\")\n else\n graph_viz_output.add_nodes( v.id ,\n :shape => 'point',\n :comment => \"#{v.lat},#{v.lon}!!\",\n :pos => \"#{v.y},#{v.x}!\")\n end\n }\n\n # append all vertices\n @visual_edges.each { |edge|\n\n if shortestPath.include?(edge.v1.id) and shortestPath.include?(edge.v2.id)\n graph_viz_output.add_edges( edge.v1.id, edge.v2.id, 'arrowhead' => 'none', 'color' => 'red')\n else\n graph_viz_output.add_edges( edge.v1.id, edge.v2.id, 'arrowhead' => 'none', 'color' => 'black')\n end\n }\n\n # Process dijkstra for starting vertex\n # After finishing find finish vertex and go throught parent vertices to start\n\n # export to a given format\n format_sym = export_filename.slice(export_filename.rindex('.')+1,export_filename.size).to_sym\n graph_viz_output.output( format_sym => export_filename )\n end", "def to_s\n result = ''\n\n # If the document starts with comments, we want to print those at the top.\n top_comments = @nodes.take_while { |node| node.is_a? 
CommentNode }\n top_comments.each do |comment|\n result += comment.to_s\n end\n\n # Vanity new lines ftw.\n result += \"\\n\"\n\n # Pop the top comments off the node list.\n top_comments.length.times do\n @nodes.shift\n end\n\n if @lightordark == :dark\n result += \"set background=dark\\n\\n\"\n else\n result += \"set background=light\\n\\n\"\n end\n\n result += \"highlight clear\\n\\n\"\n result += \"if exists('syntax_on')\\n\"\n result += \" syntax reset\\n\"\n result += \"endif\\n\\n\"\n result += \"let g:colors_name = '#{@name.to_s}'\\n\\n\"\n\n @nodes.each do |node|\n result += node.to_s\n end\n\n return result\n end", "def to_s\n return @nodes.values.map{|node| node.to_s}.join\n #return @nodes.values.reduce(''){|sum, node| sum + node.to_s}\n end", "def to_s\n @vertices.to_s\n end", "def export_graphviz_nodes(export_filename, id_start, id_end)\n # create GraphViz object from ruby-graphviz package\n graph_viz_output = GraphViz.new( :G,\n use: :neato,\n truecolor: true,\n inputscale: @scale,\n margin: 0,\n bb: \"#{@bounds[:minlon]},#{@bounds[:minlat]},\n \t\t #{@bounds[:maxlon]},#{@bounds[:maxlat]}\",\n outputorder: :nodesfirst)\n\n if id_start != nil && id_end != nil\n # append all vertices\n @visual_vertices.each { |k,v|\n if (id_start == k || id_end == k)\n graph_viz_output.add_nodes( v.id ,\n :shape => 'point',\n :color => 'red',\n :width => '0.2',\n :comment => \"#{v.lat},#{v.lon}!!\",\n :pos => \"#{v.y},#{v.x}!\")\n else\n graph_viz_output.add_nodes( v.id ,\n :shape => 'point',\n :comment => \"#{v.lat},#{v.lon}!!\",\n :pos => \"#{v.y},#{v.x}!\")\n end\n }\n else\n # append all vertices\n @visual_vertices.each { |k,v|\n graph_viz_output.add_nodes( v.id , :shape => 'point',\n :comment => \"#{v.lat},#{v.lon}!!\",\n :pos => \"#{v.y},#{v.x}!\")\n }\n end\n\n # append all edges\n @visual_edges.each { |edge|\n graph_viz_output.add_edges( edge.v1.id, edge.v2.id, 'arrowhead' => 'none' )\n }\n\n # export to a given format\n format_sym = export_filename.slice(export_filename.rindex('.')+1,export_filename.size).to_sym\n graph_viz_output.output( format_sym => export_filename )\n end", "def parse_dotfile\n # the chunk is everything inside '{}'\n raw_chunk = @raw_dotfile.split(\"{\")[1].split(\"}\")[0].strip\n # pull out the header\n raw_header = raw_chunk.match(/([\\w\\s*=\".,\\s\\[\\]_\\\\]+;)*/m)[0]\n # find body by chopping header off chunk\n raw_body = raw_chunk.sub(raw_header, \"\")\n # split the body on '>];', which delimits the tables section\n raw_connections = raw_body.split(\">];\")[-1].strip\n # split out the tables section from the body\n raw_tables = raw_body.split(\">];\")[0 .. 
-2].join(\">];\").strip + \" \\n>];\"\n\n # assemble the output hash\n @graph_type = @raw_dotfile.match(/\\A\\s*((?:di)?graph)/)[1]\n @title = @raw_dotfile.match(/\\A\\s*(?:di)?graph\\s*(\\w+)/)[1]\n @header = parse_header(raw_header, \";\")\n @nodes = parse_nodes(raw_tables)\n @connections = parse_connections(raw_connections)\n end", "def _to_dot(dot, l, c, e, i)\n i += 1\n dot << \"#{c} -> #{i} [label=\\\"#{l}\\\"];\" if l\n c = i\n case e\n when LiteralString\n dot << \"#{i} [label=\\\"#{e.inspect.gsub('\"', '\\\\\"')}.lit\\\"];\"\n i\n when Symbol, Numeric, String, Class, TrueClass, FalseClass, NilClass\n dot << \"#{i} [label=\\\"#{e.inspect.gsub('\"', '\\\\\"')}\\\"];\"\n i\n when Array\n dot << \"#{i} [label=\\\"Array\\\"];\"\n e.each_with_index do |v, j|\n i = _to_dot(dot, j, c, v, i)\n end\n when Hash\n dot << \"#{i} [label=\\\"Hash\\\"];\"\n e.each do |k, v|\n i = _to_dot(dot, k, c, v, i)\n end\n when SQL::ComplexExpression \n dot << \"#{i} [label=\\\"ComplexExpression: #{e.op}\\\"];\"\n e.args.each_with_index do |v, j|\n i = _to_dot(dot, j, c, v, i)\n end\n when SQL::Identifier\n dot << \"#{i} [label=\\\"Identifier\\\"];\"\n i = _to_dot(dot, :value, c, e.value, i)\n when SQL::QualifiedIdentifier\n dot << \"#{i} [label=\\\"QualifiedIdentifier\\\"];\"\n i = _to_dot(dot, :table, c, e.table, i)\n i = _to_dot(dot, :column, c, e.column, i)\n when SQL::OrderedExpression\n dot << \"#{i} [label=\\\"OrderedExpression: #{e.descending ? :DESC : :ASC}#{\" NULLS #{e.nulls.to_s.upcase}\" if e.nulls}\\\"];\"\n i = _to_dot(dot, :expression, c, e.expression, i)\n when SQL::AliasedExpression\n dot << \"#{i} [label=\\\"AliasedExpression\\\"];\"\n i = _to_dot(dot, :expression, c, e.expression, i)\n i = _to_dot(dot, :alias, c, e.aliaz, i)\n when SQL::CaseExpression\n dot << \"#{i} [label=\\\"CaseExpression\\\"];\"\n i = _to_dot(dot, :expression, c, e.expression, i) if e.expression\n i = _to_dot(dot, :conditions, c, e.conditions, i)\n i = _to_dot(dot, :default, c, e.default, i)\n when SQL::Cast\n dot << \"#{i} [label=\\\"Cast\\\"];\"\n i = _to_dot(dot, :expr, c, e.expr, i)\n i = _to_dot(dot, :type, c, e.type, i)\n when SQL::Function\n dot << \"#{i} [label=\\\"Function: #{e.f}\\\"];\"\n e.args.each_with_index do |v, j|\n i = _to_dot(dot, j, c, v, i)\n end\n when SQL::Subscript \n dot << \"#{i} [label=\\\"Subscript: #{e.f}\\\"];\"\n i = _to_dot(dot, :f, c, e.f, i)\n i = _to_dot(dot, :sub, c, e.sub, i)\n when SQL::WindowFunction\n dot << \"#{i} [label=\\\"WindowFunction\\\"];\"\n i = _to_dot(dot, :function, c, e.function, i)\n i = _to_dot(dot, :window, c, e.window, i)\n when SQL::Window\n dot << \"#{i} [label=\\\"Window\\\"];\"\n i = _to_dot(dot, :opts, c, e.opts, i)\n when SQL::PlaceholderLiteralString\n str = e.str\n str = \"(#{str})\" if e.parens\n dot << \"#{i} [label=\\\"PlaceholderLiteralString: #{str.inspect.gsub('\"', '\\\\\"')}\\\"];\"\n i = _to_dot(dot, :args, c, e.args, i)\n when SQL::JoinClause\n str = \"#{e.join_type.to_s.upcase} JOIN\"\n if e.is_a?(SQL::JoinOnClause)\n str << \" ON\"\n elsif e.is_a?(SQL::JoinUsingClause)\n str << \" USING\"\n end\n dot << \"#{i} [label=\\\"#{str}\\\"];\"\n i = _to_dot(dot, :table, c, e.table, i)\n i = _to_dot(dot, :alias, c, e.table_alias, i) if e.table_alias\n if e.is_a?(SQL::JoinOnClause)\n i = _to_dot(dot, :on, c, e.on, i)\n elsif e.is_a?(SQL::JoinUsingClause)\n i = _to_dot(dot, :using, c, e.using, i)\n end\n when Dataset\n dot << \"#{i} [label=\\\"Dataset\\\"];\"\n TO_DOT_OPTIONS.each do |k|\n next unless e.opts[k]\n i = _to_dot(dot, k, c, e.opts[k], 
i)\n end\n else\n dot << \"#{i} [label=\\\"Unhandled: #{e.inspect.gsub('\"', \"''\")}\\\"];\"\n end\n i\n end", "def to_s\n res = \"\\n\" + @objs + ':'\n res += ' ' + @deps.uniq.join(' ') if @deps\n res += \"\\n\"\n @dirs_to_create.each do |dir|\n res += \"\\t\" + Platform.mkdir(dir) + \"\\n\"\n end\n @files_to_copy.each do |k,v|\n res += \"\\t\" + Platform.cp(v, k) + \"\\n\"\n end\n @rules.each { |r| res += \"\\t\" + r + \"\\n\" }\n res\n end", "def to_s\n print \"[ \"\n\n each do |node|\n print \"#{node.data}\"\n print \", \" unless node.next_node == nil\n end\n\n print \" ]\\n\"\n end", "def to_s\n print \"[ \"\n\n each do |node|\n print \"#{node.data}\"\n print \", \" unless node.next_node == nil\n end\n\n print \" ]\\n\"\n end", "def to_s\n s = \"\"\n each do\n |nodo| s += nodo.to_s + \"\\n\"\n end\n s\n end", "def print_graph\n puts\n adj_lists.each do |vertex|\n print \"#{vertex.name}\"\n neighbor = vertex.adj_list\n while !neighbor.nil?\n print \" --> #{neighbor.name} (#{neighbor.weight})\"\n neighbor = neighbor.next\n end\n puts\n end\n end" ]
[ "0.7877583", "0.7474248", "0.7474248", "0.7381871", "0.73514456", "0.7338884", "0.7243258", "0.70842546", "0.7081876", "0.7055181", "0.705158", "0.70044804", "0.69768965", "0.6903931", "0.6893293", "0.6819366", "0.6757849", "0.67157936", "0.66546035", "0.662403", "0.6598103", "0.65457106", "0.65063965", "0.6505489", "0.64991146", "0.6487892", "0.6479099", "0.6430471", "0.64216524", "0.63796085", "0.63444704", "0.63161224", "0.63095564", "0.6298571", "0.6264806", "0.6254042", "0.6254042", "0.62251663", "0.6222424", "0.6146923", "0.61448187", "0.61374557", "0.60926485", "0.6041678", "0.60245603", "0.6010992", "0.5989776", "0.5978607", "0.5974112", "0.5968024", "0.59542024", "0.592826", "0.59180564", "0.5904958", "0.5895366", "0.5879278", "0.58741534", "0.58116233", "0.5799294", "0.57857615", "0.5772361", "0.5764351", "0.5755785", "0.5736149", "0.5729906", "0.5712454", "0.5705191", "0.5671733", "0.5667193", "0.56551445", "0.56545335", "0.5649795", "0.5634147", "0.5629625", "0.56103694", "0.56086296", "0.55942893", "0.5578776", "0.5561697", "0.55507815", "0.5526893", "0.55134606", "0.55030614", "0.54878175", "0.54851276", "0.5469891", "0.54697084", "0.54654056", "0.54538", "0.5449335", "0.54419065", "0.5441359", "0.543714", "0.5416932", "0.5405988", "0.53941095", "0.5392055", "0.5392055", "0.5389702", "0.53876024" ]
0.60193527
45
Create a new DNS monitor with the given +options+.
def initialize( options=DEFAULT_OPTIONS ) options = DEFAULT_OPTIONS.merge( options || {} ) options.each do |name, value| self.public_send( "#{name}=", value ) end @client = Whois::Client.new end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initialize(options = {})\n @options = {\n :daemonize => DEFAULT_DAEMONIZE,\n :host => DEFAULT_HOST,\n :path => DEFAULT_PATH,\n :pid_file => DEFAULT_PID_FILE,\n :port => DEFAULT_PORT,\n :threads_number => DEFAULT_THREADS_NUMBER,\n :timeout => DEFAULT_TIMEOUT\n }.merge(options)\n \n @stats = {}\n end", "def create_dhcp_server(network, options)\n end", "def initialize( options=DEFAULT_OPTIONS )\n\t\t\toptions = DEFAULT_OPTIONS.merge( options || {} )\n\n\t\t\toptions.each do |name, value|\n\t\t\t\tself.public_send( \"#{name}=\", value )\n\t\t\tend\n\n\t\t\t@resolver = Resolv::DNS.new\n\t\tend", "def create(options = {})\n response = request(:post, \"/settings/hypervisor_zones.json\", :query => {:pack => options})\n end", "def initialize(info = {})\n super\n register_options(\n [\n OptAddressRange.new('RHOSTS', [true, 'The multicast address or CIDR range of targets to query', '224.0.0.252']),\n Opt::RPORT(5355),\n # TODO: allow more than one\n OptString.new('NAME', [true, 'The name to query', 'localhost']),\n OptString.new('TYPE', [true, 'The query type (name, # or TYPE#)', 'A'])\n ],\n self.class\n )\n end", "def find_or_create_host(opts)\n\t\treport_host(opts.merge({:wait => true}))\n\tend", "def initialize(address, options = {})\n @address = Address.new(address)\n @options = options\n @mutex = Mutex.new\n @monitor = Monitor.new(self, options)\n @description = Description.new(self)\n end", "def create options = {}\n raise ArgumentError.new \"Bad options passed #{options}\" unless options[ :id ]\n case options[:type]\n when :zmq\n Zmq.new options\n else\n Redis.new options\n end\n end", "def initialize(options = {})\n @formatter = Format.new\n @options = options\n @limit = options[:limit]\n @interval = options[:interval] || 5\n case options[:type]\n when :icmp\n @ping_handler = Net::Ping::External.new(@options[:host], @options[:port])\n when :udp\n @ping_handler = Net::Ping::UDP.new(@options[:host], @options[:port])\n when :tcp\n @ping_handler = Net::Ping::TCP.new(@options[:host], @options[:port])\n when :http, :https\n @ping_handler = Net::Ping::HTTP.new(@options[:url])\n when :ldap, :ldaps\n @ping_handler = Net::Ping::LDAP.new(@options[:url])\n end\n end", "def create(opts)\n opts = check_params(opts,[:search_base_dns,:servers])\n super(opts)\n end", "def create(options = {})\n response = request(:post, \"/network_zones.json\", :query => {:pack => options})\n end", "def initialize(options = {})\n @registry = options.fetch(:registry) { Restforce::DB::Registry }\n @interval = options.fetch(:interval) { DEFAULT_INTERVAL }\n @delay = options.fetch(:delay) { DEFAULT_DELAY }\n @verbose = options.fetch(:verbose) { false }\n @logger = options.fetch(:logger)\n @tracker = options.fetch(:tracker)\n @runner = options.fetch(:runner)\n @exit = options.fetch(:run_once)\n @history = options.fetch(:history)\n\n DB.reset\n DB.configure { |config| config.parse(options[:config]) }\n end", "def new(options) \n Client.get(\"/patterns/new\", :query => options)\n end", "def add(options = {})\n entry = Entry.new(\n ip_address: options[:ip_address],\n hostname: options[:hostname],\n aliases: options[:aliases],\n comment: options[:comment],\n priority: options[:priority],\n )\n\n @entries << entry\n remove_existing_hostnames(entry) if options[:unique]\n end", "def initialize(options={})\n @options = options\n @nsqd_tcp_addresses = s_to_a(options[:nsqd_tcp_addresses])\n @lookupd_tcp_addresses = s_to_a(options[:lookupd_tcp_addresses])\n @lookupd_poll_interval = options[:lookupd_poll_interval] || 120\n @long_id = 
options[:long_id] || Socket.gethostname\n @short_id = options[:short_id] || @long_id.split('.')[0]\n NSQ.logger = options[:logger] if options[:logger]\n NSQ.logger.level = options[:logger_level] if options[:logger_level]\n\n @selector = ::NIO::Selector.new\n @timer = Timer.new(@selector)\n @topic_count = Hash.new(0)\n @subscribers = {}\n @subscriber_mutex = Monitor.new\n @name = \"#{@long_id}:#{@short_id}\"\n\n raise 'Must pass either option :nsqd_tcp_addresses or :lookupd_http_addresses' if @nsqd_tcp_addresses.empty? && @lookupd_http_addresses.empty?\n\n @conns = {}\n @last_lookup = nil\n\n @logger.info(\"starting reader for topic '%s'...\" % self.topic) if @logger\n end", "def initialize(options = {})\n default_options = { :port => 6543,\n :status_port => 6544,\n :connection_type => :playback,\n :protocol_version => MythTV::DEFAULT_PROTOCOL_VERSION }\n \n options = default_options.merge(options)\n\n # Set up a local logging object\n @log = MythTV::Utils.setup_logging(options)\n \n # We cannot start unless we've been given a host to connect to\n raise ArgumentError, \"You must specify a :host key and value to initialize()\" unless options.has_key?(:host)\n\n @host = options[:host]\n @port = options[:port]\n @status_port = options[:status_port]\n @protocol_version = options[:protocol_version]\n\n @socket = TCPSocket.new(@host, @port)\n \n check_proto\n \n if options[:connection_type] == :playback\n announce_playback()\n elsif options[:connection_type] == :filetransfer\n announce_filetransfer(options[:filename])\n else\n raise ArgumentError, \"Unknown connection type '#{options[:connection_type]}'\"\n end\n end", "def start_server(options = {})\n\n # Backward compatibility\n if options.is_a? String\n url = options\n port = nil\n logfile = nil\n else\n url = options[:url]\n port = options[:port]\n logfile = options[:logfile]\n end\n\n url = ENV['TALKSHOW_REMOTE_URL'] if ENV['TALKSHOW_REMOTE_URL']\n port = ENV['TALKSHOW_PORT'] if ENV['TALKSHOW_PORT']\n logfile = ENV['TALKSHOW_LOG'] if ENV['TALKSHOW_LOG']\n\n Talkshow::Server.set_port port if port\n Talkshow::Server.set_logfile logfile if logfile\n \n if !url\n @type = :thread\n @question_queue = ::Queue.new\n @answer_queue = ::Queue.new\n @thread = Thread.new do\n Talkshow::Server.question_queue(@question_queue)\n Talkshow::Server.answer_queue(@answer_queue)\n Talkshow::Server.run!\n end\n else\n @type = :remote\n @question_queue = Talkshow::Queue.new(url)\n @answer_queue = Talkshow::Queue.new(url)\n end\n \n end", "def initialize( options )\n\t\tLoggability.format_as( :color ) if $stderr.tty?\n\t\t@options = options\n\n\t\tif @options.debug\n\t\t\t$DEBUG = true\n\t\t\t$VERBOSE = true\n\t\t\tLoggability.level = Logger::DEBUG\n\t\telsif @options.loglevel\n\t\t\tLoggability.level = @options.loglevel\n\t\tend\n\n\t\tMongrel2::Config.configure( :configdb => @options.config )\n\tend", "def initialize(options={})\n @options = options\n super(nil)\n @logdev = LogDevice.new(self)\n @logdev.run_socket_thread\n\n @formatter = proc do |severity, time, progname, msg|\n if msg.is_a?(Exception)\n \"#{severity}: #{msg.message} (#{msg.class})\\n\" + (msg.backtrace || []).join(\"\\n\")\n else\n \"#{severity}: #{msg}\"\n end\n end\n end", "def initialize(options)\n @observer_queue = 'rpm_worker_observer'\n @observer_class = 'AbfWorker::RpmWorkerObserver'\n super options\n @runner = DockerRpmWorker::Runners::Rpm.new(self, options)\n init_live_logger(\"abfworker::rpm-worker-#{@build_id}\")\n init_file_logger(ENV['HOME'] + '/script_output.log')\n 
initialize_live_inspector options['time_living']\n end", "def initialize(options = {})\n expect! options => { :adapter => String }\n\n adapter_url = options[:adapter]\n\n EM.next_tick do\n @adapter = RubPubSub::Adapter.create(adapter_url)\n end\n end", "def initialize(options, args)\n defaults = {\n interval: 1\n }\n @options = defaults.merge((options || {}).each { |k, v| { k => v } })\n return unless args.first\n\n resolve_addressing args.first\n normalise_options\n end", "def initialize(options = nil)\n @config = DEFAULTS.merge(options || { })\n \n merge_config_options\n assign_extracted_host_port\n end", "def create(options = {})\n raise ArgumentError.new(\"You must provide :cores and :ram\") if options[:ram].nil? or options[:cores].nil?\n raise ArgumentError.new(\":ram has to be at least 256MiB and a multiple of it\") if options[:ram].to_i < 256 or (options[:ram].to_i % 256) > 0\n raise ArgumentError.new(\":availability_zone has to be either 'AUTO', 'ZONE_1', or 'ZONE_2'\") if options[:availability_zone] and !['AUTO', 'ZONE_1', 'ZONE_2'].include? options[:availability_zone]\n raise ArgumentError.new(\":os_type has to be either 'WINDOWS' or 'OTHER'\") if options[:os_type] and !['WINDOWS', 'OTHER'].include? options[:os_type]\n options[:server_name] = options.delete :name if options[:name]\n response = Profitbricks.request :create_server, options\n self.find(:id => response[:server_id])\n end", "def initialize options\n # stats\n\n @stats = {}\n\n clear_stats\n\n if options[:host]\n port = options[:port]\n port = DEFAULT_PORT if port.nil?\n @socket = TCPSocket.new options[:host], port\n elsif options[:socket]\n @socket = options[:socket]\n end\n\n @number = 0\n\n set_options options\n end", "def create(options = {})\n options[:name] ||= SecureRandom.hex\n\n create_options = { p: port }\n create_options[:c] = options[:name] if options[:name]\n create_options[:d] = options[:dir] if options[:dir]\n exec(\"create\", create_options)\n\n options[:name]\n end", "def create\n begin\n enterMaintenanceMode\n rescue\n Puppet.err 'Could not find Host system.Either Host is not exist or disconnected'\n end\n end", "def initialize(options = {})\n find_xvfb\n\n @display = options.fetch(:display, 99).to_i\n @reuse_display = options.fetch(:reuse, true)\n @dimensions = options.fetch(:dimensions, '1280x1024x24')\n\n #TODO more logic here, autopicking the display number\n if @reuse_display\n launch_xvfb unless read_pid\n elsif read_pid\n raise Exception.new(\"Display :#{display} is already taken and reuse=false\")\n else\n launch_xvfb\n end\n\n raise Exception.new(\"Xvfb did not launch - something's wrong\") unless read_pid\n end", "def initialize(options={})\n @host = options[:host] or raise ArgumentError, \"#{self.class.name} requires :host\"\n @port = options[:port] or raise ArgumentError, \"#{self.class.name} requires :port\"\n @node_id = options[:node_id]\n @protocol = options[:protocol] || DEFAULT_PROTOCOL\n @logger = options[:logger] || Logger.new($stdout)\n @health = :good\n end", "def initialize(options = {})\n @port = 7\n @service_check = false\n @data_must_match = true\n @data = \"net-ping2\\n\"\n @timeout = 10\n @bind_host = nil\n @bind_port = nil\n super(options)\n end", "def create_server(options = {})\n begin\n server = connection.servers.create(options[:server_def])\n rescue Excon::Errors::BadRequest => e\n response = Chef::JSONCompat.from_json(e.response.body)\n if response['badRequest']['code'] == 400\n message = \"Bad request (400): #{response['badRequest']['message']}\"\n 
ui.fatal(message)\n else\n message = \"Unknown server error (#{response['badRequest']['code']}): #{response['badRequest']['message']}\"\n ui.fatal(message)\n end\n raise CloudExceptions::ServerCreateError, message\n end\n\n msg_pair(\"Instance Name\", server.name)\n msg_pair(\"Instance ID\", server.id)\n\n print \"\\n#{ui.color(\"Waiting for server [wait time = #{options[:server_create_timeout]}]\", :magenta)}\"\n\n # wait for it to be ready to do stuff\n server.wait_for(Integer(options[:server_create_timeout])) { print \".\"; ready? }\n\n puts(\"\\n\")\n server\n end", "def initialize options\n @count = options[:count] || Float::INFINITY\n @drb_config = DRb::DRbServer.make_config\n @incoming_packets = Queue.new\n @incomplete_streams = {}\n @incomplete_timestamps = {}\n @loader = DRbDump::Loader.new @drb_config\n @quiet = options[:quiet]\n @resolver = Resolv if options[:resolve_names]\n @run_as_directory = options[:run_as_directory]\n @run_as_user = options[:run_as_user]\n\n initialize_devices options[:devices]\n\n @capps = []\n @drb_streams = {}\n @running = false\n @statistics = DRbDump::Statistics.new\n end", "def initialize(options = {})\n # merge default options with options hash\n o = {\n :domain => 'http://www.dev.usys.org',\n :key => '',\n :user_agent => 'FsCommunicator/0.1 (Ruby)', # should be overridden by options user_agent\n :session => nil,\n :handle_throttling => false,\n :logger => nil,\n :timeout => nil\n }.merge(options)\n @domain = o[:domain]\n @key = o[:key]\n @user_agent = o[:user_agent]\n @session = o[:session]\n @handle_throttling = o[:handle_throttling]\n @logger = o[:logger]\n @timeout = o[:timeout]\n end", "def initialize( options={} )\n\t\toptions = DEFAULT_OPTIONS.merge( options )\n\n\t\t@host = options[:host]\n\t\t@port = options[:port]\n\t\t@connect_type = options[:connect_type]\n\t\t@results_class = options[:results_class]\n\n\t\t@conn = nil\n\t\t@bound_user = nil\n\n\n\t\t@object_conversions = DEFAULT_OBJECT_CONVERSIONS.dup\n\t\t@attribute_conversions = DEFAULT_ATTRIBUTE_CONVERSIONS.dup\n\t\t@registered_controls = []\n\n\t\t@base_dn = options[:base_dn] || self.get_default_base_dn\n\t\t@base = nil\n\n\t\t# Immediately bind if credentials are passed to the initializer.\n\t\tif ( options[:bind_dn] && options[:pass] )\n\t\t\tself.bind( options[:bind_dn], options[:pass] )\n\t\tend\n\tend", "def create(options)\n new(options).create\n end", "def initialize(watchers = [], options = {})\n super\n @standalone = options[:standalone].nil? ? true : options[:standalone]\n @port = options[:port] || 3000\n @env = options[:env] || 'development'\n ping_opt = unless options[:touch].nil?\n UI.info \"Warning: The :touch option has been replaced by the :ping option, usage is still the same.\"\n options[:touch]\n else\n options[:ping]\n end\n @ping = ping_opt.eql?(true) ? 
'/' : ping_opt\n end", "def use(host, options)\r\n @servers[host] = Server.new(self, host, options)\r\n end", "def create_host_only_network(options)\n end", "def initialize(options) # :yield: mesg\n @options = options\n @options[\"Host\"] = \"localhost\" unless @options.has_key?(\"Host\")\n @options[\"Port\"] = 23 unless @options.has_key?(\"Port\")\n @options[\"Prompt\"] = /[$%#>] \\z/n unless @options.has_key?(\"Prompt\")\n @options[\"Timeout\"] = 10 unless @options.has_key?(\"Timeout\")\n @options[\"Waittime\"] = 0 unless @options.has_key?(\"Waittime\")\n unless @options.has_key?(\"Binmode\")\n @options[\"Binmode\"] = false\n else\n unless (true == @options[\"Binmode\"] or false == @options[\"Binmode\"])\n raise ArgumentError, \"Binmode option must be true or false\"\n end\n end\n\n unless @options.has_key?(\"Telnetmode\")\n @options[\"Telnetmode\"] = true\n else\n unless (true == @options[\"Telnetmode\"] or false == @options[\"Telnetmode\"])\n raise ArgumentError, \"Telnetmode option must be true or false\"\n end\n end\n\n @telnet_option = { \"SGA\" => false, \"BINARY\" => false }\n\n if @options.has_key?(\"Output_log\")\n @log = File.open(@options[\"Output_log\"], 'a+')\n @log.sync = true\n @log.binmode\n end\n\n if @options.has_key?(\"Dump_log\")\n @dumplog = File.open(@options[\"Dump_log\"], 'a+')\n @dumplog.sync = true\n @dumplog.binmode\n def @dumplog.log_dump(dir, x) # :nodoc:\n len = x.length\n addr = 0\n offset = 0\n while 0 < len\n if len < 16\n line = x[offset, len]\n else\n line = x[offset, 16]\n end\n hexvals = line.unpack('H*')[0]\n hexvals += ' ' * (32 - hexvals.length)\n hexvals = format(\"%s %s %s %s \" * 4, *hexvals.unpack('a2' * 16))\n line = line.gsub(/[\\000-\\037\\177-\\377]/n, '.')\n printf \"%s 0x%5.5x: %s%s\\n\", dir, addr, hexvals, line\n addr += 16\n offset += 16\n len -= 16\n end\n print \"\\n\"\n end\n end\n\n if @options.has_key?(\"Proxy\")\n if @options[\"Proxy\"].kind_of?(Net::Telnet)\n @sock = @options[\"Proxy\"].sock\n elsif @options[\"Proxy\"].kind_of?(IO)\n @sock = @options[\"Proxy\"]\n else\n raise \"Error: Proxy must be an instance of Net::Telnet or IO.\"\n end\n else\n message = \"Trying \" + @options[\"Host\"] + \"...\\n\"\n yield(message) if block_given?\n @log.write(message) if @options.has_key?(\"Output_log\")\n @dumplog.log_dump('#', message) if @options.has_key?(\"Dump_log\")\n\n begin\n if @options[\"Timeout\"] == false\n @sock = TCPSocket.open(@options[\"Host\"], @options[\"Port\"])\n else\n Timeout.timeout(@options[\"Timeout\"], Net::OpenTimeout) do\n @sock = TCPSocket.open(@options[\"Host\"], @options[\"Port\"])\n end\n end\n rescue Net::OpenTimeout\n raise Net::OpenTimeout, \"timed out while opening a connection to the host\"\n rescue\n @log.write($ERROR_INFO.to_s + \"\\n\") if @options.has_key?(\"Output_log\")\n @dumplog.log_dump('#', $ERROR_INFO.to_s + \"\\n\") if @options.has_key?(\"Dump_log\")\n raise\n end\n @sock.sync = true\n @sock.binmode\n\n message = \"Connected to \" + @options[\"Host\"] + \".\\n\"\n yield(message) if block_given?\n @log.write(message) if @options.has_key?(\"Output_log\")\n @dumplog.log_dump('#', message) if @options.has_key?(\"Dump_log\")\n end\n\n end", "def initialize(host, options = T.unsafe(nil)); end", "def dash(options={})\n set RGhost::Dash.new(options)\n end", "def daemon=(options = {})\n if options.respond_to?(:keys)\n daemon.bind = options[:bind] if options[:bind]\n daemon.port = options[:port] if options[:port]\n daemon.log_file = options[:log_file] if options[:log_file]\n daemon.timer = 
options[:timer] if options[:timer]\n end\n end", "def initialize(info = {})\n super\n register_options(\n [\n Opt::RPORT(123),\n ], self.class)\n\n register_advanced_options(\n [\n OptInt.new('VERSION', [true, 'Use this NTP version', 2]),\n OptInt.new('IMPLEMENTATION', [true, 'Use this NTP mode 7 implementation', 3])\n ], self.class)\n end", "def initialize( options )\n @notification = options.fetch(:notification)\n @output = options.fetch(:output) { STDOUT }\n end", "def initialize\n @hostname = Socket.gethostname()\n @dns_port = 53\n @ttl = 7200\n @priority = 1\n @weight = 5\n @resolver = nil\n @ipv4 = nil\n @ipv6 = nil\n @sleep_time = 60\n @max_dns_response_time=10\n @zone = \"\"\n @transport = :udp\n end", "def create(name, type)\n configure [\"aaa group server #{type} #{name}\", 'exit']\n end", "def initialize(options = nil)\n super() # MonitorMixin\n options ||= {}\n @logger = options[:logger]\n # Initializes each instance variable of the same name as option\n # keyword.\n default_options.each_pair { |key, default|\n instance_variable_set(\"@#{key}\", options.fetch(key, default))\n }\n end", "def create_network_switch(options)\n load_path = Pathname.new(File.expand_path(\"../scripts\", __FILE__))\n script_path = load_path.join('create_switch.ps1')\n execute(script_path, options)\n end", "def create_monitor(_mon_name, mon_params)\n res = @dog.monitor(mon_params['type'], mon_params['query'], mon_params)\n if res[0] == '200'\n logger.info \"\\tMonitor #{res[1]['id']} created successfully\"\n return res[1]['id']\n else\n logger.error \"\\tError creating monitor: #{res}\"\n end\n end", "def create(options={})\n unless options[:ostemplate]\n # We need at least a valid ostemplate\n raise ArgumentError, \"Create requires argument :ostemplate.\"\n end\n\n cmd = \"#{@vzctl} create #{@ctid}\"\n\n options.each do |opt,val|\n cmd << \" --#{opt}\"\n cmd << \" #{val}\"\n end\n\n execute(cmd)\n\n Log.debug(\"Reading new container configuration file: #{@configfile}\")\n @config = Config.new(load_config_file)\n @config.add_observer(self)\n end", "def initialize(hosts, options)\n @hosts = hosts\n @options = options\n @logger = options[:logger]\n @perf_timestamp = Time.now\n @hosts.map { |h| setup_perf_on_host(h) }\n end", "def initialize(options)\n @options =\n if options.kind_of?(String)\n Hash[ [:host, :port].zip(options.split(\":\")) ]\n else\n options.dup\n end.tap { |o| o[:port] = o[:port].to_i }\n\n @pool = Pool.new(\n host: @options[:host],\n port: @options[:port],\n size: @options.fetch(:pool_size, 8),\n ttl: 60\n )\n\n assert_valid_pool unless @options[:verify] == false\n end", "def create_server opts\n Server.create opts.merge :account => self\n end", "def initialize(options={})\n @timers = []\n @settings = options[:settings]\n @logger = options[:logger]\n @redis = options[:redis]\n @options = @settings.to_hash.fetch(:tessen, {})\n end", "def initialize(options, monitoring, cluster)\n @options = options\n @monitoring = monitoring\n @cluster = cluster\n end", "def create_host_only_network(options)\n # Create the interface\n execute_prlsrvctl('net', 'add', options[:network_id], '--type', 'host-only')\n\n # Configure it\n args = ['--ip', \"#{options[:adapter_ip]}/#{options[:netmask]}\"]\n if options[:dhcp]\n args.concat(['--dhcp-ip', options[:dhcp][:ip],\n '--ip-scope-start', options[:dhcp][:lower],\n '--ip-scope-end', options[:dhcp][:upper]])\n end\n\n execute_prlsrvctl('net', 'set', options[:network_id], *args)\n\n # Return the details\n {\n name: options[:network_id],\n ip: 
options[:adapter_ip],\n netmask: options[:netmask],\n dhcp: options[:dhcp]\n }\n end", "def initialize(info = {})\n super\n\n register_options(\n [\n Opt::LHOST,\n Opt::LPORT(4444)\n ], Msf::Handler::ReverseUdp)\n\n # XXX: Not supported by all modules\n register_advanced_options(\n [\n OptAddress.new('ReverseListenerBindAddress', [ false, 'The specific IP address to bind to on the local system']),\n OptInt.new('ReverseListenerBindPort', [ false, 'The port to bind to on the local system if different from LPORT' ]),\n OptString.new('ReverseListenerComm', [ false, 'The specific communication channel to use for this listener']),\n OptBool.new('ReverseListenerThreaded', [ true, 'Handle every connection in a new thread (experimental)', false])\n ] +\n Msf::Opt::stager_retry_options,\n Msf::Handler::ReverseUdp)\n\n self.conn_threads = []\n end", "def initialize(options = {})\n super\n @options = Options.with_defaults(options) \n @notifier = Notifier.new(@options) \n end", "def initialize options = {}\n options[:DoNotListen] = true\n sockets_key = options.delete(:LaunchdSockets) || 'WEBrickSockets'\n\n super\n\n launch_checkin\n\n servers = launch_sockets sockets_key, TCPServer\n\n listeners.replace servers\n\n trap 'TERM' do shutdown end\n end", "def initialize(options = {}, &block)\n options = options.dup\n @server = options.delete(:server)\n @udp_flags = options.delete(:udp_flags) || 0\n raise(ArgumentError, 'Missing mandatory argument: :server') unless @server\n\n super(options, &block)\n reopen\n end", "def create_server(options = {})\n begin\n add_custom_attributes(options[:server_def])\n server = connection.servers.create(options[:server_def])\n rescue Excon::Error::BadRequest => e\n response = Chef::JSONCompat.from_json(e.response.body)\n if response[\"badRequest\"][\"code\"] == 400\n message = \"Bad request (400): #{response[\"badRequest\"][\"message\"]}\"\n ui.fatal(message)\n else\n message = \"Unknown server error (#{response[\"badRequest\"][\"code\"]}): #{response[\"badRequest\"][\"message\"]}\"\n ui.fatal(message)\n end\n raise CloudExceptions::ServerCreateError, message\n rescue Fog::Errors::Error => e\n raise CloudExceptions::ServerCreateError, e.message\n end\n\n print \"\\n#{ui.color(\"Waiting for server [wait time = #{options[:server_create_timeout]}]\", :magenta)}\"\n\n # wait for it to be ready to do stuff\n server.wait_for(Integer(options[:server_create_timeout])) { print \".\"; ready? }\n\n puts(\"\\n\")\n server\n end", "def setup(options)\n @nick = options['nick'] if options['nick']\n @server = options['server'] if options['server']\n @port = options['port'].to_i if options['port']\n connect\n end", "def create\n Puppet.debug \"starting create #{self.class.to_s}\"\n dns_service = get_dns_service(get_fqdn)\n dns_service.create_record(get_fqdn, get_type, get_ip) if dns_service != nil\n Puppet.debug \"done with create #{self.class.to_s}\"\n end", "def create_db(options)\n info \"Created database '#{options[:name]}'\"\n end", "def initialize(info = {})\n super(update_info(info,\n 'Name' => 'Backdoor Sniffer',\n 'Description' => %q{\n This module will attempt to sniff out well known backdoors hidden in popular directories and setup/backup files \n that should not be public facing. 
The list of directories and page names are taken from various repositories of\n backdoors.\n },\n 'Author' =>\n [\n 'h4cklife',\n ],\n 'License' => MSF_LICENSE\n )\n )\n\n deregister_options('RHOST', 'DigestAuthIIS', 'NTLM::SendLM',\n 'NTLM::SendNTLM', 'VHOST', 'RPORT', 'NTLM::SendSPN', 'NTLM::UseLMKey',\n 'NTLM::UseNTLM2_session', 'NTLM::UseNTLMv2')\n\n register_options(\n [\n\t\t\t\tOptString.new('DOMAIN', [true, 'Target domain']),\n OptString.new('OUTFILE', [false, 'A filename to store the results']),\n\t\t\t\tOptBool.new('DATABASE', [false, 'Add search results to thea loot database', false]),\n\t\t\t\tOptBool.new('SSL', [false, 'Use SSL', false]),\n\t\t\t\tOptBool.new('VERBOSE', [false, 'Verbose mode', false]),\n ], self.class)\n\n end", "def create_new_record(options = {})\n @mongo_record = LogMessage.new(options.merge({\n :messages => [],\n :time => Time.now.getutc,\n :progname => resolve_application_name,\n :combined => true\n }))\n end", "def initialize( options={} )\n end", "def initialize(info = {})\n\t\tsuper\n\n\t\t# Register our options, overriding the RHOST/RPORT from TCP\n\t\tregister_options(\n\t\t\t[\n\t\t\t\tOpt::RPORT(25),\n\t\t\t\tOptString.new('MAILFROM', [ true, 'The FROM address of the e-mail', '[email protected]']),\n\t\t\t\tOptString.new('MAILTO', [ true, 'The TO address of the e-mail']),\n\t\t\t\tOptString.new('VERBOSE', [ false, 'Display verbose information']),\n\t\t\t], Msf::Exploit::Remote::SMTPDeliver)\n\t\tregister_autofilter_ports([ 25, 465, 587, 2525, 25025, 25000])\n\t\tregister_autofilter_services(%W{ smtp smtps})\n\tend", "def initialize(options={})\n @config = DEFAULTS.merge! options\n @config[:console_id] = CONSOLE_MAP[@config[:console]] if CONSOLE_MAP.has_key? @config[:console]\n\n setup_connection\n setup_resources\n end", "def find_or_create_host(opts)\n host = get_host(opts.clone)\n return host unless host.nil?\n\n report_host(opts)\n end", "def initialize(info = {})\n\t\tsuper\n\t\tregister_options(\n\t\t\t[\n\t\t\t\tOpt::LPORT(4444),\n\t\t\t\t#OptAddress.new('RHOST', [false, 'The target address', '']),\n\t\t\t], Msf::Handler::BeEFBind)\n\tend", "def initialize(options={})\n @server_list = ServerList.new\n @groups = Hash.new { |h,k| h[k] = ServerList.new }\n @gateway = nil\n @open_groups = []\n @connect_threads = []\n @on_error = :fail\n @default_user = ENV['USER'] || ENV['USERNAME'] || \"unknown\"\n\n @open_connections = 0\n @pending_sessions = []\n @session_mutex = Mutex.new\n\n options.each { |opt, value| send(\"#{opt}=\", value) }\n end", "def initialize(aOptions)\n\t\t@options = aOptions\n\t\t@thread = nil\n\t\t@name = aOptions[:name] || random_word(8,8)\n\t\tif not @logger = aOptions[:logger]\n\t\t\t@logger = Logger.new(STDERR)\n\t\t\[email protected] = Logger::DEBUG\n\t\tend\n\t\tself.start() if aOptions[:auto_start]\n\tend", "def initialize(host, options={})\n @host = host\n @options = DEFAULT_CONFIGURATION.merge(options)\n end", "def create(opts)\n opts = check_params(opts,[:servers])\n super(opts)\n end", "def vps_create(options)\n #puts \"#{@beanstalk.last_conn.addr}: create #{options['code']}\\n\"\n puts options.inspect\n vps = Antir::VPS.new\n\n code = options['code']\n vps.id = code\n vps.name = code\n vps.ip = \"10.10.1.#{code}\"\n puts vps.inspect\n vps.create\n\n #@report.send_string(\"created #{options['code']}\")\n end", "def new\n @host = Host.new\n end", "def initialize(options)\n @options = options\n @subscribers = []\n @discovered_nodes = []\n end", "def initialize(options = {})\n @daemonize = options[:daemonize].nil? ? 
true : options[:daemonize]\n @pid_file = options[:pid_file].nil? ? nil : File.absolute_path(options[:pid_file])\n end", "def create(name)\n configure [\"interface #{name}\", 'no ip address', 'switchport']\n end", "def initialize(options={})\n @options = {:host => nil,\n :vendor => \"Dell\",\n :model => nil,\n :version => \"2c\",\n :community => \"public\"\n }\n @options.merge!(options) if options.is_a?(Hash)\n @debug = false\n end", "def initialize(info = {})\n\t\tsuper\n\n\t\tregister_options(\n\t\t\t[\n\t\t\t\tOpt::LPORT(4444),\n\t\t\t\tOptAddress.new('RHOST', [false, 'The target address', '']),\n\t\t\t], Msf::Handler::BindTcp)\n\n\t\tself.conn_threads = []\n\t\tself.listener_threads = []\n\t\tself.listener_pairs = {}\n\tend", "def initialize(options)\n Resque.redis = \"#{options[:server]}:#{options[:port]}\"\n end", "def create(options = {})\n nil\n end", "def create(options)\n API::request(:post, 'background_checks', options)\n end", "def initialize(info = {})\n super\n\n # Register the options that all SNMP exploits may make use of.\n register_options(\n [\n Opt::RHOST,\n Opt::RPORT(161),\n OptString.new('COMMUNITY', [ true, 'SNMP Community String', 'public' ]),\n OptString.new('VERSION', [ true, 'SNMP Version <1/2c>', '1' ]),\n OptInt.new('TIMEOUT', [ true, 'SNMP Timeout', 1 ]),\n OptInt.new('RETRIES', [ true, 'SNMP Retries', 1 ])\n ], Msf::Exploit::Remote::SNMPClient)\n end", "def initialize(options = {})\n @options = options\n @group = options.delete(:group)\n @watchers = options.delete(:watchers) { [] }\n @callbacks = options.delete(:callbacks) { [] }\n _register_callbacks\n end", "def initialize(options = {})\n @seq = 0\n @bind_port = 0\n @bind_host = nil\n @data_size = 56\n\n @ping_id = (Thread.current.object_id ^ Process.pid) & 0xffff\n\n super(options)\n end", "def initialize(options); end", "def initialize(options)\n @options = options\n @verbose = options[:Verbose]\n @boxes = options[:Boxes]\n Net::IMAP.debug = options[:Debug]\n end", "def sparkRegisterMonitorCmdParser(className)\n logNormal($sparkPrefix + \" sparkRegisterMonitorCmdParser \" + className + \"\\n\")\n sparkGetMonitorServer()\n sparkCreate(className, $serverPath+'monitor/SparkMonitor/'+className)\nend", "def create_datagram_socket(options=nil)\n if !block_given? 
&& options == nil\n return ::Vertx::Util::Utils.safe_create(@j_del.java_method(:createDatagramSocket, []).call(),::Vertx::DatagramSocket)\n elsif options.class == Hash && !block_given?\n return ::Vertx::Util::Utils.safe_create(@j_del.java_method(:createDatagramSocket, [Java::IoVertxCoreDatagram::DatagramSocketOptions.java_class]).call(Java::IoVertxCoreDatagram::DatagramSocketOptions.new(::Vertx::Util::Utils.to_json_object(options))),::Vertx::DatagramSocket)\n end\n raise ArgumentError, \"Invalid arguments when calling create_datagram_socket(options)\"\n end", "def initialize logger, options\n @logger = logger\n @options = options\n raise AgentError, \"no configuration specified.\" unless @options\n @locker = Monitor.new\n @threads = nil\n end", "def create_gdom_disk(options)\n client_disk = options['q_struct']['gdom_disk'].value\n disk_size = options['q_struct']['gdom_size'].value\n disk_size = disk_size.downcase\n vds_disk = options['name']+\"_vdisk0\"\n if not client_disk.match(/\\/dev/)\n if not File.exist?(client_disk)\n message = \"Information:\\tCreating guest domain disk \"+client_disk+\" for client \"+options['name']\n command = \"mkfile -n #{disk_size} #{client_disk}\"\n output = execute_command(options,message,command)\n end\n end\n message = \"Information:\\tChecking Virtual Disk Server device doesn't already exist\"\n command = \"ldm list-services |grep 'primary-vds0' |grep '#{vds_disk}'\"\n output = execute_command(options,message,command)\n if not output.match(/#{options['name']}/)\n message = \"Information:\\tAdding disk device to Virtual Disk Server\"\n command = \"ldm add-vdsdev #{client_disk} #{vds_disk}@primary-vds0\"\n output = execute_command(options,message,command)\n end\n return\nend", "def initialize(options={})\n @logger = options.delete(:logger) || self.class.default_logger\n @options = options\n options.host = '127.0.0.1' if options.host.nil? || options.host == 'localhost'\n options.port ||= 9312\n\n # If you use localhost, MySQL insists on a socket connection, but Sphinx\n # requires a TCP connection. Using 127.0.0.1 fixes that.\n # so does passing in the socket file e.g. 
socket:'/usr/local/sphinx/var/run/sphinx.sock'\n # nb: sphinx.conf listen definition must specify mysql41 as the protocol, e.g.,\n # listen = localhost:9312:mysql41\n\n super( { symbolize_keys: true, database_timezone: :utc, application_timezone: :local }.merge( options ) )\n end", "def initialize( options )\n\t\tTreequel.logger.formatter = Treequel::ColorLogFormatter.new( Treequel.logger )\n\n\t\tif options.debug\n\t\t\t$DEBUG = true\n\t\t\t$VERBOSE = true\n\t\t\tTreequel.logger.level = Logger::DEBUG\n\t\telsif options.loglevel\n\t\t\tTreequel.logger.level = Treequel::LOG_LEVELS[ options.loglevel ]\n\t\tend\n\n\t\t@options = options\n\t\tif @options.all?\n\t\t\t@options[:attrtypes] =\n\t\t\t\t@options[:objectclasses] =\n\t\t\t\t@options[:syntaxes] =\n\t\t\t\t@options[:matching_rules] =\n\t\t\t\t@options[:matching_rule_uses] =\n\t\t\t\ttrue\n\t\tend\n\n\t\t@directory = Treequel.directory( options.ldapurl )\n\t\t@prompt = HighLine.new\n\n\t\[email protected]_at = @prompt.output_cols - 10\n\n\t\tself.log.debug \"Created new treewhat command object for %s\" % [ @directory ]\n\tend", "def connect_to_gdom_console(options)\n check_cdom_vntsd()\n check_gdom_exists(options)\n check_gdom_is_running(options)\n vcc_port = get_gdom_console_port(options)\n vcc_port = vcc_port.chomp\n handle_output(options,\"\") \n handle_output(options,\"To connect to console of Guest Domain #{options['name']} type the following command: \")\n handle_output(options,\"\") \n handle_output(options,\"telnet localhost #{vcc_port}\")\n handle_output(options,\"\") \n return\nend", "def create(_options)\n raise NotImplementedError\n end", "def create_nic(options)\n Nic.create(options.merge(:server_id => self.id))\n end", "def create(name)\n configure([\"interface #{name}\", 'no switchport'])\n end", "def create\n name, type = resource[:name].split('/')\n rdata = resource[:rdata]\n ttl = resource[:ttl]\n case type\n when 'MX'\n Array(rdata).each_with_index do |exchange, index|\n preference = Array(resource[:preference])[index]\n nsupdate(\"server #{server}\n update add #{name} #{ttl} MX #{preference} #{exchange}\n send\")\n end\n when 'SRV'\n Array(rdata).each_with_index do |target, index|\n port = Array(resource[:port])[index]\n weight = Array(resource[:weight])[index]\n priority = Array(resource[:priority])[index]\n nsupdate(\"server #{server}\n update add #{name} #{ttl} SRV #{priority} #{weight} #{port} #{target}\n send\")\n end\n else\n nsupdate(\"server #{server}\n update add #{name} #{ttl} #{type} #{Array(rdata).first}\n send\")\n end\n end" ]
[ "0.6064956", "0.60221183", "0.5793687", "0.55477023", "0.5499154", "0.54941326", "0.5460354", "0.5424016", "0.5369096", "0.5354829", "0.5324311", "0.5321214", "0.53147143", "0.5308993", "0.53043556", "0.52905476", "0.5269478", "0.52592635", "0.52573234", "0.5248966", "0.5223063", "0.5221611", "0.5220858", "0.5213802", "0.52058554", "0.5205364", "0.51932114", "0.5191507", "0.51735294", "0.5165734", "0.5153798", "0.5149431", "0.51354903", "0.5095465", "0.509328", "0.50929666", "0.50927573", "0.5091255", "0.507079", "0.506457", "0.5061077", "0.5060642", "0.5054271", "0.50507104", "0.5044791", "0.50392056", "0.50353813", "0.5035171", "0.5032984", "0.5023865", "0.50207424", "0.502039", "0.5013796", "0.5010311", "0.50102305", "0.50017893", "0.50010335", "0.49913245", "0.49908116", "0.4977871", "0.49768302", "0.49623886", "0.49580303", "0.4956458", "0.49557495", "0.4949359", "0.49483094", "0.49438676", "0.49438375", "0.4938587", "0.49380857", "0.49379385", "0.4936633", "0.4934832", "0.49204355", "0.48957562", "0.48938686", "0.4881119", "0.4870183", "0.4869755", "0.4869641", "0.48664793", "0.48630217", "0.48618576", "0.48613864", "0.48562896", "0.48512077", "0.4841142", "0.48410386", "0.48211", "0.48091358", "0.48068908", "0.48035598", "0.4787475", "0.47865716", "0.47855008", "0.47849363", "0.47834885", "0.47831285", "0.478176", "0.47812665" ]
0.0
-1
Return a clone of this object with its timeout set to +new_timeout+.
def with_timeout( new_timeout ) copy = self.clone copy.timeout = new_timeout return copy end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def without_timeout\n\t\treturn self.clone( :timeout => 0 )\n\tend", "def timeout=(new_timeout)\n if new_timeout && new_timeout.to_f < 0\n raise ArgumentError, \"Timeout must be a positive number\"\n end\n @timeout = new_timeout.to_f\n end", "def connect_timeout=(new_timeout)\n if new_timeout && new_timeout.to_f < 0\n raise ArgumentError, \"Timeout must be a positive number\"\n end\n\n @connect_timeout = new_timeout.to_f\n end", "def timeout( seconds=nil )\n\t\tif seconds\n\t\t\treturn self.clone( :timeout => seconds )\n\t\telse\n\t\t\treturn @options[:timeout]\n\t\tend\n\tend", "def idle_timeout( new_timeout=nil )\n\t\t\t@idle_timeout = new_timeout if new_timeout\n\t\t\treturn @idle_timeout\n\t\tend", "def with_max_timeout(max_timeout)\n @max_timeout = max_timeout\n self\n end", "def timeout=(timeout)\n @timeout = timeout\n end", "def set_timeout(timeout=nil)\n if timeout.class == Fixnum && !block_given?\n @j_del.java_method(:setTimeout, [Java::long.java_class]).call(timeout)\n return self\n end\n raise ArgumentError, \"Invalid arguments when calling set_timeout(timeout)\"\n end", "def timeout(value)\n fresh.tap do |criteria|\n criteria.timeout_value = value\n end\n end", "def timeout_raw=(timeout)\n @serial_timer.timeout = timeout\n end", "def timeout=(timeout)\n @timeout = timeout.to_f/1000 * 60\n end", "def timeout\n super\n end", "def timeout\n super\n end", "def _clone\n self.class.new(self)\n end", "def clone_with( new_settings, &block )\n\t\tnewobj = self.dup\n\t\tnewobj.merge_settings( new_settings )\n\n\t\tif block\n\t\t\treturn newobj.call( &block )\n\t\telse\n\t\t\treturn newobj\n\t\tend\n\tend", "def timeout\n options[:timeout] || super\n end", "def clone\n self\n end", "def clone_with( new_settings, &block )\n\t\tnewobj = self.dup\n\t\tnewobj.settings.merge!( new_settings )\n\n\t\tif block\n\t\t\treturn newobj.call( &block )\n\t\telse\n\t\t\treturn newobj\n\t\tend\n\tend", "def clone\n newobj = super\n newobj.instance_eval do\n __getobj__.each_pair do |k, v|\n __getobj__[k] = v.clone\n end\n end\n newobj\n end", "def clone()\n if @handle.ptr == nil\n raise \"this is disposed\"\n end\n result = TimeSpan.new(Native.TimeSpan_clone(@handle.ptr))\n result\n end", "def set_timeout timeout\r\n command 'setTimeout', timeout\r\n end", "def set_timeout timeout\r\n command 'setTimeout', timeout\r\n end", "def initialize(timeout, options)\n @timeout = timeout\n @options = options\n end", "def timeout=(value)\n @timeout = value\n end", "def extended_timeout\n puts \"Please wait, this could take a few minutes ...\"\n old_timeout = ::DTK::Client::Conn.get_timeout()\n ::DTK::Client::Conn.set_timeout(EXTENDED_TIMEOUT)\n result = yield\n ::DTK::Client::Conn.set_timeout(old_timeout)\n result\n end", "def limit( new_limit=nil )\n\t\tif new_limit.nil?\n\t\t\treturn self.options[:limit]\n\t\telse\n\t\t\tself.log.debug \"cloning %p with new limit: %p\" % [ self, new_limit ]\n\t\t\treturn self.clone( :limit => Integer(new_limit) )\n\t\tend\n\tend", "def timeout=(value)\n @transfer[:timeout] = value\n end", "def with_timeout(time, &block)\n Timeout.timeout(time) do\n block.call(self)\n end\n end", "def clone(newRole)\n return ZKPeer.new(@id, @address, newRole, @dataPort, @electionPort, @clientPort)\n end", "def timeout\n @timeout\n end", "def clone\n self.class.new(raw.clone)\n end", "def clone\n self\n end", "def dup\n self.class.new(options)\n end", "def clone\n self.copy\n end", "def clone\n self.copy\n end", "def clone\n self.class.new(@parent, @position)\n end", "def clone\n @clone ||= 
super\n end", "def with(new_options)\n self.class.new(name, options.merge(new_options))\n end", "def timeout=(timeout)\n @stop_time = timeout.nil? ? nil : current_time + timeout\n end", "def clone\n self.class.new(**as_json)\n end", "def clone\n dup\n end", "def clone() end", "def timeout_after(time); end", "def open_timeout= open_timeout\n @agent.open_timeout = open_timeout\n end", "def timeout_in\n 15.minutes\n end", "def clone\n super\n end", "def _timeout_in\n 1.minute\n end", "def dup\n self.class.new nil, @opts, &@block\n end", "def timeout=(timeout)\n @timeout = timeout\n @socket_timeout = nil\n\n if @timeout.to_f > 0\n secs = @timeout.floor\n usecs = (@timeout - secs).floor * 1_000_000\n @socket_timeout = [secs, usecs].pack(\"l_2\")\n end\n end", "def timeout!(*args, &block)\n Timeout::timeout(*args, &block)\n end", "def set_timeout(timeoutMs=nil)\n if timeoutMs.class == Fixnum && !block_given?\n @j_del.java_method(:setTimeout, [Java::long.java_class]).call(timeoutMs)\n return self\n end\n raise ArgumentError, \"Invalid arguments when calling set_timeout(timeoutMs)\"\n end", "def clone\n copy(false)\n end", "def clone(parent_element)\n self.class.new(parent_element, name, options.clone)\n end", "def set_lock_timeout(timeout)\n @lock_timeout = timeout\n end", "def default_timeout\n self.class.mocked_default_timeout\n end", "def timeout\n @timeout || Elected.timeout\n end", "def new_query(timeframe)\n klass.new(namespace, bucket, options.merge(timeframe: timeframe))\n end", "def default_timeout\n 900\n end", "def connect_timeout=(val)\n @j_del.setConnectTimeout(val)\n self\n end", "def clone\n end", "def read_timeout= read_timeout\n @agent.read_timeout = read_timeout\n end", "def timeout\n return @timeout\n end", "def new_item\n pause(item_delay) if item_delay\n self[:index] = nil\n self[:value] = nil\n end", "def clone\n super\n end", "def clone; end", "def clone; end", "def clone; end", "def with(new_options)\n self.class.new(identifier, options.merge(new_options))\n end", "def open_timeout=(open_timeout); end", "def timeout(*args, &blk)\n (@client ||= connect).timeout(*args, &blk)\n end", "def new(identifier, new_options = EMPTY_HASH)\n self.class.new(identifier, options.merge(new_options))\n end", "def timeout_at; end", "def initialize(timeout = DEFAULT_TIMEOUT)\n timeout = timeout.to_int if timeout.respond_to?(:to_int)\n \n raise ArgumentError.new(\"Timeout must be > 0\") unless timeout > 0\n \n @timeout = timeout\n \n # we can use this for look-ups in O(1), instead of only find-min in O(1)\n @store = {}\n end", "def timeout_seconds\n return 1200\n end", "def setconnectiontimeout(timeout)\r\n setvalue(@@CONNECTION_TIMEOUT, timeout)\r\n end", "def initialize(timeout=600)\n @holder = NameTimerHolder.new(timeout)\n end", "def initialize( timeout: Arborist::Monitor::Webservice.default_timeout )\n\t\t\tself.timeout = timeout\n\t\tend", "def evict_timeout(timeout)\n @evict_timeout = timeout.to_i\n self\n end", "def lock_timeout(duration:)\n add option: \"-lock-timeout=#{duration}\"\n end", "def clone\n q = super\n q.attrs = self.attrs.clone\n return q\n end", "def tcp_timeout\n super\n end", "def default_timeout\n 60\n end", "def timeout(val = nil)\n if val\n @j_del.setTimeout(val)\n self\n else\n @j_del.getTimeout\n end\n end", "def idle_timeout=(timeout); end", "def timeout=(value)\n @connect_timeout, @send_timeout, @receive_timeout = value, value, value\n end", "def initialize(timeout_period)\n self.timeout_period = timeout_period\n self.terminated = false\n self.last_ping = 
Time.now\n end", "def clone(*) end", "def clone(*) end", "def timeout\n @dups += 1\n send_dat(:resend => true)\n # FIXME - need to give up eventually!\n end", "def clone\n @clone = true\n end", "def add_timeout(opts = {})\n @timeout = opts[:timeout] || @timeout\n opts.delete(:timeout)\n end", "def with_ties\n clone(:limit_with_ties=>true)\n end", "def clone( options={} )\n\t\tself.log.debug \"cloning %p with options = %p\" % [ self, options ]\n\t\tnewset = super()\n\t\tnewset.options = @options.merge( options )\n\t\treturn newset\n\tend", "def clone\n Marshal.load(Marshal.dump(self))\n end", "def clone\n Marshal.load(Marshal.dump(self))\n end", "def clone\n Marshal.load(Marshal.dump(self))\n end", "def clone\n Marshal.load(Marshal.dump(self))\n end", "def clone\n Marshal.load(Marshal.dump(self))\n end", "def clone\n duplicate = self.class.new(source_end.client, target_end.client)\n duplicate.adopt(self)\n\n duplicate\n end" ]
[ "0.64084935", "0.6082062", "0.5882083", "0.5701635", "0.5602263", "0.55298173", "0.546274", "0.54486436", "0.5408024", "0.5324225", "0.5262407", "0.5258866", "0.5258866", "0.5250542", "0.5217657", "0.51823545", "0.5137367", "0.5131997", "0.50728095", "0.50595796", "0.50453144", "0.50453144", "0.50303656", "0.49424744", "0.49401", "0.4922987", "0.4900801", "0.4896432", "0.48880202", "0.48875895", "0.48752233", "0.4866619", "0.4861099", "0.48472422", "0.48472422", "0.48429325", "0.48326632", "0.48003134", "0.4792765", "0.4791209", "0.47878787", "0.47854894", "0.47800282", "0.4776273", "0.47703192", "0.47654292", "0.47456014", "0.47393942", "0.47289124", "0.4727805", "0.47121686", "0.47007513", "0.4698666", "0.46886885", "0.46839693", "0.46785536", "0.46759415", "0.46741697", "0.46605414", "0.46597096", "0.46500623", "0.4648962", "0.46383983", "0.4629981", "0.46268958", "0.46268958", "0.46268958", "0.46220604", "0.46182573", "0.4616798", "0.46136025", "0.46088937", "0.460102", "0.4597107", "0.4595637", "0.45927575", "0.45887387", "0.45884427", "0.4584527", "0.45832902", "0.45788306", "0.4576543", "0.45714405", "0.45655638", "0.45637718", "0.45632422", "0.4561646", "0.4561646", "0.4554058", "0.45527014", "0.45509112", "0.4547772", "0.45474228", "0.45468968", "0.45468968", "0.45468968", "0.45468968", "0.45468968", "0.45459715" ]
0.86934096
1
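A minimal usage sketch of the clone-with-new-timeout pattern shown in the document field of the row above. The Client class, its timeout accessor, and the literal timeout values are illustrative assumptions and not part of the original row; only the #with_timeout body mirrors the source.

# Hypothetical client object with a writable timeout, used only to
# demonstrate the clone-and-override pattern from the row above.
class Client
  attr_accessor :timeout

  def initialize(timeout = 30)
    @timeout = timeout
  end

  # Return a copy of this object whose timeout is +new_timeout+;
  # the receiver itself keeps its original timeout.
  def with_timeout(new_timeout)
    copy = self.clone
    copy.timeout = new_timeout
    copy
  end
end

client = Client.new(30)
fast   = client.with_timeout(5)
client.timeout  # => 30 (unchanged)
fast.timeout    # => 5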
Run the domain check for each of the specified Hash of +nodes+ and return a Hash of updates for them based on their DNS domain record's status.
def run( nodes ) self.log.debug "Got nodes to check with %p: %p" % [ self, nodes ] records = nodes.each_with_object( {} ) do |(identifier, node), hash| self.log.debug "Looking up whois info for %p (%p)" % [ identifier, node ] hash[ identifier ] = self.client.lookup( node['name'] ) end return records.each_with_object( {} ) do |(identifier, record), hash| parser = record.parser hash[ identifier ] = self.parse_record( parser, identifier ) end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run( nodes )\n\t\t\tself.log.debug \"Got %d nodes to check with %p\" % [ nodes.length, self ]\n\t\t\tlookups = self.create_lookups( nodes )\n\t\t\treturn self.wait_for_responses( lookups, nodes )\n\t\tend", "def find_applying_nodes(hosts, statuses = [])\n Log.debug(\"checking applying status of #{hosts.inspect}\")\n @client.filter[\"identity\"].clear\n hosts.each do |host|\n @client.identity_filter(host)\n end\n\n results = @client.status\n\n hosts.each do |host|\n result = results.select { |r| r[:sender] == host }.first\n status = statuses.select { |s| s[:name] == host }.first\n\n unless status\n status = make_status(host)\n statuses << status\n end\n\n if result\n # check the value of applying as defined in the agent ddl\n if result[:data][:applying] == true\n # we're applying\n if result[:data][:initiated_at]\n # it's a new agent, we can record when it started\n Log.debug(\"#{host} run was started at #{result[:data][:initiated_at]}\")\n status[:initiated_at] = result[:data][:initiated_at]\n else\n Log.debug(\"#{host} run started\")\n end\n else\n # Here we check the \"asked to run but not yet started\" state.\n if result[:data][:lastrun].to_i >= status[:initiated_at]\n Log.debug(\"#{host} run completed\")\n # The node has finished applying, remove from the running set\n statuses.reject! { |s| s[:name] == host }\n next\n else\n # We haven't started yet that we can see, increment the check counter\n status[:checks] += 1\n Log.debug(\"#{host} starting, checks #{status[:checks]}\")\n end\n end\n else\n # We didn't get a result from this host, log and record a check happened\n log(\"Host #{host} did not respond to the status action.\")\n status[:no_response] += 1\n end\n\n if status[:no_response] >= 5\n # If we missed many responses to status, assume it's a dead node\n log(\"Host #{host} failed to respond multiple times. Skipping.\")\n statuses.reject! { |s| s[:name] == host }\n end\n\n if status[:checks] >= 5\n # If we hit more than 5 checks, assume it couldn't start\n log(\"Host #{host} did not move into an applying state. Skipping.\")\n statuses.reject! 
{ |s| s[:name] == host }\n end\n end\n\n return statuses\n end", "def wait_for_responses( lookups, nodes )\n\t\t\tupdate = {}\n\n\t\t\tuntil lookups.empty?\n\n\t\t\t\tlookups.keys.each do |thr|\n\t\t\t\t\tnext if thr.alive?\n\n\t\t\t\t\tidentifier = lookups.delete( thr )\n\t\t\t\t\tbegin\n\t\t\t\t\t\trecords = thr.value\n\n\t\t\t\t\t\tif !records\n\t\t\t\t\t\t\tupdate[ identifier ] = { error: \"Lookup failed (timeout).\" }\n\t\t\t\t\t\telsif records.empty?\n\t\t\t\t\t\t\tupdate[ identifier ] = { error: \"Lookup failed (no records returned).\" }\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tnode_data = nodes[ identifier ]\n\t\t\t\t\t\t\tupdate[ identifier ] = self.compare_values( records, node_data )\n\t\t\t\t\t\tend\n\t\t\t\t\trescue SystemCallError => err\n\t\t\t\t\t\tmsg = \"%p: %s\" % [ err.class, err.message ]\n\t\t\t\t\t\tself.log.error \"%s while looking up %s\" % [ msg, identifier ]\n\t\t\t\t\t\tupdate[ identifier ] = { error: msg }\n\t\t\t\t\tend\n\t\t\t\tend\n\n\t\t\tend\n\n\t\t\treturn update\n\t\tend", "def check\n begin\n domains = File.read(@params[:domains_path]).split\n rescue Errno::ENOENT \n STDERR.puts \"File #{@params[:domains_path]} does not exist\"\n exit 1 \n end\n\n checks = domains.map do |domain|\n rdap = Net::HTTP.get(URI(\"#{@params[:rdap_url]}/domain/#{domain}\"))\n db.check domain, rdap \n end\n\n message = checks.map {|check| check.status}.to_json\n STDOUT.puts message\n\n if @params[:gchat] \n # GChat gets every status update\n GChat.new(@params[:gchat]).message(message) \n end\n if @params[:mandrill_key] && @params[:mandrill_email] && checks.any?(&:changed?)\n # We only email changed domains\n Mandrill.new(@params[:mandrill_key], @params[:mandrill_email]).message(message)\n end\n end", "def dns_check\n gen_host_records # These are the hosts we have\n load_all_subnets # These are the DNS entries\n \n # We want a standard layout, with the hypervisor API entries being \n @host_record.each do |hr| # Array of host record Hash's\n hn = hr[:hostname]\n shn = hn.split('.',2)[0] # Remove the domain\n forward_hr = @forward_host_record[hn] # Find Host Record\n if forward_hr.nil?\n # We have no IPAM entry for this hostname\n if (rhr = @reverse_host_records[hr[:ip]])\n puts \"Only Reverse IPAM entry for #{shn}: #{rhr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n else\n puts \"No IPAM entry for hostrecord: #{hr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n end\n else\n # We have an IPAM record for this hostname\n if forward_hr[:ip] != hr[:ip]\n puts \"IP mismatch #{shn} #{hr[:ip]} != #{forward_hr[:ip]} for IPAM: #{forward_hr}\"\n elsif forward_hr[:hostname] != hn\n # Reference must be via ALIASES or CNAMES\n if forward_hr[:aliases].include?(shn)\n puts \"Hostname #{shn} is an ALIAS. IPAM: #{forward_hr}\"\n elsif forward_hr[:cnames].include?(hn)\n puts \"Hostname #{shn} is a CNAME. 
IPAM: #{forward_hr}\"\n end\n end\n end\n end\n \n # We want to find IPAM entries, not matching existing @host_record entries\n @reverse_host_records.each do |ip, ahr| # Hash to array of host records from IPAM, indexed by IP\n ahr.each do |hr| # One IP can have multiple host records, with associated ALIAS and CNAME records\n local_hr = @host_record_index[hr[:hostname]]\n if local_hr.nil?\n puts \"No local entry #{hr[:hostname]} for #{hr}\"\n end\n end\n end\nend", "def run( nodes )\n\t\t\tresults = {}\n\t\t\thydra = Typhoeus::Hydra.new( self.runner_settings )\n\n\t\t\tnodes.each do |identifier, node|\n\t\t\t\tself.log.debug \"Making request for node %s\" % [ identifier ]\n\t\t\t\trequest = self.request_for_node( node )\n\t\t\t\trequest.on_complete do |response|\n\t\t\t\t\tself.log.debug \"Handling response for %s\" % [ identifier ]\n\t\t\t\t\tresults[ identifier ] =\n\t\t\t\t\t\tself.make_response_results( response, node )\n\t\t\t\tend\n\t\t\t\thydra.queue( request )\n\t\t\tend\n\n\t\t\thydra.run\n\n\t\t\treturn results\n\t\tend", "def transform_hosts(hosts)\n require 'time'\n\n node_data = []\n\n hosts.each do |host|\n if host[:report_timestamp].nil?\n # This can happen in weird cases. Mark as an expired node, so\n # the expired logic doesn't try to do math on a nil timestamp.\n last_checkin = nil\n formatted_checkin = 'N/A'\n host[:expired] = nil\n else\n last_checkin = Time.now - Time.parse(host[:report_timestamp])\n formatted_checkin = sprintf(\"%#{@options.round_to}f\",(last_checkin * @options.divisor).abs)\n end\n node_data << {\n :last_checkin => last_checkin,\n :expired => host[:expired].nil? ? false : host[:expired],\n :certname => host[:certname],\n :environment => host[:report_environment].nil? ? 'N/A' : host[:report_environment],\n :status => host[:latest_report_status].nil? ? 'N/A' : host[:latest_report_status],\n :formatted_checkin => formatted_checkin\n }\n end\n\n unless @options.environments.empty?\n node_data.delete_if {|node| not @options.environments.include? node[:environment] }\n end\n unless @options.statuses.empty?\n node_data.delete_if {|node| not @options.statuses.include? node[:status] }\n end\n\n node_data\n end", "def parse_dns(nodeList)\n find_if_exists = 0\n domain = Hash.new{|hsh,key| hsh[key] = []}\n address = Hash.new{|hsh,key| hsh[key] = []}\n cname = Hash.new{|hsh,key| hsh[key] = []}\n\n nodeArr = []\n # To remove null values for '.split' method to work\n nodeList.each do |node|\n if node == ''|| node.empty? 
|| node == \"\\n\"\n next\n end\n nodeArr.push(node.strip.split(','))\n end\n # Creating the key value Hash\n nodeArr.each do |(type,domain,source)|\n if type == \"CNAME\"\n cname[domain.strip.to_sym].push(source.strip)\n elsif type == \"A\"\n address[domain.strip.to_sym].push(source.strip)\n end\n end\n # Adding CNAME hash and ADDRESS hash into domain hash\n domain[:CNAME].push(cname)\n domain[:ADDRESS].push(address)\n return domain\nend", "def update_from_host_store!\n\t\tputs \"Invoke internal procedures to update the sub-domain list from the host store.\"\n\t\t# Step 1 - obtain the latest sub-domains\n\t\tmy_tracker = Wmap::HostTracker.instance\n\t\tmy_tracker.data_dir = @data_dir\n\t\tmy_tracker.hosts_file = my_tracker.data_dir + \"/\" + \"hosts\"\n\t\tmy_tracker.load_known_hosts_from_file(my_tracker.hosts_file)\n\t\tsubs = my_tracker.dump_sub_domains - [nil,\"\"]\n\t\tmy_tracker = nil\n\t\t# Step 2 - update the sub-domain list\n\t\tunless subs.empty?\n\t\t\t#subs.map { |x| self.add(x) unless domain_known?(x) }\n\t\t\tself.bulk_add(subs,@max_parallel)\n\t\tend\n\t\tputs \"Update discovered sub-domains into the store: #{@known_internet_sub_domains}\"\n\t\tself.save!(file_domains=@file_sub_domains, domains=@known_internet_sub_domains)\n\trescue Exception => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\t\treturn nil\n\tend", "def compare_domain(args)\r\n server_list = args[:server_list]\r\n domain = args[:domain_name]\r\n rtype = args[:rtype]\r\n rdata = args[:actual_rdata]\r\n rdata = (rtype == \"NAPTR\") ? rdata : rdata.downcase\r\n r = \"\"\r\n failed_rlist = []\r\n @timeout = 30\r\n sleep 15 if args[:sleepfirst]\r\n server_list.each do |server|\r\n dig_pass = \"succeed to dig @#{server} #{domain} #{rtype} => #{rdata}\"\r\n dig = `dig @#{server} #{domain} #{rtype}`\r\n if dig.include?(rdata)\r\n puts dig_pass\r\n else\r\n puts \"dig @#{server} #{domain} #{rtype} failed as expected!\" if args[:expected_dig_fail]\r\n return \"succeed\" if args[:expected_dig_fail]\r\n begin\r\n Timeout::timeout(@timeout){\r\n while !dig.include?(rdata)\r\n sleep 5\r\n dig_retry = `dig @#{server} #{domain} #{rtype}`\r\n puts dig_pass if dig_retry.include?(rdata)\r\n break if dig_retry.include?(rdata)\r\n end\r\n }\r\n rescue Timeout::Error\r\n puts \"Error => dig @#{server} #{domain} #{rtype} timed out!\"\r\n failed_rlist << \"failed\"\r\n end\r\n end\r\n end\r\n failed_rlist.empty? ? 
'succeed' : 'failed'\r\n end", "def update_dns()\n #\n # Handle each host in the config file at a time\n #\n @config['hosts'].each {|h|\n #\n # Skip update if current public IP matches the IP for the host in the cache file\n #\n if @cache[h['host']] && @myip.eql?(@cache[h['host']]['ip'])\n @logger.info \"Skipping #{h['host']} - Already pointing to #{@myip}\"\n else\n url = \"https://domains.google.com/nic/update?hostname=#{h['host']}&myip=#{@myip}\"\n @logger.info \"Updating host [#{h['host']}] - #{url}\"\n\n #\n # Access Google Domains API to update IP\n #\n open(url,\n :http_basic_authentication => [h['username'],h['password']],\n \"User-Agent\" => \"#{@options[:user_agent]}\") {|r|\n if r.status[0] == \"200\"\n r.each_line {|line|\n if (/(?<sts>(good|nochg))\\s+(?<ip>(\\d+\\.\\d+\\.\\d+\\.\\d+)?)/ =~ line)\n #\n # Cache if API call was successful\n #\n @cache[h['host']] = {'ip' => ip}\n @logger.debug \"[#{@responses[sts][0]}][#{sts}] : [#{@responses[sts][1]}]\"\n else\n @logger.warn \"[#{@responses[line][0]}][#{line}] : [#{@responses[line][1]}]\"\n end\n }\n else\n @logger.error \"Error status returned #{r.status.inspect}\"\n end\n }\n write_cache_file\n end\n }\n end", "def check_domains(sld_or_domains, tlds=nil)\n query_args = {'Command' => 'Check'}\n if sld_or_domains.is_a? Array\n query_args['DomainList'] = sld_or_domains.join(',')\n else\n query_args['SLD'] = sld_or_domains\n end\n unless tlds == nil\n if tlds.is_a? Array\n query_args['TLDList'] = tlds.join(',')\n else\n query_args['TLD'] = tlds\n end\n end\n query_push(query_args)\n get_response\n # Set up a hash of domains\n checked_domains = {}\n if query_args['DomainList']\n sld_or_domains.each {|domain| checked_domains[domain] = nil}\n elsif query_args['TLDList']\n tlds.each {|tld| checked_domains[sld_or_domains + '.' + tld] = nil}\n else\n checked_domains[sld_or_domains + '.' + tlds] = nil\n end\n # RRPCodes: 210 = available, 211 = not available\n if checked_domains.length > 1\n # If we have multiple domains, run a loop to fill in results\n x = 1\n @result['DomainCount'].to_i.times do\n domain = @result['Domain' + x.to_s]\n if @result['RRPCode' + x.to_s].to_i == 210\n checked_domains[domain] = true\n elsif @result['RRPCode' + x.to_s].to_i == 211\n checked_domains[domain] = false\n end\n x += 1\n end\n else\n if @result['RRPCode'].to_i == 210\n checked_domains[sld_or_domains + '.' + tlds] = true\n elsif @result['RRPCode'].to_i == 211\n checked_domains[sld_or_domains + '.' + tlds] = false\n end\n end\n puts checked_domains.to_yaml\n end", "def gather_pollable_domains\n @logger.info 'CsyncJob Generate: Gathering current domain(s) data'\n Nameserver.select(:hostname_puny, :domain_id).all.each do |ns|\n %i[secure insecure].each do |i|\n @input_store[i][ns.hostname_puny] = [] unless @input_store[i].key? ns.hostname_puny\n end\n\n append_domains_to_list(ns)\n end\n end", "def update_node_info_cache()\n @all_stats = []\n @nodes.each { |node|\n ip = node.private_ip\n acc = AppControllerClient.new(ip, @@secret)\n\n begin\n @all_stats << acc.get_stats()\n rescue FailedNodeException\n Djinn.log_warn(\"Failed to get status update from node at #{ip}, so \" +\n \"not adding it to our cached info.\")\n end\n }\n end", "def domain_check(*domains)\n domains.flatten!\n response = send_request(domain_check_xml(*domains))\n\n get_result(:xml => response, :callback => :domain_check_process)\n end", "def refresh_ip_sites\n\t\tputs \"Refresh all entries that contain an IP address instead of a FQDN ... 
\"\n\t\tsites=get_ip_sites\n\t\tlive_sites=sites.delete_if { |x| @known_sites[x]['code'] == 10000 or @known_sites[x]['code'] == 20000 }\n\t\tchanges=Hash.new\n\t\tchanges=bulk_refresh(live_sites)\n\t\t@known_sites.merge!(changes)\n\t\tputs \"Done refresh IP sites.\"\n\t\treturn changes\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def refresh_all\n\t\tputs \"Refresh all the entries in the local host repository in one shot.\"\n\t\tchanges=Hash.new\n\t\thosts=@known_hosts.keys\n\t\t@known_hosts=Hash.new\n\t\tchanges=bulk_add(hosts)\n\t\t@known_hosts.merge!(changes)\n\t\t#@known_hosts.keys.map do |key|\n\t\t#\tunless is_ip?(key)\n\t\t#\t\thost=refresh(key)\n\t\t#\t\tchanges.push(host) unless host.nil?\n\t\t#\tend\n\t\t#end\n\t\tputs \"\\n#{changes.size} Entries Refreshed:\" if changes.size>0\n\t\t#changes.map { |x| puts x }\n\t\tputs \"Done refreshing the local hosts.\"\n\t\treturn changes\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\"\n\tend", "def ddns_update(ip)\n ddns_update_he(HEuser, HEpw, HEtid, ip)\n {\n \"canishe.com\" => [ \"mail\", \"www\", \"@\", \"*\" ],\n \"gaelan.me\" => [ \"www\", \"@\" ],\n \"psd-chinese.net\" => [ \"www\", \"@\" ],\n }.each do |zone, hosts|\n hosts.each do |host|\n ddns_update_iwmn(host, zone, ip, IWMNuser, IWMNpw)\n end\n end\n\t{\n \"marimbaboise.com\" => [ \"www\", \"@\" ],\n }.each do |zone, hosts|\n hosts.each do |host|\n ddns_update_enom(host, zone, ip, BRpw)\n end\n end\nend", "def check_hostnames\n all_good = true\n \n @check_groups.each do |group|\n group.checks.each do |check|\n unless check.hostname && Dnsruby::Resolv.getaddress(check.hostname)\n puts \"Error: check #{check.name} has invalid hostname '#{check.hostname}'\"\n all_good = false\n end\n end\n end\n \n all_good\n end", "def domains_slaves_sync(domains, masterip)\n existing_domains = domains_slaves_list\n\n # Add any missing domains\n responses = []\n domains.each do |domain|\n unless existing_domains.include?(domain)\n response = domains_slaves_add(domain, masterip)\n response[:domain] ||= domain\n responses << response\n end\n end\n\n # Now check the existing domains\n existing_domains.keys.sort.each do |domain|\n if domains.include?(domain)\n # Update the masterip if it isn't correct\n if existing_domains[domain] != masterip\n response = domains_slaves_updatemasterip(domain, masterip)\n response[:domain] ||= domain\n responses << response\n end\n else\n # Delete domain; not on list\n response = domains_slaves_delete(domain)\n response[:domain] ||= domain\n responses << response\n end\n end\n\n responses\n end", "def create_lookups( nodes )\n\t\t\treturn nodes.each_with_object( {} ) do |(identifier, node), hash|\n\t\t\t\tself.log.debug \"Creating lookup for node: %p\" % [ node ]\n\t\t\t\tname = node['name'] or next\n\t\t\t\trecord_type = node['record_type'] || 'A'\n\t\t\t\trecord_class = Resolv::DNS::Resource::IN.const_get( record_type ) or\n\t\t\t\t\traise \"Unsupported record type %p!\" % [ record_type ]\n\n\t\t\t\tself.log.debug \"Looking up %s record for %s (%s)\" % [ record_type, name, identifier ]\n\t\t\t\tthr = Thread.new do\n\t\t\t\t\tself.resolver.getresources( name, record_class )\n\t\t\t\tend\n\t\t\t\thash[ thr ] = identifier\n\t\t\tend\n\t\tend", "def domain_update(domain, fields)\n unless ([ :admin, :tech, :billing, :nservers ] - fields.keys).empty?\n raise ArgumentError, \"Required fields not found\"\n end\n query :domain_modify, {\n domain: domain,\n admin_c: fields[:admin],\n tech_c: fields[:tech],\n 
billing_c: fields[:billing],\n ns_list: fields[:nservers].join(':')\n }\n end", "def hostnames(nodes)\n @referenced_nodes ||= ObjectList.new\n nodes = listify(nodes)\n nodes.each_node do |node|\n @referenced_nodes[node.name] ||= node\n end\n return nodes.values.collect {|node| node.domain.name}\n end", "def get_host_keys(_nodes, metadata)\n updated_metadata = {}\n # Get the list of nodes, per hostname (just in case several nodes share the same hostname)\n # Hash<String, Array<String> >\n hostnames = Hash.new { |hash, key| hash[key] = [] }\n metadata.each do |node, node_metadata|\n if node_metadata[:host_ip]\n hostnames[node_metadata[:host_ip]] << node\n elsif node_metadata[:hostname]\n hostnames[node_metadata[:hostname]] << node\n end\n end\n unless hostnames.empty?\n host_keys_for(*hostnames.keys).each do |hostname, ip|\n hostnames[hostname].each do |node|\n updated_metadata[node] = ip\n end\n end\n end\n updated_metadata\n end", "def process_domains\n domains.each do |domain|\n params = options\n params[:host] = configuration.host\n params[:server] = servers[domain][\"server\"]\n compiler = YMDP::Compiler::Base.new(domain, git_hash, params)\n \n compiler.process_all\n end\n end", "def updates\n @report = {}\n Server.find_each do |server|\n # Go through each package. In some cases (gems) there may be multiple\n # versions of a package on the machine.\n packages = {}\n server.servers_to_packages.find_each do |package_map|\n next unless package_map.status == 'pending'\n package = Package.find(package_map.package_id)\n\n new = {}\n new['provider'] = package.provider\n new['version'] = package.version\n packages[package.name] = [] unless packages.key?(package.name)\n packages[package.name] << new\n end\n @report[server.hostname] = packages unless packages.empty?\n end\n end", "def get_and_update_node_state!(node, attribute_names)\n ret = {}\n external_ref = node[:external_ref]\n external_ref_changed = false\n attribute_names.each do |attribute_name|\n case attribute_name\n when :host_addresses_ipv4\n external_ref_changed = true if NodeState.update_host_addresses_ipv4!(ret, external_ref, node) \n when :fqdn\n external_ref_changed = true if NodeState.update_fqdn!(ret, external_ref, node) \n else\n Log.error(\"Not treating update of BOSH node attribute '#{attribute_name}'\")\n end\n end\n node.update(external_ref: external_ref) if external_ref_changed\n ret \n end", "def fetch_nodes(nodes, dns_cache)\n ret = []\n nodes.each_with_index do |item, index|\n ip, port = item\n host = dns_cache.fetch(ip) {\n |missing_ip|\n host = Resolv.getname(missing_ip)\n dns_cache[ip] = host\n host\n }\n name = \"#{host}:#{port}\"\n role = index == 0 ? 'master' : 'slave'\n node = {\n :host => host, :port => port,\n :name => name, :ip => ip,\n :role => role\n }\n ret << node\n end\n ret\n end", "def health_check\n ret = {}\n unready = []\n NodeObject.all.each do |node|\n unready << node.name unless node.ready?\n end\n ret[:nodes_not_ready] = unready unless unready.empty?\n failed = Proposal.all.select { |p| p.active? && p.failed? 
}\n ret[:failed_proposals] = failed.map(&:display_name) unless failed.empty?\n ret\n end", "def apply(nodes)\n node_manager.assert_known(nodes)\n for node in nodes\n node_manager.find(node).apply\n end\n end", "def compare_ns_records( records, hosts )\n\t\t\trecord_hosts = Set.new( records.map(&:name) )\n\t\t\thosts = Set.new( hosts.map {|name| Resolv::DNS::Name.create(name + '.')} )\n\n\t\t\tself.log.debug \"Comparing %p to %p\" % [ record_hosts, hosts ]\n\n\t\t\tstatus = nil\n\t\t\tif ( record_hosts ^ hosts ).empty?\n\t\t\t\tstatus = { ns_record: record_hosts.map(&:to_s) }\n\t\t\telsif !( subset = record_hosts - hosts ).empty?\n\t\t\t\tstatus = { error: \"missing NS records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\telsif !( subset = hosts - record_hosts ).empty?\n\t\t\t\tstatus = { error: \"extra NS records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\tend\n\n\t\t\treturn status\n\t\tend", "def query_files_hosts(hostlist, hosts)\n report_dir = get_report_dir\n\n existing_nodes = hostlist.map{|x| x[:certname]}\n\n local_host_template = {\n :deactivated=>false,\n :latest_report_hash=>nil,\n :facts_environment=>nil,\n :cached_catalog_status=>\"not_used\",\n :report_environment=>nil,\n :latest_report_corrective_change=>nil,\n :catalog_environment=>nil,\n :facts_timestamp=>nil,\n :latest_report_noop=>nil,\n :expired=>false,\n :latest_report_noop_pending=>nil,\n :report_timestamp=>nil,\n :certname=>nil,\n :catalog_timestamp=>nil,\n :latest_report_job_id=>nil,\n :latest_report_status=>nil\n }.freeze\n\n local_host_reports = []\n\n if File.directory?(report_dir)\n @logger.debug(\"Processing Report Directory: #{report_dir}\")\n\n Dir.glob(\"#{report_dir}/*\").each do |node_dir|\n @logger.debug(\"Processing Node Directory: #{node_dir}\")\n\n latest_report = Dir.glob(\"#{node_dir}/*.yaml\").sort.last\n if latest_report\n @logger.debug(\"Processing YAML Report: #{latest_report}\")\n\n begin\n require 'puppet'\n\n transaction_report = YAML.load_file(latest_report)\n\n unless (hosts.empty? 
|| hosts.include?(transaction_report.host))\n @logger.debug(\"Skipping #{transaction_report.host} since it is not in the host list\")\n next\n end\n\n if existing_nodes.include?(transaction_report.host)\n @logger.debug(\"Skipping #{transaction_report.host} since it already exists\")\n next\n end\n\n local_host_data = Marshal.load(Marshal.dump(local_host_template))\n local_host_data[:latest_report_hash] = transaction_report.catalog_uuid\n local_host_data[:facts_environment] = transaction_report.environment\n local_host_data[:report_environment] = transaction_report.environment\n local_host_data[:latest_report_corrective_change] = transaction_report.corrective_change\n local_host_data[:catalog_environment] = transaction_report.environment\n local_host_data[:facts_timestamp] = transaction_report.time.to_s\n local_host_data[:latest_report_noop] = transaction_report.noop\n local_host_data[:latest_report_noop_pending] = transaction_report.noop_pending\n local_host_data[:report_timestamp] = transaction_report.time.to_s\n local_host_data[:certname] = transaction_report.host\n local_host_data[:catalog_timestamp] = transaction_report.time.to_s\n local_host_data[:latest_report_job_id] = transaction_report.catalog_uuid\n local_host_data[:latest_report_status] = transaction_report.status\n\n hostlist << local_host_data\n\n @logger.debug(\"Processed Host Report: #{local_host_data}\")\n rescue => e\n @logger.warn \"Error processing report at '#{latest_report}': #{e}\"\n end\n else\n @logger.debug \"Could not find latest report in '#{node_dir}'\"\n end\n end\n else\n @logger.debug \"Could not find report directory at '#{report_dir}'\"\n end\n end", "def resolve_ip_sites\n\t\tputs \"Resolve sites that contain an IP address. Update the site cache table once a hostname is found in the local host table.\" if @verbose\n\t\tupdates=Array.new\n\t\tsites=get_ip_sites\n\t\thost_tracker=Wmap::HostTracker.instance\n\t\thost_tracker.data_dir=@data_dir\n\t\thost_tracker.hosts_file = host_tracker.data_dir + \"/\" + \"hosts\"\n\t\thost_tracker.load_known_hosts_from_file\n\t\tsites.map do |site|\n\t\t\tputs \"Work on resolve the IP site: #{site}\" if @verbose\n\t\t\tip=url_2_host(site)\n\t\t\thostname=host_tracker.local_ip_2_host(ip)\n\t\t\tif hostname.nil?\n\t\t\t\tputs \"Can't resolve #{ip} from the local host store. 
Skip #{site}\" if @verbose\n\t\t\telse\n\t\t\t\tputs \"Host-name found for IP #{ip}: #{hostname}\" if @verbose\n\t\t\t\tupdates.push(site)\n\t\t\t\trefresh(site)\n\t\t\tend\n\t\tend\n\t\tupdates.sort!\n\t\tputs \"The following sites are now refreshed: #{updates}\" if @verbose\n\t\thost_tracker=nil\n\t\treturn updates\n\trescue Exception => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def verify_prior_nodes_live(node)\n if node.workorder.has_key?(\"rfcCi\")\n ci = node.workorder.rfcCi\n actionName = node.workorder.rfcCi.rfcAction\n else\n ci = node.workorder.ci\n actionName = node.workorder.actionName\n end\n \n timeout_sec = node['solr_api_timeout_sec'].to_i\n \n #get the map with all cloud's id & deployment order in the form |key,value| => |cloudId, deployment_order|\n #ex {34951930=>\"7\", 35709237=>\"8\", 34951924=>\"4\"}\n cloudIdsWithDpmtOrderMap = cloud_deployment_order(node)\n Chef::Log.info(\"Cloud id & deployment order map : #{cloudIdsWithDpmtOrderMap.to_json}\")\n \n #get array of solrcloud nodes for the action selected\n #get list of all solrcloud nodes which are selected for this action in the deployment\n nodesInAction = get_solrcloud_instances_by_action(node, actionName)\n \n thisNodeCiName = ci[:ciName]\n Chef::Log.info(\"This solrcloud node's ciName : #{thisNodeCiName}\")\n \n #get the node cloud id \"solrcloud-34951924-1\" => \"34951924\"\n thisCloudId = thisNodeCiName.split('-')[1]\n \n #get the deployment order of this node's cloud\n thisNodeCloudDpmtOrder = cloudIdsWithDpmtOrderMap.fetch(thisCloudId.to_i)\n Chef::Log.info(\"This node's cloud deployment order : #{thisNodeCloudDpmtOrder}\")\n \n #get all cloud ids having deployment order <= node_cloud_deployment_order. This is required to make sure that all prior cloud deployment was completed\n #ex From all clouds {34951930=>\"7\", 35709237=>\"8\", 34951924=>\"4\"} for node_cloud_deployment_order = 7 => {34951930=>\"7\", 34951924=>\"4\"}\n #same node_cloud_id is inclusive because there may be multiple nodes in node's cloud.\n #This list is used to make sure that all nodes across this cloud list are deployed first\n cloudIdsTobeDeployedPrior = []\n cloudIdsWithDpmtOrderMap.each do |k, v|\n if v.to_i <= thisNodeCloudDpmtOrder.to_i\n cloudIdsTobeDeployedPrior.push k\n end\n end\n Chef::Log.info(\"Cloud ids to be deployed before : #{cloudIdsTobeDeployedPrior.to_json}\")\n \n #get solrcloud nodes for cloud ids to be deployed prior\n nodeIndexesTobeDeployedPrior = []\n nodesInAction.each do |n|\n ciName = n['ciName']\n cloudId = ciName.split('-')[1]\n index = ciName.split('-', 2)[1]\n if cloudIdsTobeDeployedPrior.include? cloudId.to_i\n # prefx the cloud deployment order to determine the order of solr instace in the deployment\n # User might select the lower cloudId with higher deployment order and vice-versa so deployment order will be useful\n nodeIndexesTobeDeployedPrior.push cloudIdsWithDpmtOrderMap[cloudId.to_i]+\"-\"+index\n end\n end\n \n #sort solrcloud_nodes_tobe_deployed_prior by ciName (cloudId & compute index)\n nodeIndexesTobeDeployedPrior.sort! 
{|a, b| b <=> a}\n #default sorting is in descending order, we want to process the deployment in the ascending order of compute number\n #so reverse the order\n nodeIndexesTobeDeployedPrior.reverse!\n Chef::Log.info(\"solrclouds to executed before #{nodeIndexesTobeDeployedPrior.to_json}\")\n \n computeCloudIdIpMap = get_compute_number_to_ip_map(node)\n Chef::Log.info(\"compute number to ip map : #{computeCloudIdIpMap.to_json}\")\n # prefx the cloud deployment order to determine the order of solr instace in the deployment\n # User might select the lower cloudId with higher deployment order and vice-versa so deployment order will be useful\n thisNodeIndex = thisNodeCloudDpmtOrder+\"-\"+thisNodeCiName.split('-',2)[1]\n Chef::Log.info(\"This node index : #{thisNodeIndex}\")\n \n # select only the nodes with lower index & this node index\n nodeIndexesTobeDeployedPrior = nodeIndexesTobeDeployedPrior.select {|cloudIdIndex| cloudIdIndex <= thisNodeIndex}\n \n index = nodeIndexesTobeDeployedPrior.index {|id| id == thisNodeIndex}\n Chef::Log.info(\"index = #{index}\")\n \n wait_time = index * 10;\n Chef::Log.info(\"Allowing #{wait_time} seconds for prior nodes to start the deployment before\")\n sleep wait_time\n \n nodeIpsTobeDeployedPrior = []\n nodeIndexesTobeDeployedPrior.each do |nodeIndex|\n if !nodeIndex.eql? thisNodeIndex\n nodeIndexWithoutDpmtOrder = nodeIndex.split('-',2)[1]\n Chef::Log.info(\"nodeIndexWithoutDpmtOrder = #{nodeIndexWithoutDpmtOrder}\")\n nodeIpsTobeDeployedPrior.push computeCloudIdIpMap[nodeIndexWithoutDpmtOrder]\n end\n end\n \n # No need to check for other nodes to confirm those are live before processing this node as there are no prior nodes in the list\n if nodeIpsTobeDeployedPrior.empty?\n return\n end\n \n Chef::Log.info(\"nodeIpsTobeDeployedPrior = #{nodeIpsTobeDeployedPrior.to_json}\")\n host = nodeIpsTobeDeployedPrior[0]\n cluster_state = get_cluster_state(host, node['port_no'], timeout_sec)\n nodes_up_status = nodes_live?(host, node['port_no'], nodeIpsTobeDeployedPrior, timeout_sec)\n Chef::Log.info(\"Node live status : #{nodes_up_status}\")\n if !nodes_up_status \n error = \"Some of the prior nodes from list #{nodeIpsTobeDeployedPrior.to_json} in the deployment are not live.\"\n puts \"***FAULT:FATAL=#{error}\"\n raise error\n end\n end", "def parse_dns(dns_raw)\n dns = []\n dns_records = {}\n record_type_A = []\n record_type_A_IP = []\n record_type_CNAME = []\n record_type_CNAME_alias = []\n\n #adds each line to dns array and splipt them with \",\"\n dns_raw.each do |lines_in_files|\n dns.push([lines_in_files.split(\",\")])\n end\n\n #Checks for recordA,IP or recordCNAME and adds them to the respected array\n dns.each do |words_in_files|\n if words_in_files[0][0] == \"A\"\n record_type_A.push(words_in_files[0][1].strip)\n record_type_A_IP.push(words_in_files[0][2].strip)\n elsif words_in_files[0][0] == \"CNAME\"\n record_type_CNAME.push(words_in_files[0][1].strip)\n record_type_CNAME_alias.push(words_in_files[0][2].strip)\n end\n end\n\n #record_A hash stores values of recordA\n record_A = {\n :source => record_type_A,\n :ip => record_type_A_IP,\n }\n\n #recordCNAME hash stores values of recordCNAME\n record_CNAME = {\n :source => record_type_CNAME,\n :alias => record_type_CNAME_alias,\n }\n\n #dns_records gets both Hashes\n dns_records = {\n :A => record_A,\n :CNAME => record_CNAME,\n }\n\n #returns record dns_record with two hashes.\n return dns_records\nend", "def cert_needs_updating?(node)\n if !file_exists?([:node_x509_cert, node.name], [:node_x509_key, 
node.name])\n return true\n else\n cert = load_certificate_file([:node_x509_cert, node.name])\n if cert.not_after < Time.now.advance(:months => 2)\n log :updating, \"cert for node '#{node.name}' because it will expire soon\"\n return true\n end\n if cert.subject.common_name != node.domain.full\n log :updating, \"cert for node '#{node.name}' because domain.full has changed (was #{cert.subject.common_name}, now #{node.domain.full})\"\n return true\n end\n cert.openssl_body.extensions.each do |ext|\n if ext.oid == \"subjectAltName\"\n ips = []\n dns_names = []\n ext.value.split(\",\").each do |value|\n value.strip!\n ips << $1 if value =~ /^IP Address:(.*)$/\n dns_names << $1 if value =~ /^DNS:(.*)$/\n end\n dns_names.sort!\n if ips.first != node.ip_address\n log :updating, \"cert for node '#{node.name}' because ip_address has changed (from #{ips.first} to #{node.ip_address})\"\n return true\n elsif dns_names != dns_names_for_node(node)\n log :updating, \"cert for node '#{node.name}' because domain name aliases have changed\\n from: #{dns_names.inspect}\\n to: #{dns_names_for_node(node).inspect})\"\n return true\n end\n end\n end\n end\n return false\n end", "def run\n super\n res = []\n entity_name = _get_entity_name\n entity_type = _get_entity_type_string\n\n # skip cdns\n if !get_cdn_domains.select{ |x| entity_name =~ /#{x}/}.empty? || \n !get_internal_domains.select{ |x| entity_name =~ /#{x}/}.empty?\n _log \"This domain resolves to a known cdn or internal host, skipping\"\n return\n end\n\n # check that it resolves\n resolves_to = resolve_names entity_name\n unless resolves_to.first\n _log \"No resolution for this record, unable to check\"\n return \n end\n\n # We use their DNS servers to query\n nameservers= ['185.228.168.168', '185.228.168.169']\n _log \"Querying #{nameservers}\"\n dns_obj = Resolv::DNS.new(nameserver: nameservers)\n \n # Try twice, just in case (avoid FP's)\n res = dns_obj.getaddresses(entity_name)\n res.concat(dns_obj.getresources(entity_name, Resolv::DNS::Resource::IN::CNAME)).flatten\n\n # Detected only if there's no resolution\n if res.any?\n _log \"Resolves to #{res.map{|x| \"#{x.to_s}\" }}. Seems we're good!\"\n else\n source = \"CleanBrowsing\"\n description = \"The Cleanbrowsing DNS security filter focuses on restricting access \" + \n \"to malicious activity. 
It blocks phishing, spam and known malicious domains.\"\n \n _create_linked_issue(\"blocked_by_dns\", {\n status: \"confirmed\",\n additional_description: description,\n source: source, \n proof: \"Resolved to the following address(es) outside of #{source} (#{nameservers}): #{resolves_to.join(\", \")}\",\n to_reproduce: \"dig #{entity_name} @#{nameservers.first}\",\n references: [{ type: \"remediation\", uri: \"https://cleanbrowsing.org/\" }]\n }) \n \n # Also store it on the entity \n blocked_list = @entity.get_detail(\"suspicious_activity_detected\") || [] \n @entity.set_detail(\"suspicious_activity_detected\", blocked_list.concat([{source: source}]))\n\n end\n\n end", "def convert_fqdns_to_ips(nodes)\n if is_cloud?\n Djinn.log_debug(\"In a cloud deployment, so converting FQDNs -> IPs\")\n else\n Djinn.log_debug(\"Not in a cloud deployment, so not converting FQDNs -> IPs\")\n return nodes\n end\n\n if @creds[\"hostname\"] =~ /#{FQDN_REGEX}/\n begin\n @creds[\"hostname\"] = HelperFunctions.convert_fqdn_to_ip(@creds[\"hostname\"])\n rescue Exception => e\n Djinn.log_fatal(\"Failed to convert main hostname #{@creds['hostname']}\")\n HelperFunctions.log_and_crash(\"Failed to convert main hostname #{@creds['hostname']}\")\n end\n end\n \n nodes.each { |node|\n # Resolve the private FQDN to a private IP, but don't resolve the public\n # FQDN, as that will just resolve to the private IP.\n\n pri = node.private_ip\n if pri =~ /#{FQDN_REGEX}/\n begin\n node.private_ip = HelperFunctions.convert_fqdn_to_ip(pri)\n rescue Exception => e\n node.private_ip = node.public_ip\n end\n end\n }\n\n return nodes\n end", "def expiration_results\n domains = config[:domain].split(',')\n warning_days = config[:warning].to_i\n critical_days = config[:critical].to_i\n max_retries = 4\n\n results = {\n critical: {},\n warning: {},\n ok: {},\n unknown: {}\n }\n whois = Whois::Client.new(timeout: config[:timeout])\n\n domains.each do |domain|\n begin\n tries ||= 0\n whois_result = whois.lookup(domain).parser\n rescue Timeout::Error, Errno::ECONNRESET, Whois::ConnectionError\n tries += 1\n if tries < max_retries\n retry\n else\n results[:unknown][domain] = 'Connection error' unless config[:'ignore-errors']\n next\n end\n end\n\n begin\n expires_on = DateTime.parse(whois_result.expires_on.to_s)\n domain_result = (expires_on - DateTime.now).to_i\n if domain_result <= critical_days\n results[:critical][domain] = domain_result\n elsif domain_result <= warning_days\n results[:warning][domain] = domain_result\n else\n results[:ok][domain] = domain_result\n end\n rescue StandardError\n results[:unknown][domain] = 'Parsing error' unless config[:'ignore-errors']\n end\n end\n results\n end", "def analyze(sites, checks)\n success = Hash.new { |h, k| h[k] = Hash.new(&h.default_proc) }\n counts = Hash.new { |h, k| h[k] = Hash.new(&h.default_proc) }\n checks.each do |nam, check_data|\n success[nam] = sites.select { |_, site| site[nam] =~ check_data[SiteStandards::CHECK_VALIDATE] }.keys\n counts[nam][SITE_PASS] = success[nam].count\n counts[nam][SITE_WARN] = 0 # Reorder output\n counts[nam][SITE_FAIL] = sites.select { |_, site| site[nam].nil? 
}.count\n counts[nam][SITE_WARN] = sites.size - counts[nam][SITE_PASS] - counts[nam][SITE_FAIL]\n end\n\n return [\n counts, {\n SITE_PASS => '# Sites with links to primary ASF page',\n SITE_WARN => '# Sites with link, but not an expected ASF one',\n SITE_FAIL => '# Sites with no link for this topic'\n }, success\n ]\n end", "def update_from_site_store!\n\t\t\t#begin\n puts \"Invoke internal procedures to update the primary host-name table from the site store.\"\n # Step 1 - update the prime host table based on the SSL cert CN fields\n\t\t\t\tcns=Hash.new\n\t\t\t\tchecker=Wmap::UrlChecker.new(:data_dir=>@data_dir)\n my_tracker = Wmap::SiteTracker.instance\n my_tracker.sites_file = @data_dir + \"sites\"\n my_tracker.load_site_stores_from_file\n\t\t\t\tmy_tracker.get_ssl_sites.map do |site|\n\t\t\t\t\tputs \"Exam SSL enabled site entry #{site} ...\"\n\t\t\t\t\tmy_host=url_2_host(site)\n\t\t\t\t\tnext if @known_hosts.key?(my_host) # add the logic to optimize the process\n\t\t\t\t\tputs \"Pull SSL cert details on site: #{site}\"\n\t\t\t\t\tcn=checker.get_cert_cn(site)\n\t\t\t\t\tunless cn.nil? or cns.key?(cn)\n\t\t\t\t\t\tcns[cn]=true\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\tcns.keys.map do |cn|\n\t\t\t\t\tif is_fqdn?(cn)\n\t\t\t\t\t\tnext if @known_hosts.key?(cn)\n\t\t\t\t\t\tself.add(cn)\n\t\t\t\t\t\tputs \"New entry added: #{cn}\\t#{@known_hosts[cn]}\"\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\t# Step 2 - Save the cache into the file\n\t\t\t\tself.save!\n checker=nil\n my_tracker=nil\n\t\t\t#rescue Exception => ee\n\t\t\t#\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n # checker=nil\n # my_tracker=nil\n\t\t\t#\treturn nil\n\t\t\t#end\n\t\tend", "def hosts_file(nodes=nil)\n if nodes.nil?\n if @referenced_nodes && @referenced_nodes.any?\n nodes = @referenced_nodes\n nodes = nodes.merge(nodes_like_me[:services => 'mx']) # all nodes always need to communicate with mx nodes.\n end\n end\n return {} unless nodes\n hosts = {}\n my_location = @node['location'] ? @node['location']['name'] : nil\n nodes.each_node do |node|\n hosts[node.name] = {'ip_address' => node.ip_address, 'domain_internal' => node.domain.internal, 'domain_full' => node.domain.full}\n node_location = node['location'] ? node['location']['name'] : nil\n if my_location == node_location\n if facts = @node.manager.facts[node.name]\n if facts['ec2_public_ipv4']\n hosts[node.name]['ip_address'] = facts['ec2_public_ipv4']\n end\n end\n end\n host_pub_key = Util::read_file([:node_ssh_pub_key,node.name])\n if host_pub_key\n hosts[node.name]['host_pub_key'] = host_pub_key\n end\n end\n hosts\n end", "def hosts_file(nodes=nil)\n if nodes.nil?\n if @referenced_nodes && @referenced_nodes.any?\n nodes = @referenced_nodes\n nodes = nodes.merge(nodes_like_me[:services => 'mx']) # all nodes always need to communicate with mx nodes.\n end\n end\n return {} unless nodes\n hosts = {}\n my_location = @node['location'] ? @node['location']['name'] : nil\n nodes.each_node do |node|\n hosts[node.name] = {'ip_address' => node.ip_address, 'domain_internal' => node.domain.internal, 'domain_full' => node.domain.full}\n node_location = node['location'] ? 
node['location']['name'] : nil\n if my_location == node_location\n if facts = @node.manager.facts[node.name]\n if facts['ec2_public_ipv4']\n hosts[node.name]['ip_address'] = facts['ec2_public_ipv4']\n end\n end\n end\n host_pub_key = Util::read_file([:node_ssh_pub_key,node.name])\n if host_pub_key\n hosts[node.name]['host_pub_key'] = host_pub_key\n end\n end\n hosts\n end", "def hosts(touchAndPrune=false)\n hosts=@vp_lock.synchronize{@hostname2vp.keys}\n if touchAndPrune\n check_up_hosts(hosts)\n else\n hosts\n end\n end", "def hash_nodes(statements, nodes, grounded_hashes)\n hashes = grounded_hashes.dup\n ungrounded_hashes = {}\n hash_needed = true\n\n # We may have to go over the list multiple times. If a node is marked as\n # grounded, other nodes can then use it to decide their own state of\n # grounded.\n while hash_needed\n starting_grounded_nodes = hashes.size\n nodes.each do | node |\n unless hashes.member? node\n grounded, hash = node_hash_for(node, statements, hashes)\n if grounded\n hashes[node] = hash\n end\n ungrounded_hashes[node] = hash\n end\n end\n\n # after going over the list, any nodes with a unique hash can be marked\n # as grounded, even if we have not tied them back to a root yet.\n uniques = {}\n ungrounded_hashes.each do |node, hash|\n uniques[hash] = uniques.has_key?(hash) ? false : node\n end\n uniques.each do |hash, node|\n hashes[node] = hash if node\n end\n hash_needed = starting_grounded_nodes != hashes.size\n end\n [hashes, ungrounded_hashes]\n end", "def dns_update(zone, records)\n update = Dnsruby::Update.new(zone)\n records.each do |r|\n if r.type.upcase == 'ADD'\n s = \"#{Domain} 3600 #{Type} #{RDATA}\"\n rr = Dnsruby::RR.create(s)\n update.add(rr)\n else\n update.delete(r['Domain'], r['Type'], r['RDATA'])\n end\n end\n update\n end", "def check_domains_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DomainApi.check_domains ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling DomainApi.check_domains\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling DomainApi.check_domains\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/domain/check_all'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'Array<Array>'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"DomainApi.check_domains\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, 
status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DomainApi#check_domains\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def poll\n unless memcached_nodes.empty?\n memcached_nodes.each do | hostname_port |\n stats_text = issue_stats hostname_port \n if stats_text.present?\n @last_stats[hostname_port] = parse_and_report_stats hostname_port, stats_text\n else\n @last_stats[hostname_port] = {}\n end \n end\n\n aggregate_stats\n logger.debug \"Done with aggs\" \n end\n end", "def frwdlp(session,hostlst,domain,dest)\n\tdest = dest + \"-DNS-forward-lookup.txt\"\n\tprint_status(\"Performing DNS Forward Lookup for hosts in #{hostlst} for domain #{domain}\")\n\tfilewrt(dest,\"DNS Forward Lookup for hosts in #{hostlst} for domain #{domain}\")\n\tresult = []\n\tthreads = []\n\ttmpout = []\n\tbegin\n\tif ::File.exists?(hostlst)\n\t\t::File.open(hostlst).each {|line|\n \t\t\tthreads << ::Thread.new(line) { |h|\n \t\t\t#print_status(\"checking #{h.chomp}\")\n\t\t \tr = session.sys.process.execute(\"nslookup #{h.chomp}.#{domain}\", nil, {'Hidden' => true, 'Channelized' => true})\n \t\t \twhile(d = r.channel.read)\n \t\t\tif d =~ /(Name)/\n \t\t\t\td.scan(/Name:\\s*\\S*\\s*Address\\w*:\\s*.*?.*?.*/) do |n|\n \t\t\t\ttmpout << n.split\n \t\t\tend\n \t\t\tbreak\n \t\tend\n end\n\n r.channel.close\n r.close\n\t\t\t}\n\t\t}\n\tthreads.each { |aThread| aThread.join }\n\ttmpout.uniq.each do |t|\n \tprint_status(\"\\t#{t.join.sub(/Address\\w*:/, \"\\t\")}\")\n \tfilewrt(dest,\"#{t.join.sub(/Address\\w*:/, \"\\t\")}\")\n end\n\n\telse\n\t\tprint_status(\"File #{hostlst}does not exists!\")\n\t\texit\n\tend\n\trescue ::Exception => e\n \t\tprint_status(\"The following Error was encountered: #{e.class} #{e}\")\n\tend\nend", "def bulk_vhost_update(dbcheck, tenant_array)\n #tenant_status['vhost_updated'] = false\n command = \"chef-client -o prom-classfront::classflow-conf\"\n\n # list of environments that require vhost updates\n tenvs = [] \n tenant_array.each do |tenant|\n tenant_config = tenant[0]\n tenant_status = tenant[1]\n tenvs << tenant_config['env']\n end\n tenant_envlist = tenvs.uniq\n tenant_envlist.each do |tenant_env|\n front_end = dbcheck.lookup_frontend_servers(tenant_env)\n front_end.each do |instance|\n logger.info \"Updating vhost through chef-client run on #{instance}.\"\n begin\n run_ssh_command(instance, command)\n \n # Provides error details on connection failure\n rescue SocketError => e\n logger.error \"Unable to update vhost on instance #{instance}\"\n logger.error \"#{e}\"\n end\n end\n end\n \n # Mark each tenant as updated\n tenant_array.each do |tenant|\n tenant_status = tenant[1]\n tenant_status['vhost_updated'] = true\n end\n end", "def refresh_all\n\t\tputs \"Refresh all the entries within the local site store ... 
\"\n\t\tchanges=Hash.new\n\t\tchanges=bulk_refresh(@known_sites.keys)\n\t\t@known_sites.merge!(changes)\n\t\tputs \"Done refresh all entries.\"\n\t\treturn changes\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def find_nodes_to_verify(references)\n nodes = {}\n\n references.each do |uri, _digest_value|\n uri = uri.sub(/^#/, '')\n node = find_node_by_uri(uri)\n\n nodes[uri] = calculate_digest(node)\n end\n\n nodes\n end", "def check_domains(opts = {})\n data, _status_code, _headers = check_domains_with_http_info(opts)\n data\n end", "def run_checks(key_ttl)\n # We also need to perform the auditing checks against the config\n # Checks to be performed :\n # b) Warn if number of prepublished ZSKs < ZSK:Standby\n # Do this by [alg, alg_length] - so only select those keys which match the config\n @config.keys.zsks.each {|zsk|\n prepublished_zsk_count = @cache.prepublished.keys.select {|k|\n k.zone_key? && !k.sep_key? && (k.algorithm == zsk.algorithm) &&\n (k.key_length == zsk.alg_length)\n }.length\n if (prepublished_zsk_count < zsk.standby)\n msg = \"Not enough prepublished ZSKs! Should be #{zsk.standby} but have #{prepublished_zsk_count}\"\n @parent.log(LOG_WARNING, msg)\n end\n }\n @cache.inuse.each {|key, time|\n timestamp = time[0]\n first_timestamp = time[1]\n # Ignore this check if the key was already in use at the time at which the lifetime policy was changed.\n # How do we know to which AnyKey group this key belongs? Can only take a guess by [algorithm, alg_length] tuple\n # Also going to have to put checks in place where key protocol/algorithm is checked against policy :-(\n # - no we don't! These are only checked when we are loading a new key - not one we've seen before.\n # and of course, a new key should be created with the correct values!\n key_group_policy_changed = false\n # First, find all the key groups which this key could belong to\n keys = @config.changed_config.zsks\n if (key.sep_key?)\n keys = @config.changed_config.ksks\n end\n possible_groups = keys.select{|k| (k.algorithm == key.algorithm) &&\n (k.alg_length == key.key_length)}\n # Then, find the latest timestamp (other than 0)\n key_group_policy_changed_time = 0\n if (possible_groups.length == 0)\n # Can't find the group this key belongs to\n if (@config.changed_config.kasp_timestamp < first_timestamp)\n # @TODO@ o if there has been no change in any of the configured keys then error (the key shouldn't exist)\n # Shouldn't this be caught by something else?\n end\n # o if there has been a change since the key was first seen, then don't raise any errors for this key\n else\n possible_groups.each {|g|\n if (g.timestamp > key_group_policy_changed_time)\n key_group_policy_changed_time = g.timestamp\n key_group_policy_changed = true\n end\n }\n next if (key_group_policy_changed && (first_timestamp < key_group_policy_changed_time))\n end\n\n if (key.zone_key? && !key.sep_key?)\n # d) Warn if ZSK inuse longer than ZSK:Lifetime + Enforcer:Interval\n # Get the ZSK lifetime for this type of key from the config\n zsks = @config.keys.zsks.select{|zsk|\n (zsk.algorithm == key.algorithm) &&\n (zsk.alg_length == key.key_length)}\n next if (zsks.length == 0)\n # Take the \"safest\" value - i.e. 
the longest one in this case\n zsk_lifetime = 0\n zsks.each {|z|\n zsk_lifetime = z.lifetime if (z.lifetime > zsk_lifetime)\n }\n lifetime = zsk_lifetime + @enforcer_interval + @validity\n if timestamp < (Time.now.to_i - lifetime)\n msg = \"ZSK #{key.key_tag} in use too long - should be max #{lifetime} seconds but has been #{Time.now.to_i-timestamp} seconds\"\n @parent.log(LOG_WARNING, msg)\n end\n else\n # c) Warn if KSK inuse longer than KSK:Lifetime + Enforcer:Interval\n # Get the KSK lifetime for this type of key from the config\n ksks = @config.keys.ksks.select{|ksk| (ksk.algorithm == key.algorithm) &&\n (ksk.alg_length == key.key_length)}\n next if (ksks.length == 0)\n # Take the \"safest\" value - i.e. the longest one in this case\n ksk_lifetime = 0\n ksks.each {|k|\n ksk_lifetime = k.lifetime if (k.lifetime > ksk_lifetime)\n }\n lifetime = ksk_lifetime + @enforcer_interval + @validity\n if timestamp < (Time.now.to_i - lifetime)\n# msg = \"KSK #{key.key_tag} in use too long - should be max #{lifetime} seconds but has been #{Time.now.to_i-timestamp} seconds\"\n msg = \"KSK #{key.key_tag} reaching end of lifetime - should be max #{lifetime} seconds but has been #{Time.now.to_i-timestamp} seconds, not including time taken for DS to be seen\"\n @parent.log(LOG_WARNING, msg)\n end\n end\n }\n if (@config.audit_tag_present)\n check_inuse_keys_history(key_ttl)\n end\n end", "def resolve(dns_records, lookup_chain, domain)\n record_A = dns_records[:A]\n record_CNAME = dns_records[:CNAME]\n\n #if domain in recordA\n #add destination to lookup_chain\n if record_A[:source].include?(domain)\n lookup_chain.push(record_A[:ip][record_A[:source].index(domain)])\n\n #if domain in recordCNAME\n #add destination to lookup_chain\n #update domain with destination\n #call the funtion again with new domain(Recursion)\n elsif record_CNAME[:source].include?(domain)\n lookup_chain.push(record_CNAME[:alias][record_CNAME[:source].index(domain)])\n domain = record_CNAME[:alias][record_CNAME[:source].index(domain)]\n resolve(dns_records, lookup_chain, domain)\n else\n return lookup_chain\n end\nend", "def check_up_hosts(hostlisthash, settings={ :retry => true, :maxalert => NO_EMAIL, :timeout => 30})\n if hostlisthash.class==Array\n hostlisthash=hostlisthash.to_h(true)\n end\n if not settings.include?(:timeout)\n settings[:timeout]=30\n end\n if not settings.include?(:retry)\n settings[:retry]=true\n end\n if not settings.include?(:maxalert)\n settings[:maxalert]=NO_EMAIL\n end\n results, unsuccessful_hosts=issue_command_on_hosts(hostlisthash,settings){|h,p| h.backtic(\"hostname --fqdn\").chomp(\"\\n\").strip.downcase}\n uphosts=[]\n results.each{|vp|\n uphosts << ($rename_vp.has_key?(vp.at(0)) ? 
$rename_vp[vp.at(0)] : vp.at(0))\n if vp.at(0) != vp.at(1)\n log { \"check_up_hosts(): vp.at(0) != vp.at(1): #{vp.join(\" \")}\" }\n end\n }\n # if prune\n # unsuccessful_hosts.each{|h|\n # self.unregister_host(h)\n # }\n # end\n return uphosts\n end", "def evaluate_checks\n log.info(\"Evaluating Checks: '#{@config['checks'].length}'\")\n\n @config['checks'].each do |check|\n check_name = check['check']\n check_cfg = check['cfg']\n\n collect_metrics(check_name, check_cfg).each do |metric|\n status = 0\n\n # on service it will come with \"state_required\" flag\n if check_name == 'service'\n # adding defaults in case they are not set\n check_cfg = check_cfg.merge(\n 'state' => 'active',\n 'state_required' => 1\n )\n # giving a service hint by adding it's name\n check_name = \"service_#{check_cfg['name']}\"\n status = equals(metric['value'], check_cfg['state_required'])\n else\n # normal threshold evaluation\n status = evaluate(\n metric['value'],\n check_cfg['warn'],\n check_cfg['crit']\n )\n end\n\n template_variables = metric\n template_variables['cfg'] = check_cfg\n\n append_event(\n \"check_#{check_name}\",\n @tmpl.render(check['check'], template_variables),\n status,\n metric['source']\n )\n end\n end\n end", "def run_batch(batch)\n\n\t\t@results = {}\n\t\t@aliases = {}\n\n\t\tprint_status(\"Sending probes to #{batch[0]}->#{batch[-1]} (#{batch.length} hosts)\")\n\n\t\tbegin\n\t\t\tudp_sock = nil\n\t\t\tidx = 0\n\n\t\t\t# Create an unbound UDP socket if no CHOST is specified, otherwise\n\t\t\t# create a UDP socket bound to CHOST (in order to avail of pivoting)\n\t\t\tudp_sock = Rex::Socket::Udp.create( { 'LocalHost' => datastore['CHOST'] || nil, 'Context' => {'Msf' => framework, 'MsfExploit' => self} })\n\t\t\tadd_socket(udp_sock)\n\n\t\t\t# Try three times since NTP servers can be a bit busy\n\t\t\t1.upto(3) do\n\t\t\tbatch.each do |ip|\n\t\t\t\tnext if @results[ip]\n\n\t\t\t\tbegin\n\t\t\t\t\tdata = probe_pkt_ntp(ip)\n\t\t\t\t\tudp_sock.sendto(data, ip, datastore['RPORT'].to_i, 0)\n\t\t\t\trescue ::Interrupt\n\t\t\t\t\traise $!\n\t\t\t\trescue ::Rex::HostUnreachable, ::Rex::ConnectionTimeout, ::Rex::ConnectionRefused\n\t\t\t\t\tnil\n\t\t\t\tend\n\n\t\t\t\tif (idx % 30 == 0)\n\t\t\t\t\twhile (r = udp_sock.recvfrom(65535, 0.1) and r[1])\n\t\t\t\t\t\tparse_reply(r)\n\t\t\t\t\tend\n\t\t\t\tend\n\n\t\t\t\tidx += 1\n\t\t\tend\n\t\t\tend\n\n\t\t\twhile (r = udp_sock.recvfrom(65535, 10) and r[1])\n\t\t\t\tparse_reply(r)\n\t\t\tend\n\n\t\trescue ::Interrupt\n\t\t\traise $!\n\t\trescue ::Exception => e\n\t\t\tprint_status(\"Unknown error: #{e.class} #{e}\")\n\t\tend\n\n\t\[email protected] do |k|\n\n\t\t\treport_service(\n\t\t\t\t:host => k,\n\t\t\t\t:proto => 'udp',\n\t\t\t\t:port => datastore['RPORT'].to_i,\n\t\t\t\t:name => 'NTP'\n\t\t\t)\n\n\t\t\treport_note(\n\t\t\t\t:host => k,\n\t\t\t\t:proto => 'udp',\n\t\t\t\t:port => datastore['RPORT'].to_i,\n\t\t\t\t:type => 'ntp.monlist',\n\t\t\t\t:data => {:monlist => @results[k]}\n\t\t\t)\n\n\t\t\tif (@aliases[k] and @aliases[k].keys[0] != k)\n\t\t\t\treport_note(\n\t\t\t\t\t:host => k,\n\t\t\t\t\t:proto => 'udp',\n\t\t\t\t\t:port => datastore['RPORT'].to_i,\n\t\t\t\t\t:type => 'ntp.addresses',\n\t\t\t\t\t:data => {:addresses => @aliases[k].keys}\n\t\t\t\t)\n\t\t\tend\n\n\t\t\tif (datastore['StoreNTPClients'])\n\t\t\t\tprint_status(\"#{k} Storing #{@results[k].length} NTP client hosts in the database...\")\n\t\t\t\t@results[k].each do |r|\n\t\t\t\t\tmaddr,mport,mserv = r\n\t\t\t\t\treport_note(:host => maddr, :type => 'ntp.client.history', :data => 
{:address => maddr, :port => mport, :server => mserv})\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\tend", "def postprocess(msgs, _results)\n groups = {}\n msgs.each do |m|\n os = m.metadata[:originserver]\n\n next if os.nil? || os[:address].nil?\n\n address = IPAddr.new os[:address]\n range = address.ipv4? ? 16..32 : 48..128\n range.each do |mask|\n groups[address.mask(mask)] ||= []\n groups[address.mask(mask)] << m.digest_as_hex\n end\n end\n\n { address_ranges: groups }\n end", "def for_tree_nodes(tree_nodes, check_root_record = true)\n return {} if tree_nodes.empty?\n assert_same_type!(tree_nodes)\n\n # E.g. ArchivalObject\n node_model = tree_nodes[0].class.node_model\n\n # E.g. Resource\n root_model = tree_nodes[0].class.root_model\n\n # Initialize our result map to true -- assuming \"published\" by default.\n result = Hash[tree_nodes.map {|node| [node, true]}]\n\n if check_root_record\n # If we're the top-level call, we'll check the repository and root\n # record's publication status. There's no point doing this at every\n # level of the tree, but do it up front to save some potential work.\n root_record_id_to_child = {}\n tree_nodes.each do |node|\n if repository_published?(node[:repository_id])\n root_record_id_to_child[node.root_record_id] ||= []\n root_record_id_to_child[node.root_record_id] << node\n else\n result[node] = false\n end\n end\n\n return result if root_record_id_to_child.empty?\n\n root_model\n .filter(:id => root_record_id_to_child.keys)\n .filter(Sequel.|({:publish => 0},\n {:suppressed => 1}))\n .select(:id)\n .each do |root_record|\n root_record_id_to_child.fetch(root_record.id).each do |node|\n result[node] = false\n end\n end\n end\n\n parent_id_to_child = {}\n tree_nodes.each do |node|\n if result[node] && node.publish == 1 && node.suppressed == 0\n # OK so far, but check the ancestors.\n if node.parent_id\n parent_id_to_child[node.parent_id] ||= []\n parent_id_to_child[node.parent_id] << node\n end\n else\n # Unpublished/suppressed. 
Nothing more to check.\n result[node] = false\n end\n end\n\n unless parent_id_to_child.empty?\n parent_ids = parent_id_to_child.keys\n parent_publication_status = for_tree_nodes(node_model.filter(:id => parent_ids)\n .select(:id, :parent_id, :root_record_id, :publish, :suppressed)\n .all,\n false)\n\n parent_publication_status.each do |parent, published|\n # If the parent was unpublished, that overrides our previous result.\n parent_id_to_child.fetch(parent.id).each do |node|\n result[node] &&= published\n end\n end\n end\n\n result\n end", "def intelligent_nodeps(mapping, package, nodeps_sym = :delete, deps_sym = :cant_delete, active_criteria = false)\n vnfds, nsds, testds, files, cant_delete_vnfds = [], [], [], [], []\n cant_delete_nsds, cant_delete_testds, cant_delete_files = [], [], []\n mapping.each do |content|\n # next if content['content-type'].split('.')[-2] == 'osm'\n if content['content-type'].split('.')[-1] == 'vnfd'\n if check_dependencies( content, package.pd, active_criteria)\n logger.info 'VNFD ' + content['id'][:name] + ' has more than one dependency'\n cant_delete_vnfds << content['id']\n else\n vnfds << content['id']\n end\n elsif content['content-type'].split('.')[-1] == 'nsd'\n if check_dependencies(content, package.pd, active_criteria)\n logger.info 'NSD ' + content['id'][:name] + ' has more than one dependency'\n cant_delete_nsds << content['id']\n else\n nsds << content['id']\n end\n elsif content['content-type'].split('.')[-1] == 'tstd'\n if check_dependencies(content, package.pd, active_criteria)\n logger.info 'TESTD ' + content['id'][:name] + ' has more than one dependency'\n cant_delete_testds << content['id']\n else\n testds << content['id']\n end\n elsif content['content-type'].split('.')[-1] != 'ref'\n if check_dependencies_files(content, package.pd, active_criteria)\n logger.info 'File with {uuid =>' + content[:uuid] + '} has more than one dependency'\n cant_delete_files << {uuid: content[:uuid]}\n else\n files << {uuid: content[:uuid]}\n end\n end\n end\n { nodeps_sym => { vnfds: vnfds, nsds: nsds, testds: testds, files: files },\n deps_sym => { vnfds: cant_delete_vnfds, nsds: cant_delete_nsds,\n testds: cant_delete_testds, files: cant_delete_files} }\n end", "def domain_update(args)\n if args.key?(:chg) && args[:chg].key?(:registrant)\n raise ArgumentError, 'You need to do a trade or recover operation to change the registrant'\n end\n has_contacts = args.key?(:add) && args[:add].key?(:contacts) || args.key?(:add) && args[:add].key?(:contacts)\n has_ns = args.key?(:add) && args[:add].key?(:ns) || args.key?(:add) && args[:add].key?(:ns)\n has_other = args.key?(:add) && args[:add].key?(:status) || args.key?(:add) && args[:add].key?(:status) || args.key?(:chg) && args[:chg].key?(:authInfo)\n if [has_contacts, has_ns, has_other].count { |v| v } > 1\n raise ArgumentError, \"You can't update all that at one time\"\n end\n [:add, :rem].each do |ar|\n if args.key?(ar) && args[ar].key?(:ns) && args[ar][:ns].first.is_a?(String)\n args[ar][:ns] = args[ar][:ns].map { |ns| { :hostName => ns } }\n end\n end\n super\n end", "def get_njobs(nodes)\n\n # Reset job count on each input node\n nodes.each do |wn|\n wn[:njobs] = -1\n end\n\n begin\n pbsnodes_xml = REXML::Document.new( %x[ #{$cmd_pbsnodes} 2> /dev/null ] )\n rescue\n return\n end\n\n return if pbsnodes_xml.elements.empty?\n\n pbsnodes_xml.elements.each('//Data/Node') do |node_xml|\n\n name = node_xml.elements['name'].text\n is_offline = node_xml.elements['state'].text.include?('offline')\n is_down = 
node_xml.elements['state'].text.include?('down')\n\n jobs_xml = node_xml.elements['jobs']\n if jobs_xml\n njobs = jobs_xml.text.split(' ').length\n else\n njobs = 0\n end\n\n # Find matching input nodes: FQDN must be set, node must be up and offline\n nodes.each do |wn|\n next unless wn[:fqdn] and wn[:fqdn] == name and is_offline and !is_down\n wn[:njobs] = njobs\n end\n\n end\n\nend", "def update_node_search_domains_with_http_info(node_search_domains_properties, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: NsxComponentAdministrationApi.update_node_search_domains ...\"\n end\n # verify the required parameter 'node_search_domains_properties' is set\n if @api_client.config.client_side_validation && node_search_domains_properties.nil?\n fail ArgumentError, \"Missing the required parameter 'node_search_domains_properties' when calling NsxComponentAdministrationApi.update_node_search_domains\"\n end\n # resource path\n local_var_path = \"/node/network/search-domains\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(node_search_domains_properties)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'NodeSearchDomainsProperties')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: NsxComponentAdministrationApi#update_node_search_domains\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def pdb_get_facts(node_ip_hostname)\n keyed_facts = {}\n\n if test_env\n response = \"[{\\\"certname\\\":\\\"host-name-01.domain.com\\\",\\\"name\\\":\\\"trusted\\\",\\\"value\\\":{\\\"authenticated\\\":\\\"remote\\\",\\\"certname\\\":\\\"host-name-01.domain.com\\\",\\\"domain\\\":\\\"domain.com\\\",\\\"extensions\\\":{\\\"company_trusted_swimlane\\\":\\\"n/a\\\",\\\"pp_datacenter\\\":\\\"mtv\\\",\\\"pp_environment\\\":\\\"qa\\\",\\\"pp_product\\\":\\\"test\\\",\\\"pp_role\\\":\\\"rabbit_mq\\\"},\\\"hostname\\\":\\\"host-name-01\\\"},\\\"environment\\\":\\\"tier2\\\"},{\\\"certname\\\":\\\"puppet.upguard.org\\\",\\\"environment\\\":\\\"production\\\",\\\"name\\\":\\\"virtual\\\",\\\"value\\\":\\\"#{TEST_OS_VIRT_PLATFORM}\\\"},{\\\"certname\\\":\\\"puppet.upguard.org\\\",\\\"environment\\\":\\\"production\\\",\\\"name\\\":\\\"operatingsystemmajrelease\\\",\\\"value\\\":\\\"#{TEST_OS_MAJOR_RELEASE}\\\"},{\\\"certname\\\":\\\"puppet.upguard.org\\\",\\\"environment\\\":\\\"production\\\",\\\"name\\\":\\\"operatingsystem\\\",\\\"value\\\":\\\"#{TEST_OS}\\\"}]\"\n else\n response = `curl -X GET #{PUPPETDB_URL}/pdb/query/v4/nodes/#{node_ip_hostname}/facts -d 'query=[\"or\", [\"=\",\"name\",\"trusted\"], [\"=\",\"name\",\"virtual\"], [\"=\",\"name\",\"operatingsystem\"], [\"=\",\"name\",\"operatingsystemmajrelease\"]]' --tlsv1 --cacert /etc/puppetlabs/puppet/ssl/certs/ca.pem --cert /etc/puppetlabs/puppet/ssl/certs/#{COMPILE_MASTER_PEM} --key 
/etc/puppetlabs/puppet/ssl/private_keys/#{COMPILE_MASTER_PEM}`\n Puppet.info(\"#{log_prefix} trusted facts for #{node_ip_hostname} is: response=#{response}\")\n end\n\n if response.nil?\n return nil\n end\n facts = JSON.load(response)\n if !facts.is_a?(Array) && !facts.any?\n return nil\n end\n facts.each do |fact|\n keyed_facts[fact['name']] = fact\n end\n keyed_facts\n end", "def update!(**args)\n @dns_search_domains = args[:dns_search_domains] if args.key?(:dns_search_domains)\n @dns_servers = args[:dns_servers] if args.key?(:dns_servers)\n @ntp_servers = args[:ntp_servers] if args.key?(:ntp_servers)\n end", "def health_checks\n SERVICES.each do |service_name, service_info|\n puts \"Health Checking this service URL: #{service_info[:health_check_url]}\"\n response = RestClient::Request.execute(\n method: :get,\n url: service_info[:health_check_url]\n )\n puts JSON.parse(response)\n end\n end", "def facts_for_node(certnames)\n return {} if certnames.empty? || certnames.nil?\n\n certnames.uniq!\n name_query = certnames.map { |c| [\"=\", \"certname\", c] }\n name_query.insert(0, \"or\")\n\n @logger.debug(\"Querying certnames\")\n result = make_query(name_query, 'inventory')\n\n result&.each_with_object({}) do |node, coll|\n coll[node['certname']] = node['facts']\n end\n end", "def hash\n [host_list, total_matching, total_returned].hash\n end", "def update_job_servers\n # Check if it's been > TIME_BETWEEN_CHECKS or we have no good servers\n return unless time_to_check_connections || @job_servers.empty?\n\n logger.debug \"Found #{@bad_servers.size} zombie connections, checking pulse.\"\n @servers_mutex.synchronize do\n @bad_servers.each do |connection|\n begin\n message = \"Testing server #{connection}...\"\n if connection.is_healthy?\n logger.debug \"#{message} Connection is healthy, putting back into service\"\n activate_connection(connection)\n else\n logger.debug \"#{message} Still down.\"\n end\n end\n end\n end\n\n # Sleep for a few to allow a chance for the world to become sane\n if @job_servers.empty?\n logger.warn \"No job servers available, sleeping for #{SLEEP_TIME} seconds\"\n sleep(SLEEP_TIME)\n end\n\n @last_check_time = Time.now\n end", "def all(domain,username,password,dc_ip)\n\n enum_dom_users(domain,username,password,dc_ip)\n\n enum_dom_groups(domain,username,password,dc_ip)\n\n groupArr = [\"Domain Admins\",\"Domain Computers\",\"Enterprise Admins\",\"Administrators\"]\n\n groupArr.each {|group| enum_group_membership(domain,username,password,dc_ip,group)}\n\n priv_groups(domain,username,password,dc_ip)\n\n grab_attr(domain,username,password,dc_ip)\nend", "def update_nodes\n mongo_driver = Kymera::MongoDriver.new(address, port, database, 'nodes')\n @registered_nodes = mongo_driver.get_collection('nodes')\n end", "def update_status\n @servers.each do |type|\n begin\n @server_status.send(\"#{type}_reinitialize\") # re-ping the server\n @status[type] = @server_status.send(\"#{type}_all_info\")\n\n # All sorts of invalid input can potentially cause an error. 
Whatever it is, just make sure we return a valid object.\n rescue Exception => e\n warn \"[#{Time.now}] #{e.inspect}\"\n e.backtrace.each do |msg|\n warn \"[#{Time.now}] #{msg}\"\n end\n @status[type] = {}\n end\n end\n end", "def parse_dns(raw)\n # Filtering Lines with Comments and Empty Lines\n dns_filter = raw.select { |x| x[0] != \"#\" && x != \"\\n\" }.map {|x| x.split(\", \")}\n\n # Building the Hash\n dns_hash = {}\n dns_filter.each do |x| \n dns_hash[x[1]] = {\n :type => x[0],\n :target => x[2]\n }\n end\n \n dns_hash\nend", "def checkHosts(layer)\n @host_facts.each do |f|\n # each host has a list of facts\n f[1].each do |l|\n if l['deploop_category'] == layer\n up = @mchandler.ifHostUp f[0]\n if @opt.verbose\n puts \"checking host #{f[0]} is up: \" \n puts up\n end\n if !up\n msg = \"ERROR: host \\'#{f[0]}\\' is unreachable. Aboring.\"\n @outputHandler.msgError msg\n end\n deplUp = @mchandler.checkIfDeploopHost f[0]\n if @opt.verbose\n puts \"checking Deploop enabled host #{f[0]}: \" \n puts deplUp\n end\n if !deplUp\n msg = \"ERROR: host \\'#{f[0]}\\' is not Deploop enabled, fix this. Aborting.\"\n @outputHandler.msgError msg\n end\n end\n end\n\n end # @host_facts.each\n msg = \"The layer \\'#{layer}\\' has all host Deploop enabled\"\n @outputHandler.msgOutput msg\n end", "def parse_dns(raw)\n # Filtering Lines with Comments and Empty Lines\n dns_filter = raw.select {|x| x[0]!= \"#\" && x != \"\\n\" }\n\n # Creating a List with 3 Columns\n dns_filter_list = []\n dns_filter.each {|x| dns_filter_list.push(x.split(\", \"))}\n\n # Creating the List each DNS for Hash\n record_type_list = []\n source_list = []\n destination_list = []\n\n dns_filter_list.each do |x|\n record_type_list.push(x[0])\n source_list.push(x[1])\n destination_list.push(x[2])\n end\n\n # Building the Hash\n dns_hash = {\n \"RECORDTYPE\".to_sym => record_type_list,\n \"SOURCE\".to_sym => source_list,\n \"DESTINATION\".to_sym => destination_list,\n }\n return dns_hash\nend", "def get_prim_uniq_sites\n\t\tputs \"Retrieve and prime unique sites in the site store. \" if @verbose\n\t\thost_tracker=Wmap::HostTracker.instance\n\t\thost_tracker.data_dir=@data_dir\n\t\tprimary_host_tracker=Wmap::HostTracker::PrimaryHost.instance\n\t\tprimary_host_tracker.data_dir=@data_dir\n\t\tprimary_host_tracker.hosts_file = primary_host_tracker.data_dir + \"/\" + \"prime_hosts\"\n\t\tprimary_host_tracker.known_hosts=primary_host_tracker.load_known_hosts_from_file(@hosts_file)\n\t\t# Step 1. Retrieve the unique site list first\n\t\tsites=get_uniq_sites\n\t\tprim_uniq_sites=Array.new\n\t\t# Step 2. 
Iterate on the unique site list, spit out the site in the primary host format one at a time\n\t\tsites.map do |site|\n\t\t\tputs \"Work on priming unique site: #{site}\" if @verbose\n\t\t\thost=url_2_host(site)\n\t\t\t# case#1, for the IP only site, do nothing (presuming 'refresh_ip_sites' or 'refresh_all' method already take care of the potential discrepancy here).\n\t\t\tif is_ip?(host)\n\t\t\t\tprim_uniq_sites.push(site)\n\t\t\t\tnext\n\t\t\tend\n\t\t\tip=@known_sites[site]['ip']\n\t\t\t# case#2, for site with an unique IP, do nothing\n\t\t\tputs \"Local hosts table entry count for #{ip}: #{host_tracker.alias[ip]}\" if @verbose\n\t\t\tif host_tracker.alias[ip] == 1\n\t\t\t\tprim_uniq_sites.push(site)\n\t\t\t\tnext\n\t\t\tend\n\t\t\t# case#3, case of multiple IPs for A DNS record, where the site IP may have 0 alias count, do nothing\n\t\t\tif host_tracker.alias[ip] == nil\n\t\t\t\tprim_uniq_sites.push(site)\n\t\t\t\tnext\n\t\t\tend\n\t\t\t# case#4, for the site has a duplicate IP with others, we try to determine which one is the primary site\n\t\t\t# raise \"Error: inconsistency detected on record: #{site}. Please run the following shell command to refresh it first: \\n\\srefresh #{site}\" if tracker1.alias[ip].nil?\n\t\t\tif ( primary_host_tracker.known_hosts.key?(ip) and (host_tracker.alias[ip] > 1) )\n\t\t\t\tnew_host=primary_host_tracker.prime(host)\n\t\t\t\tputs \"Host: #{host}, New host:#{new_host}\" if @verbose\n\t\t\t\tunless host==new_host\n\t\t\t\t\tnew_site=site.sub(host,new_host)\n\t\t\t\t\traise \"Site not found in the site tracking data repository: #{new_site}. You may need to add it into the site store first. Execute the following shell command before trying again: \\n\\twadd #{new_site}\\n\" unless @known_sites.key?(new_site)\n\t\t\t\t\tnew_ip=@known_sites[new_site]['ip']\n\t\t\t\t\tif new_ip==ip\t\t# consistency check\n\t\t\t\t\t\tsite=new_site\n\t\t\t\t\telse\n\t\t\t\t\t\t# TBD - case of multiple IPs for A DNS record\n\t\t\t\t\t\t#raise \"Inconsistency found on prime host entrance: #{new_ip}, #{ip}; #{new_site}, #{site}. Please refresh your entries by running the following shell command: \\n\\s refresh #{new_site}\"\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\t\tprim_uniq_sites.push(site)\n\t\tend\n\t\tprimary_host_tracker=nil\n\t\thost_tracker=nil\n\t\treturn prim_uniq_sites\n\t#rescue => ee\n\t#\tputs \"Exception on method #{__method__}: #{ee}\"\n\tend", "def health_check\n ret = {}\n unready = []\n # We are ignoring the ceph nodes, as they should already be in crowbar_upgrade state\n NodeObject.find(\"NOT roles:ceph-*\").each do |node|\n unready << node.name unless node.ready?\n end\n ret[:nodes_not_ready] = unready unless unready.empty?\n failed = Proposal.all.select { |p| p.active? && p.failed? }\n ret[:failed_proposals] = failed.map(&:display_name) unless failed.empty?\n ret\n end", "def wanted_records(records, domains)\n _records = records.select { |m| domains.include?(m[1]) }\n _records = _records.each_cons(2).select { |a, b| a.last == b.last }\n _records = _records.group_by(&:last).keys.map do |v|\n { record_id: v.first, ip_address: v.last }\n end\n _records\n end", "def deploy_dns host_instance\n # Before we deploy puppet, we need to (possibly generate) and read out the nsupdate key(s)\n domain_list = [@deployment.dns.app_domain]\n if @deployment.dns.register_components?\n domain_list << @deployment.dns.component_domain\n end\n domain_list.each do |dns_domain|\n print \"* Checking for #{dns_domain} DNS key... 
\"\n key_filepath = \"/var/named/K#{dns_domain}*.key\"\n key_check = host_instance.exec_on_host!(\"ls #{key_filepath}\")\n if key_check[:exit_code] == 0\n puts 'found.'\n else\n # No key; build one.\n puts 'not found; attempting to generate.'\n key_gen = host_instance.exec_on_host!(\"dnssec-keygen -a HMAC-MD5 -b 512 -n USER -r /dev/urandom -K /var/named #{dns_domain}\")\n if key_gen[:exit_code] == 0\n puts '* Key generation successful.'\n else\n display_error_info(host_instance, key_gen, 'Could not generate a DNS key.')\n return false\n end\n end\n\n # Copy the public key info to the config file.\n key_text = host_instance.exec_on_host!(\"cat #{key_filepath}\")\n if key_text[:exit_code] != 0 or key_text[:stdout].nil? or key_text[:stdout] == ''\n display_error_info(host_instance, key_text, \"Could not read DNS key data from #{key_filepath}.\")\n return false\n end\n\n # Format the public key correctly.\n key_vals = key_text[:stdout].strip.split(' ')\n nsupdate_key = \"#{key_vals[6]}#{key_vals[7]}\"\n if dns_domain == @deployment.dns.app_domain\n @puppet_global_config['bind_key'] = nsupdate_key\n else\n @puppet_global_config['dns_infrastructure_key'] = nsupdate_key\n end\n end\n\n # Make sure BIND is enabled.\n dns_restart = host_instance.exec_on_host!('service named restart')\n if dns_restart[:exit_code] == 0\n puts '* BIND DNS enabled.'\n else\n display_error_info(host_instance, dns_restart, \"Could not enable BIND DNS on #{host_instance.host}.\")\n return false\n end\n return true\nend", "def replace_nameservers(hostname, new_attributes, domains: [])\n transaction do\n domain_scope = domains.dup\n domain_list = []\n failed_list = []\n\n nameservers.where(hostname: hostname).find_each do |origin|\n idn = origin.domain.name\n puny = origin.domain.name_puny\n next unless domains.include?(idn) || domains.include?(puny) || domains.empty?\n\n if domain_not_updatable?(hostname: new_attributes[:hostname], domain: origin.domain)\n failed_list << idn\n next\n end\n\n new_nameserver = Nameserver.new\n new_nameserver.domain = origin.domain\n new_nameserver.attributes = new_attributes\n new_nameserver.save!\n\n domain_scope.delete_if { |i| i == idn || i == puny }\n domain_list << idn\n\n origin.destroy!\n end\n\n self.domains.where(name: domain_list).find_each(&:update_whois_record) if domain_list.any?\n [domain_list.uniq.sort, (domain_scope + failed_list).uniq.sort]\n end\n end", "def parse_dns(dns_raw)\n dns_records = {}\n dns_raw.each do |rec|\n rec=rec.chomp\n unless rec[0] == \"#\" || rec.empty?\n records = rec.split(/,/)\n records = records.map {|recd| recd.strip()}\n unless dns_records.has_key?(records[0])\n dns_records.store(records[0],[[records[1],records[2]]])\n else\n dns_records[records[0]].push([records[1],records[2]])\n end\n end\n end\n return dns_records\nend", "def check_passenger_monitoring\n passenger_plugins = [\n {\"plugin_name\"=>\"passenger\", \"plugin_type\"=>\"passenger_instances\",\"field\"=>\"value\"},\n {\"plugin_name\"=>\"passenger\", \"plugin_type\"=>\"passenger_processes\",\"field\"=>\"max\"},\n {\"plugin_name\"=>\"passenger\", \"plugin_type\"=>\"passenger_queued\",\"field\"=>\"value\"},\n {\"plugin_name\"=>\"passenger\", \"plugin_type\"=>\"passenger_requests\",\"field\"=>\"value\"}\n ]\n sleep 60 # wait for some data to be available\n @servers.each do |server|\n unless server.multicloud\n#passenger commands to generate data for collectd to return\n# for ii in 1...100\n# # how do we force there to be data?? 
For now just check that the graph exists - cause the\n# # bug was missing graphs.\n# end\n passenger_plugins.each do |plugin|\n monitor = obj_behavior(server, :get_sketchy_data, {'start' => -60,\n 'end' => -20,\n 'plugin_name' => plugin['plugin_name'],\n 'plugin_type' => plugin['plugin_type']})\n value = monitor['data'][\"#{plugin['field']}\"]\n puts \"Checking #{plugin['plugin_name']}-#{plugin['plugin_type']}: value #{value}\"\n raise \"No #{plugin['plugin_name']}-#{plugin['plugin_type']} data\" unless value.length > 0\n# # Need to check for that there is at least one non 0 value returned.\n# for nn in 0...value.length\n# if value[nn] > 0\n# break\n# end\n# end\n# raise \"No #{plugin['plugin_name']}-#{plugin['plugin_type']} time\" unless nn < value.length\n puts \"Monitoring is OK for #{plugin['plugin_name']}-#{plugin['plugin_type']}\"\n end\n end\n end\n end", "def parse_dns(raw)\n # Filtering Lines with Comments and Empty Lines\n dns_filter = raw.select { |x| x[0] != \"#\" && x != \"\\n\" }\n\n # Creating a List with 3 Columns\n dns_filter_list = []\n dns_filter.each { |x| dns_filter_list.push(x.split(\", \")) }\n\n # Creating the List each DNS for Hash\n record_type_list = []\n source_list = []\n destination_list = []\n\n dns_filter_list.each do |x|\n record_type_list.push(x[0])\n source_list.push(x[1])\n destination_list.push(x[2])\n end\n\n # Building the Hash\n dns_hash = {\n \"RECORDTYPE\".to_sym => record_type_list,\n \"SOURCE\".to_sym => source_list,\n \"DESTINATION\".to_sym => destination_list,\n }\n return dns_hash\nend", "def updateTable(cmd)\n # puts \"TRYING TO UPDATE TABLE\"\n sentFrom = cmd.shift\n curr_edge_time = nil\n new_edge_time = nil\n new_edge_cost = nil\n node = $node_info.new\n arr = nil\n hops = nil\n lis = nil\n loop{\n new_edge_time = cmd[3].to_i\n new_edge_cost = cmd[2].to_i\n\n $lock.synchronize{\n curr_edge_time = $network.get_time(cmd[0],cmd[1])\n }\n\n if curr_edge_time == 0\n #name of srcNode,name of destNode,cost of edge,time of Edge\n $lock.synchronize{\n $network.undir_connection(cmd[0], cmd[1], new_edge_time, new_edge_cost)\n }\n if ($rt.has_key?(cmd[0]) != true)\n node.src = $hostname\n node.dst = cmd[0]\n node.cost = nil #do dijsktras\n node.nexthop = nil #do dijsktras\n $lock.synchronize{\n $rt[cmd[0]] = node\n }\n end \n if($rt.has_key?(cmd[1]) != true)\n node.src = $hostname\n node.dst = cmd[1]\n node.cost = nil #do dijsktras\n node.nexthop = nil #do dijsktras\n $lock.synchronize{\n $rt[cmd[1]] = node\n }\n \n end\n\n elsif curr_edge_time < new_edge_time\n $lock.synchronize{\n $network.update_cost(cmd[0], cmd[1], new_edge_time, new_edge_cost)\n }\n end \n \n cmd.shift(4)\n break if cmd.length < 4\n \n # puts \"ABOUT TO RUN DIJKSTRAS\"\n $lock.synchronize{\n arr = $network.dijkstra($hostname) \n }\n $full_path = arr[0]\n #puts \"THIS IS THE RETURN OF DIJKSTRAS #{arr}\" \n $lock.synchronize{\n $rt.each_key {|key|\n update = $node_info.new \n # puts \"Key IS #{key}\"\n hops = arr[0]\n lis = arr[1]\n prevs = hops[key]\n update.src = $hostname\n update.dst = key\n update.cost = lis[key]\n update.nexthop = prevs[1]\n $rt[key] = update\n # puts \"ROUTING TABLE #{$rt}\"\n }\n }\n }\nend", "def known_host_hash?(hostlist, entries); end", "def update\n\t\trespond_to do |format|\n\t\t\tif @domain.update_attributes(:hostname => params[:hostname])\n\t\t\t\tformat.json { head :no_content, status: :ok }\n\t\t\telse\n\t\t\t\tformat.json { render json: @domain.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\n\t\t# Fetch the hostname IP address and 
update the record in a new thread\n\t\tt1=Thread.new{fetch_origin_ip()}\n\t\tt1.join\n\tend", "def auto_discover_nodes!\n @servers = execute(:all_nodes)\n end", "def nodetool_status()\n out = `/opt/cassandra/bin/nodetool status`\n raise 'nodetool status failed' if $? != 0\n rows = out.split(\"\\n\")\n hash = {}\n dc_exp = /Datacenter: (.*)/\n #vnode\n #-- Address Load Tokens Owns Host ID Rack\n #non-vnode\n #-- Address Load Owns Host ID Token Rack\n #node_ex = /^(?<status>[UD\\?][NLJM]) +(?<address>(?:[0-9]{1,3}\\.){3}[0-9]{1,3}) +(?<load>(\\d+\\.?\\d* (TB|GB|MB|KB|bytes))|\\?) +(?<tokens>\\d+) +(?<owns>(\\d+\\.?\\d*%|\\?)) +(?<hostid>[a-z0-9\\-]+) +(?<rack>.*)$/\n node_ex = /^([UD\\?][NLJM]) +((?:[0-9]{1,3}\\.){3}[0-9]{1,3}) +((?:\\d+\\.?\\d* (?:TB|GB|MB|KB|bytes))|\\?) +(\\d+) +(\\d+\\.?\\d*%|\\?) +([a-z0-9\\-]+) +(.*)$/\n datacenter = nil\n rows.each do |row|\n m = dc_exp.match(row)\n if m\n datacenter = m[1]\n next\n end\n m = node_ex.match(row)\n next if m == nil\n node = {'datacenter' => datacenter}\n hash[m[2]] = node\n i = 0\n %w(status address load tokens owns hostid rack).each do |name|\n node[name] = m[i += 1]\n end\n # m.names.each do |name|\n # node[name] = m[name]\n # end\n end\n return hash\n end", "def check_flags(hosts)\n flags = {}\n hosts.keys.each do |host|\n flags[host] = {}\n\n errors = check_sources(hosts[host])\n next if errors == ''\n flags[host]['text'] = errors\n end\n flags\n end", "def parse_old\n\t@services=Hash.new\n\tf_site=File.open(ARGV[0],'r')\n\tf_site.each do |line|\n\t\tsite=line.chomp.strip\n\t\tsite=Wmap::HostTracker.instance.url_2_site(site)\n\t\tabort \"Error on processing site: #{site}\" if site.nil?\n\t\thost=Wmap::HostTracker.instance.url_2_host(site)\n\t\tabort \"Error on processing host: #{host}\" if host.nil?\n\t\tip=Wmap::HostTracker.instance.local_host_2_ip(host)\n\t\tip=Wmap::HostTracker.instance.host_2_ip(host) if ip.nil?\n\t\tnext if ip.nil?\n\t\tnext unless Wmap::HostTracker.instance.is_ip?(ip)\n\t\tport=Wmap::HostTracker.instance.url_2_port(site)\n\t\tkey=ip+\":\"\n\t\tkey+=port.to_s\n\t\t@services[key]=true unless @services.key?(key)\n\tend\n\tf_site.close\nend", "def update_node_search_domains(node_search_domains_properties, opts = {})\n data, _status_code, _headers = update_node_search_domains_with_http_info(node_search_domains_properties, opts)\n return data\n end", "def common_nodes(remote, opts={:heads => nil, :force => nil, :base => nil})\n # variable prep!\n node_map = changelog.node_map\n search = []\n unknown = []\n fetch = {}\n seen = {}\n seen_branch = {}\n opts[:base] ||= {}\n opts[:heads] ||= remote.heads\n \n # if we've got nothing...\n if changelog.tip == NULL_ID\n opts[:base][NULL_ID] = true # 1 is stored in the Python\n \n return [NULL_ID], [NULL_ID], opts[:heads].dup unless opts[:heads] == [NULL_ID]\n return [NULL_ID], [], [] # if we didn't trip ^, we're returning this\n end\n \n # assume we're closer to the tip than the root\n # and start by examining heads\n UI::status 'searching for changes'\n \n opts[:heads].each do |head|\n if !node_map.include?(head)\n unknown << head\n else\n opts[:base][head] = true # 1 is stored in the Python\n end\n end\n \n opts[:heads] = unknown # the ol' switcheroo\n return opts[:base].keys, [], [] if unknown.empty? 
# BAIL\n \n # make a hash with keys of unknown\n requests = Hash.with_keys unknown\n count = 0\n \n # Search through the remote branches\n # a branch here is a linear part of history, with 4 (four)\n # parts:\n #\n # head, root, first parent, second parent\n # (a branch always has two parents (or none) by definition)\n #\n # Here's where we start using the Hashes instead of Arrays\n # trick. Keep an eye out for opts[:base] and opts[:heads]!\n unknown = remote.branches(*unknown)\n until unknown.empty?\n r = []\n \n while node = unknown.shift\n next if seen.include?(node[0])\n UI::debug \"examining #{short node[0]}:#{short node[1]}\"\n \n if node[0] == NULL_ID\n # Do nothing...\n elsif seen_branch.include? node\n UI::debug 'branch already found'\n next\n elsif node_map.include? node[1]\n UI::debug \"found incomplete branch #{short node[0]}:#{short node[1]}\"\n search << node[0..1]\n seen_branch[node] = true # 1 in the python\n else\n unless seen.include?(node[1]) || fetch.include?(node[1])\n if node_map.include?(node[2]) and node_map.include?(node[3])\n UI::debug \"found new changset #{short node[1]}\"\n fetch[node[1]] = true # 1 in the python\n end # end if\n \n node[2..3].each do |p|\n opts[:base][p] = true if node_map.include? p\n end\n end # end unless\n \n node[2..3].each do |p|\n unless requests.include?(p) || node_map.include?(p)\n r << p\n requests[p] = true # 1 in the python\n end # end unless\n end # end each\n end # end if\n \n seen[node[0]] = true # 1 in the python\n end # end while\n \n unless r.empty?\n count += 1\n \n UI::debug \"request #{count}: #{r.map{|i| short i }}\"\n \n (0 .. (r.size-1)).step(10) do |p|\n remote.branches(r[p..(p+9)]).each do |b|\n UI::debug \"received #{short b[0]}:#{short b[1]}\"\n unknown << b\n end\n end\n end # end unless\n end # end until\n \n # sorry for the ambiguous variable names\n # the python doesn't name them either, which\n # means I have no clue what these are\n find_proc = proc do |item1, item2|\n fetch[item1] = true\n opts[:base][item2] = true\n end\n \n # do a binary search on the branches we found\n search, new_count = *binary_search(:find => search,\n :repo => remote,\n :node_map => node_map,\n :on_find => find_proc)\n count += new_count # keep keeping track of the total\n \n # sanity check, because this method is sooooo fucking long\n fetch.keys.each do |f|\n if node_map.include? 
f\n raise RepoError.new(\"already have changeset #{short f[0..3]}\")\n end\n end\n \n if opts[:base].keys == [NULL_ID]\n if opts[:force]\n UI::warn 'repository is unrelated'\n else\n raise RepoError.new('repository is unrelated')\n end\n end\n \n UI::debug \"found new changesets starting at #{fetch.keys.map{|f| short f }.join ' '}\"\n UI::debug \"#{count} total queries\"\n \n # on with the show!\n [opts[:base].keys, fetch.keys, opts[:heads]]\n end", "def domain_update(args)\n response = send_request(domain_update_xml(args))\n\n get_result(response)\n end", "def update_subdomains\n backup_subdomains\n parse_subdomains\n end", "def call\n result = {}\n\n # loop on local remotes\n @local_campaigns.each do |local_campaign|\n discrepancies = []\n\n # find remote campaign using external reference\n remote_campaign = remote_campaign_by_local_reference(local_campaign.external_reference)\n\n if remote_campaign\n DISCREPANCY_ATTRIBUTES.each do |local_attr, remote_attr|\n if local_campaign[local_attr] != remote_campaign[remote_attr]\n discrepancies << discrepancy_hash(remote_attr, remote_campaign[remote_attr], local_campaign[local_attr])\n end\n end\n else\n @missing_remote_campaigns << new_campaign_hash(local_campaign)\n end\n\n unless discrepancies.empty?\n @changed_campaigns << changed_campaign_hash(local_campaign.external_reference, discrepancies)\n end\n end\n\n result[:changed_campaigns] = @changed_campaigns unless @changed_campaigns.empty?\n result[:missing_remote_campaigns] = @missing_remote_campaigns unless @missing_remote_campaigns.empty?\n\n result\n end", "def parse_old\n\thost_tracker=Wmap::HostTracker.instance\n\t@services=Hash.new\n\tf_site=File.open(ARGV[0],'r')\n\tf_site.each do |line|\n\t\tsite=line.chomp.strip\n\t\tsite=host_tracker.url_2_site(site)\n\t\tabort \"Error on processing site: #{site}\" if site.nil?\n\t\thost=host_tracker.url_2_host(site)\n\t\tabort \"Error on processing host: #{host}\" if host.nil?\n\t\tip=host_tracker.local_host_2_ip(host)\n\t\tip=host_tracker.host_2_ip(host) if ip.nil?\n\t\tnext if ip.nil?\n\t\tnext unless host_tracker.is_ip?(ip)\n\t\tport=host_tracker.url_2_port(site)\n\t\tkey=ip+\":\"\n\t\tkey+=port.to_s\n\t\t@services[key]=true unless @services.key?(key)\n\tend\n\tf_site.close\n\thost_tracker=nil\nend", "def refresh_uniq_sites\n\t\tputs \"Refresh unique site entries in the site store. \" if @verbose\n\t\tchanges=Hash.new\n\t\tsites=get_uniq_sites\n\t\tif sites.size > 0\n\t\t\tchanges=bulk_refresh(sites)\n\t\telse\n\t\t\tputs \"Error: no entry is refreshed. 
Please check your site store and try again.\"\n\t\tend\n\t\treturn changes\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def query_couchbase_servers\n\n couchbase_servers = Hash.new\n \n r=rightscale_server_collection 'couchbase_cluster_nodes' do\n tags [\"couchbase:cluster_ip=#{cluster_ip}\"]\n secondary_tags [\"server:uuid=*\", \"couchbase:listen_ip=*\"]\n action :nothing\n end\n r.run_action(:load)\n \n node[:server_collection]['couchbase_cluster_nodes'].to_hash.values.each do |tags|\n uuid = RightScale::Utils::Helper.get_tag_value('server:uuid', tags)\n ip = RightScale::Utils::Helper.get_tag_value('couchbase:listen_ip', tags)\n couchbase_servers[uuid] = {}\n couchbase_servers[uuid][:ip] = ip\n end\n \n couchbase_servers\n \n end", "def checkNodes(deployed_containers,nova_ip,quantum_ip,token)\n novaIP = URI.parse(nova_ip)\n nova = Ropenstack::Nova.new(novaIP, token)\n\n quantumIP = URI.parse(quantum_ip)\n quantum = Ropenstack::Quantum.new(quantumIP, token)\n\n deployed_containers.each do |deployed_container|\n logger.info \"Checking nodes for deployed container:\" + deployed_container.id.to_s\n # Check all VMs in the container\n deployed_container.deployed_vms.each do |vm|\n begin\n # Ask openstack for the server details\n server = nova.servers(vm.openstack_id)\n logger.info \"Server \" + vm.openstack_id + \" is up.\"\n rescue\n # If openstack returns an error, delete the vm\n logger.info \"Server \" + vm.openstack_id + \" is down. Deleting from deployed container.\"\n vm.destroy()\n end\n end\n\n # Check all networks in the container\n deployed_container.deployed_networks.each do |network|\n begin\n # Ask openstack for the network details\n net = quantum.networks(network.openstack_id)\n logger.info \"Network \" + network.openstack_id + \" is up.\"\n rescue\n # If openstack returns an error, delete the network\n logger.info \"Network \" + network.openstack_id + \" is down. Deleting from deployed container.\"\n network.destroy()\n end\n end\n\n # Check all routers in the container\n deployed_container.deployed_routers.each do |router|\n begin\n # Ask openstack for the router details\n r = quantum.routers(router.openstack_id)\n logger.info \"Router \" + router.openstack_id + \" is up.\"\n rescue\n # If openstack returns an error, delete the router\n logger.info \"Router \" + router.openstack_id + \" is down. Deleting from deployed container.\"\n router.destroy()\n end\n end\n end\n end" ]
[ "0.64395267", "0.59737647", "0.59626085", "0.58956754", "0.57425475", "0.5713824", "0.56919813", "0.5617458", "0.5540912", "0.55262655", "0.5474444", "0.54590106", "0.54089713", "0.53691214", "0.53557086", "0.53376764", "0.53181106", "0.52742827", "0.5227042", "0.5222256", "0.51900923", "0.51652694", "0.5130863", "0.51302844", "0.512649", "0.51199454", "0.51156604", "0.5104164", "0.50755167", "0.5070298", "0.5019904", "0.5014401", "0.49922612", "0.4980031", "0.49574998", "0.4939231", "0.49183133", "0.4890204", "0.4883723", "0.48800507", "0.48672166", "0.4860614", "0.4860614", "0.48567855", "0.48403022", "0.48395556", "0.48383382", "0.48367897", "0.48270208", "0.48246422", "0.4816585", "0.48159578", "0.48080322", "0.48071817", "0.4793177", "0.4793104", "0.47883922", "0.47796252", "0.47613707", "0.4749689", "0.47398195", "0.47301605", "0.47095507", "0.47015205", "0.47007436", "0.46892026", "0.4689026", "0.46884277", "0.46851262", "0.46850613", "0.4682543", "0.46705887", "0.46640602", "0.4659189", "0.46549731", "0.46485987", "0.46485522", "0.46476847", "0.46391046", "0.46375316", "0.46374074", "0.46339238", "0.4629583", "0.46283916", "0.4628288", "0.46268195", "0.4625736", "0.4624579", "0.4624443", "0.46231252", "0.46208444", "0.46117902", "0.4605749", "0.46025214", "0.46004647", "0.45971292", "0.45959598", "0.45918384", "0.45865962", "0.4577508" ]
0.675163
0
Use the provided +parser+ to build an update for the node with the specified +identifier+ and return it as a Hash.
def parse_record( parser, identifier ) expires = parser.expires_on if parser.property_any_supported?( :expires_on ) if !parser.registered? return { error: 'Not registered.' } elsif expires && expires <= Time.now return { error: "Expired on #{expires}" } end return Whois::Parser::PROPERTIES.each_with_object({}) do |prop, data| next unless parser.property_any_supported?( prop ) val = parser.public_send( prop ) case prop when :nameservers data[ 'nameservers' ] = val.map( &:name ) when :available?, :registered? data[ prop.to_s[0..-2] ] = val when :registrant_contacts, :admin_contacts, :technical_contacts data[ prop ] = val.map do |contact| "%s <%s>" % [ contact.name, contact.email ] end when :status data[ prop ] = val.map( &:to_s ) else data[ prop ] = val.to_s end end rescue Whois::ParserError, NoMethodError => err msg = "%p while parsing record for %s: %s" % [ err.class, identifier, err.message ] self.log.error( msg ) self.log.debug { err.backtrace.join("\n ") } return { warning: "Record fetched, but the record could not be parsed." } end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update(hash); end", "def parse()\n r = Hash.new\n r[:timestamp] = @id >> Worker::TIMESTAMP_LEFT_SHIFT_BITS\n r[:time] = Worker::EPOCH.to_i + (@id >> Worker::TIMESTAMP_LEFT_SHIFT_BITS) / 1000.0\n r[:node] = (@id >> Worker::WORKERID_LEFT_SHIFT_BITS) & (-1 ^ (-1 << Worker::NODEID_BITS))\n r[:sequence] = @id & Worker::SEQUENCE_MASK\n r\n end", "def parse_hash_def\n ref, hash = parse_hash\n\n [ref, hash, parse]\n end", "def update!(**args)\n @big_branch = args[:big_branch] if args.key?(:big_branch)\n @debug_info = args[:debug_info] if args.key?(:debug_info)\n @key = args[:key] if args.key?(:key)\n @node = args[:node] if args.key?(:node)\n @retrieval_timestamp = args[:retrieval_timestamp] if args.key?(:retrieval_timestamp)\n @site = args[:site] if args.key?(:site)\n @timestamp = args[:timestamp] if args.key?(:timestamp)\n @tree_info = args[:tree_info] if args.key?(:tree_info)\n end", "def hash\n node_id.hash\n end", "def node_hash(node_id)\n \n end", "def to_hash\n h = hash.dup\n @hash.each do |name, node|\n h[name] = node.to_hash if node.respond_to? :to_hash\n end\n h\n end", "def semantic_update\n update_params = {}\n to_update = semantic_find\n params.each_pair{|k,v| update_params[k]=v if(k.to_s != \"id\" &&\n to_update.attribute_names.include?(k)) }\n\n to_update.update_attributes(update_params.symbolize_keys)\n to_update\n end", "def update(node); end", "def to_hash\n @to_hash ||= RISBN::NokogiriUtils.hash_from_node(xml_nodes)[:entry]\n end", "def calculate_hash!\n prefix = PREFIX_NAME_LOOKUP[self.type]\n # add special cases for refs\n self.hash_id = NodeId.sha1(\"#{prefix} #{self.size}\\0#{self.content}\")\n end", "def update_from_json(hash)\n end", "def node_hash_from_node(ast)\n hash = {}\n ast.children.each { |cn| hash[cn.children[0]] = cn.children[1] }\n hash\n end", "def update!(**args)\n @identifier = args[:identifier] if args.key?(:identifier)\n @type = args[:type] if args.key?(:type)\n end", "def update!(**args)\n @identifier = args[:identifier] if args.key?(:identifier)\n @type = args[:type] if args.key?(:type)\n end", "def parser(info_hash)\n return_hash = {}\n info_hash.delete(\"Slug\")\n\n # Iterate through the given information and update the hash\n info_hash.each do |key, value|\n attribute = camel_to_snake(key)\n\n if attribute == \"date\"\n return_hash[attribute] = date_formatter(value)\n else\n return_hash[attribute] = value\n end\n end\n\n return_hash\n end", "def hash\n expr.hash\n end", "def update\n @p.transaction do\n @p['hash'] = @hash\n end\n end", "def update!\n @etcd_node = etcd.set(etcd_key, value: to_json, prevExist: true).node\n end", "def update!(**args)\n @expression = args[:expression] if args.key?(:expression)\n @name = args[:name] if args.key?(:name)\n @tag_color = args[:tag_color] if args.key?(:tag_color)\n end", "def recalculate_hash_at(node)\n return node._hash = node.value if node.value\n recalculate_hash_at(node.left) if node.left\n recalculate_hash_at(node.right) if node.right\n node._hash = self.class.hash_children(*node_subhashes(node))\n end", "def hash\n return unless doc_value?\n result['doc']['hash']\n end", "def update(para)\n if para.kind_of? 
Hash\n from_hash(para)\n end\n\nend", "def deep_update(*other_hashes, &blk); end", "def to_hash\n thishash = Hash.new()\n thishash['node_number'] = @node_number\n thishash['port'] = @port\n unless @slaveof.nil?\n thishash['slaveof'] = @slaveof['host'] + \":\" + @slaveof['port'].to_s\n end\n return thishash\n end", "def parse(hash)\n raise ArgumentError, \"Requires a hash to read in\" unless hash.is_a?(Hash)\n obj = new\n\n # Exclude from checking elements we've already matched\n matching_names = []\n\n hash.each do |key, value|\n if elem = @_elements[key]\n elem.set(obj, value)\n matching_names << elem.sym.to_s if elem.name == key\n elsif @_strict\n raise ConfigurationError, \"Unsupported attribute '#{key}: #{value}' for #{self.name}\"\n end\n end\n\n @_elements.each do |key, elem|\n next if hash.has_key?(key)\n next if matching_names.include?(key)\n elem.set_default(obj) \n end\n\n obj\n end", "def pub_hash_update(delete: false)\n publication.pub_hash[:identifier] = pub_hash_reject\n publication.pub_hash[:identifier] << identifier unless delete\n end", "def to_hash(hash={})\n node_hash = {}\n\n # Insert node hash into parent hash correctly.\n insert_node_hash_into_parent(hash, name, node_hash)\n\n # Handle child elements\n each_child do |child|\n handle_child_element(child, node_hash)\n end\n\n # Remove content node if it is blank\n remove_blank_content_node node_hash\n\n # Handle attributes\n each_attr { |a| node_hash[a.name] = a.value }\n\n hash\n end", "def edit_hash(hash)\n raw = edit_data(hash, false)\n Chef::JSONCompat.parse(raw)\n end", "def to_hash\n {\n :identifier => @identifier,\n :scheme => @scheme\n }\n end", "def parse (node, oeCount)\n\t\thash = {}\n\t\tpid = node['id'].split('_').last\n\t\thash[:pid] = pid\n\t\thash[:quantity] = node.css('#'+pid.to_s).text\n\t\thash[:title] = node.css('.itemTitle').text\n\t\thash[:price] = node.css('.normalprice').text\n\t\thash[:sale] = node.css('.productSalePrice').text\n\t\treturn hash\n\tend", "def update(hash)\r\n\tputs \"Whichs itme's quantity would you like to update?\"\r\n\titem=gets.chomp\r\n\tputs \"The new quantity?\"\r\n\tnum=gets.chomp\r\n\th2 = { \"#{item}\" => num}\r\n\thash.merge!(h2) \r\n\treturn hash\r\n\t\r\nend", "def to_hash(hash = {})\n node_hash = {}\n\n # Insert node hash into parent hash correctly.\n case hash[name]\n when Array then hash[name] << node_hash\n when Hash then hash[name] = [hash[name], node_hash]\n when nil then hash[name] = node_hash\n end\n\n # Handle child elements\n children.each do |c|\n if c.element?\n c.to_hash(node_hash)\n elsif c.text? 
|| c.cdata?\n node_hash[CONTENT_ROOT] ||= +''\n node_hash[CONTENT_ROOT] << c.content\n end\n end\n\n # Remove content node if it is blank and there are child tags\n node_hash.delete(CONTENT_ROOT) if node_hash.length > 1 && Utils.object_blank?(node_hash[CONTENT_ROOT])\n\n # Handle attributes\n attribute_nodes.each { |a| node_hash[a.node_name] = a.value }\n\n hash\n end", "def hash\n @hash ||= opts[:parser].parse(data.to_s.gsub(/\\<!\\[CDATA\\[([^\\]]+)\\]\\]\\>/) {$1})\n end", "def to_hash\n root.to_hash\n end", "def set_update_values(link_hash)\n\thash = CreateHash.read_hash_file(link_hash[\"asset\"])\n\tnew_hash = {}\n\tnew_hash.store(\"Asset Number\", hash[\"fields\"][\"Asset Number\"])\n\tnew_hash.store(\"Purchase Date\", hash[\"fields\"][\"Purchase Date\"])\n\tnew_hash.store(\"Purchase Cost\", hash[\"fields\"][\"Purchase Cost\"])\n\tnew_hash.store(\"Supplier\", hash[\"fields\"][\"Supplier\"])\n\tnew_hash.store(\"Order Number\", hash[\"fields\"][\"Order Number\"])\n\treturn new_hash\nend", "def update_node_with_values(node, updates, merge = false)\n updated = []\n\n # merge the normal attributes (but not tags)\n updated << 'normal' if update_attrs(node, updates['normal'], merge)\n\n # update runlist\n updated << 'run_list' if update_runlist(node, updates['run_list'])\n\n # update chef env\n if update_chef_env(node, updates['chef_environment'])\n updated << 'chef_environment'\n end\n\n # merge tags\n updated << 'tags' if update_tags(node, updates['tags'])\n\n # return false if no updates, else return array of property names\n !updated.empty? && updated\n end", "def hash\n [name, operator, expanded].hash\n end", "def node_to_hash(node)\n puts \"You must define a `node_to_hash` method in your child class to parse the Nokogiri nodes\"\n end", "def hash_for_merging(hash)\n new_hash = { id: hash['message_id'].to_i,\n date: Time.at(hash['date'].to_i),\n from: User.new(hash['from'], @bot),\n chat: Chat.new(hash['chat'], @bot) }\n\n type = TYPES.find { |t| hash[t.to_s] }\n new_hash[type] = hash[type.to_s] # TODO: fail if type not found\n\n new_hash\n end", "def to_hash\n @parsed_body ||= JSON.parse(@body, symbolize_names: true)\n end", "def update(hash = nil)\n attach\n hash&.each { |k, v| send(\"#{k}=\", v) }\n yield(self) if block_given?\n self\n end", "def to_h\n hash = {\n name: name,\n contents: contents\n }\n hash[:id] = id if id\n hash[:syntax] = syntax || 'autodetect'\n hash[:size] = size if size\n\n hash\n end", "def hash # Hack for Ruby 1.8.6\n @node.id.hash ^ self.class.hash\n end", "def to_hash(overrides = {})\n defaults = {interpolate: true, context: self}\n overrides = defaults.merge(overrides)\n hash = {}\n @children.each_pair do |name, child|\n if child.kind_of?(BranchNode)\n hash[name] = child.to_hash(overrides)\n elsif child.kind_of?(PropertyNode)\n hash[name] = child.get_value(overrides)\n end\n end\n # Return the hash.\n hash\n end", "def update!(**args)\n @digest = args[:digest] if args.key?(:digest)\n @hash_prop = args[:hash_prop] if args.key?(:hash_prop)\n @url = args[:url] if args.key?(:url)\n end", "def new_or_update!(hash={}, options = {:hard_update => true})\n hash.symbolize_keys!\n if hash[:id].blank?\n self.new(hash)\n else\n rec = self.find(hash[:id])\n if options[:hard_update]\n rec.update_attributes!(hash.except(:id))\n else\n rec.update_attributes(hash.except(:id))\n end\n rec\n end\n end", "def update_node(params)\n node = @tinkit_class.get(params[@key_field])\n raise \"No node to update for #{@key_field} => #{params[@key_field]}.\"\\\n \"Maybe you wanted to 
create a new node instead?\" unless node\n #TODO: What if params includes attachments?\n \n joha_fields = JohaDataDefn.keys\n param_keys = params.keys\n param_keys.delete(@key_field)\n param_keys.each do |key|\n next unless joha_fields.include? key\n new_data = params[key]\n node._user_data[key] = new_data\n param_keys.delete(key)\n end\n \n\n if param_keys.size > 0\n node_user_data[@user_data_field] ||= {}\n param_keys.each do |key|\n node._user_data[@user_data_field][key] = params[key]\n end\n end \n end", "def update!(**args)\n @identifier = args[:identifier] if args.key?(:identifier)\n @info = args[:info] if args.key?(:info)\n @kind = args[:kind] if args.key?(:kind)\n @mention = args[:mention] if args.key?(:mention)\n @score = args[:score] if args.key?(:score)\n @source = args[:source] if args.key?(:source)\n @target = args[:target] if args.key?(:target)\n @type = args[:type] if args.key?(:type)\n @type_id = args[:type_id] if args.key?(:type_id)\n end", "def update!(**args)\n @digest = args[:digest] if args.key?(:digest)\n end", "def eval_update_hash(node)\n\t\tconfig = lib('requirements')\n \n update_hash = {status: \"Ok\"}\n update_hash[\"selenium-server.jar\"] = config[\"selenium\"][\"server\"]\n update_hash[\"e3s-proxy.jar\"] = config[\"selenium\"][\"e3s_proxy\"]\n update_hash[\"rabbitmq-client.jar\"] = config[\"rabbitmq-java-client\"]\n update_hash[\"IEDriverServer.exe\"] = config[\"ie_driver_server\"][\"#{node['bit']}bit\"] if has_driver?(node, \"internet explorer\")\n \n update_hash\n\tend", "def deep_copy_node(oldhash)\n newhash = {}\n newhash['fqdn'] = oldhash['fqdn'] || oldhash['id'].gsub('_', '.') # Fix the fqdn, since periods couldn't be used in the databag ID.\n newhash['chef_environment'] = oldhash['chef_environment']\n begin\n newhash['chef_environment'] ||= oldhash.chef_environment # In case it's an actual Chef node and not a hash emulating one.\n rescue\n end\n newhash['rdiff-backup'] = oldhash['rdiff-backup'].to_hash\n return newhash\nend", "def hash\n [@id].hash\n end", "def to_hash\n return @hash_cache if @hash_cache\n # Rails.logger.debug \"to_hash #{options.inspect} #{dump_xml}\"\n t0 = Time.now\n x = Benchmark.measure { @hash_cache = Xmlhash.parse(dump_xml) }\n @@xml_time += Time.now - t0\n # Rails.logger.debug \"after to_hash #{JSON.pretty_generate(@hash_cache)}\"\n # puts \"to_hash #{self.class} #{x}\"\n @hash_cache\n end", "def update!(**args)\n @description = args[:description] if args.key?(:description)\n @implicit = args[:implicit] if args.key?(:implicit)\n @info = args[:info] if args.key?(:info)\n @semantic_node = args[:semantic_node] if args.key?(:semantic_node)\n @type = args[:type] if args.key?(:type)\n end", "def calculate_hash!\n entry_hash = to_hash\n entry_hash['description']=nil\n @hash = entry_hash.hash\n end", "def hash\n\t\t[@id].hash\n\tend", "def update_hash\n nh = nil\n\n if is_branch != 0\n sha512 = OpenSSL::Digest::SHA512.new\n sha512 << HASH_+PREFIXES[:inner_node]\n hashes.each { |k,h|\n sha512 << v\n }\n nh = sha512.digest\n end\n\n return false if nh == self.hash\n self.hash = nh\n return true\n end", "def hash\n [@collection.full_namespace, @opts.hash, @selector.hash].hash\n end", "def to_hash(hash = {})\n node_hash = {}\n\n # Insert node hash into parent hash correctly.\n case hash[name]\n when Array then hash[name] << node_hash\n when Hash then hash[name] = [hash[name], node_hash]\n when nil then hash[name] = node_hash\n end\n\n # Handle child elements\n each_child do |c|\n if c.element?\n c.to_hash(node_hash)\n elsif c.text? 
|| c.cdata?\n node_hash[CONTENT_ROOT] ||= +\"\"\n node_hash[CONTENT_ROOT] << c.content\n end\n end\n\n # Remove content node if it is blank\n if node_hash.length > 1 && node_hash[CONTENT_ROOT].blank?\n node_hash.delete(CONTENT_ROOT)\n end\n\n # Handle attributes\n each_attr { |a| node_hash[a.name] = a.value }\n\n hash\n end", "def to_hash\n hsh = {\n id: id,\n status: status.to_sym,\n connect: running? ? connect.to_h : nil,\n time: info.wallclock_time.to_i / 60 # only update every minute\n }\n Digest::SHA1.hexdigest(hsh.to_json)\n end", "def update\n # don't need to update; hash is shared\n end", "def update!(**args)\n @create_time = args[:create_time] if args.key?(:create_time)\n @hashes = args[:hashes] if args.key?(:hashes)\n @name = args[:name] if args.key?(:name)\n @owner = args[:owner] if args.key?(:owner)\n @size_bytes = args[:size_bytes] if args.key?(:size_bytes)\n @update_time = args[:update_time] if args.key?(:update_time)\n end", "def update(hash)\n peek.merge!(hash)\n self\n end", "def to_hash\n body.to_hash\n end", "def to_h\n if merge_ee?\n deep_merge(@hash, @ee_hash)\n else\n @hash\n end\n end", "def to_hash\n @to_hash ||= JSON.parse(body)\n rescue\n @to_hash = {}\n end", "def parse_update_response(body)\n dom = parse_and_validate_response(body, :root_name => 'update')\n dom.root.if_attribute_value(:time) { |v| Time.parse(v) }\n end", "def to_hash\n @document[\"@search.action\"] = \"merge\"\n @document\n end", "def modify_identifier(identifier, metadata_hash)\n request_uri = '/id/' + identifier\n uri = URI(ENDPOINT + request_uri)\n request = Net::HTTP::Post.new uri.request_uri\n response = call_api(uri, request, metadata_hash)\nend", "def hash\n id.hash\n end", "def hash\n id.hash\n end", "def update\n return unless @hash\n begin\n lockf = File.open(@path+\".lock\", File::CREAT|File::RDWR, 0600)\n lockf.flock File::LOCK_EX\n f = File.open(@path+\".new\", File::CREAT|File::TRUNC|File::WRONLY, 0600)\n for k,v in @hash\n f.printf \"%s=%s\\n\", CGI.escape(k), CGI.escape(String(Marshal.dump(v)))\n end\n f.close\n File.rename @path+\".new\", @path\n ensure\n f&.close\n lockf&.close\n end\n end", "def hash\n [name, url].hash\n end", "def update_file(file, hash)\n\tupdate_file = File.open(file, 'w')\n\tupdate_file.write(hash.to_yaml)\nend", "def update!(**args)\n @sha256_checksum = args[:sha256_checksum] if args.key?(:sha256_checksum)\n @uri = args[:uri] if args.key?(:uri)\n end", "def parse\n if @resource.nil?\n create_dataset(doi_string: @id_string) # @id string will be nil if not specified, so minted, otherwise to be created\n else\n clear_previous_metadata\n end\n user_id = @hash['userId'] || @user.id\n @resource.update(\n title: @hash['title'],\n user_id: user_id,\n current_editor_id: user_id,\n skip_datacite_update: @hash['skipDataciteUpdate'] || false,\n skip_emails: @hash['skipEmails'] || false,\n loosen_validation: @hash['loosenValidation'] || false\n )\n # probably want to clear and re-add authors for data updates\n @hash[:authors]&.each { |author| add_author(json_author: author) }\n StashDatacite::Description.create(description: @hash[:abstract], description_type: 'abstract', resource_id: @resource.id)\n TO_PARSE.each { |item| dynamic_parse(my_class: item) }\n @resource.identifier\n end", "def update\n @parser = Parser.find(params[:id])\n\n respond_to do |format|\n if @parser.update_attributes(params[:parser])\n format.html { redirect_to @parser, :notice => 'Parser was successfully updated.' 
}\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @parser.errors, :status => :unprocessable_entity }\n end\n end\n end", "def method_missing(symbol, *args)\n if symbol == :to_ary\n merged_attributes.send(symbol, *args)\n elsif args.empty?\n Chef.deprecated :attributes, %q{method access to node attributes (node.foo.bar) is deprecated and will be removed in Chef 13, please use bracket syntax (node[\"foo\"][\"bar\"])}\n self[symbol]\n elsif symbol.to_s =~ /=$/\n Chef.deprecated :attributes, %q{method setting of node attributes (node.foo=\"bar\") is deprecated and will be removed in Chef 13, please use bracket syntax (node[\"foo\"]=\"bar\")}\n key_to_set = symbol.to_s[/^(.+)=$/, 1]\n self[key_to_set] = (args.length == 1 ? args[0] : args)\n else\n raise NoMethodError, \"Undefined node attribute or method `#{symbol}' on `node'\"\n end\n end", "def update!(**args)\n @etag = args[:etag] if args.key?(:etag)\n @inherit_from_parent = args[:inherit_from_parent] if args.key?(:inherit_from_parent)\n @reset = args[:reset] if args.key?(:reset)\n @rules = args[:rules] if args.key?(:rules)\n @update_time = args[:update_time] if args.key?(:update_time)\n end", "def to_hash\n {\n amount: self.amount,\n address: self.to_node.address\n }\n end", "def update(hash)\n @__table__ ||= {}\n if hash\n for k,v in hash\n @__table__[k.to_sym] = v\n new_ostruct_member(k)\n end\n end\n end", "def update!(**args)\n @attribute = args[:attribute] if args.key?(:attribute)\n @child_dom_tree_node_index = args[:child_dom_tree_node_index] if args.key?(:child_dom_tree_node_index)\n @current_source_url = args[:current_source_url] if args.key?(:current_source_url)\n @document = args[:document] if args.key?(:document)\n @html_tag_type = args[:html_tag_type] if args.key?(:html_tag_type)\n @is_clickable = args[:is_clickable] if args.key?(:is_clickable)\n @name = args[:name] if args.key?(:name)\n @origin_url = args[:origin_url] if args.key?(:origin_url)\n @referenced_resource_index = args[:referenced_resource_index] if args.key?(:referenced_resource_index)\n @render_tree_node_index = args[:render_tree_node_index] if args.key?(:render_tree_node_index)\n @type = args[:type] if args.key?(:type)\n @value = args[:value] if args.key?(:value)\n end", "def hash\n id.hash\n end", "def update(hash)\n # Try to find an existing entry in @data.\n ind = nil\n @data.each_with_index{ |v, i| ind = i if(v[:id] == hash[:id] || v[:command] == hash[:command]) }\n return false unless ind\n @data[ind].merge!(hash)\n return true\n end", "def hash\n id.hash\n end", "def hash\n id.hash\n end", "def hash\n id.hash\n end", "def hash\n id.hash\n end", "def hash\n id.hash\n end", "def hash\n id.hash\n end", "def hash\n id.hash\n end", "def hash\n id.hash\n end", "def hash\n id.hash\n end", "def remote_sha256_update_info\n return nil unless (temp_remote_sha256_update_info = read_attribute(:remote_sha256_update_info))\n # logger.debug2 \"temp_remote_sha256_update_info = #{temp_remote_sha256_update_info}\"\n YAML::load temp_remote_sha256_update_info\n end", "def hash\n [_hash, name, owner].hash\n end", "def change_sha(node)\n node['delivery']['change']['sha']\n end", "def to_hash\n h = super\n h['uid'] = @uid\n h\n end", "def update!(**args)\n @mid = args[:mid] if args.key?(:mid)\n @unique_id = args[:unique_id] if args.key?(:unique_id)\n end", "def update\n respond_to do |format|\n if @hash_tag.update(hash_tag_params)\n format.html { redirect_to @hash_tag, notice: 'Hash tag was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @hash_tag }\n else\n format.html { render :edit }\n format.json { render json: @hash_tag.errors, status: :unprocessable_entity }\n end\n end\n end" ]
[ "0.49581718", "0.48748443", "0.46445203", "0.4626717", "0.4610098", "0.45846266", "0.45733577", "0.45426938", "0.45315206", "0.45207796", "0.4496077", "0.44676703", "0.44437933", "0.4423479", "0.4423479", "0.43993577", "0.43958464", "0.43929496", "0.43842736", "0.43687123", "0.4364932", "0.43572283", "0.43175477", "0.43064958", "0.43037155", "0.42940184", "0.42862818", "0.42804992", "0.42630404", "0.42578134", "0.41833633", "0.41825095", "0.4178099", "0.41679317", "0.4161089", "0.41603005", "0.41576692", "0.4156947", "0.4151197", "0.41481736", "0.4142245", "0.41415396", "0.41349968", "0.41327322", "0.41275007", "0.41242224", "0.41217604", "0.4115517", "0.41041696", "0.41041684", "0.4102512", "0.4102435", "0.41022232", "0.4095147", "0.40921634", "0.40871596", "0.40865514", "0.40861455", "0.40855926", "0.40845248", "0.40775955", "0.4073901", "0.40639043", "0.4050429", "0.4042929", "0.40400878", "0.40392315", "0.40365717", "0.40355027", "0.40332794", "0.40307218", "0.40307218", "0.4023411", "0.4018694", "0.40181664", "0.40153423", "0.40143144", "0.40137857", "0.4009454", "0.4009219", "0.40082887", "0.40039948", "0.4002084", "0.400094", "0.40006974", "0.40002948", "0.40002948", "0.40002948", "0.40002948", "0.40002948", "0.40002948", "0.40002948", "0.40002948", "0.40002948", "0.39962503", "0.39940068", "0.39878803", "0.39838022", "0.39799035", "0.3974945" ]
0.4228231
30
Create a new DNS monitor with the given +options+.
def initialize( options=DEFAULT_OPTIONS ) options = DEFAULT_OPTIONS.merge( options || {} ) options.each do |name, value| self.public_send( "#{name}=", value ) end @resolver = Resolv::DNS.new end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initialize(options = {})\n @options = {\n :daemonize => DEFAULT_DAEMONIZE,\n :host => DEFAULT_HOST,\n :path => DEFAULT_PATH,\n :pid_file => DEFAULT_PID_FILE,\n :port => DEFAULT_PORT,\n :threads_number => DEFAULT_THREADS_NUMBER,\n :timeout => DEFAULT_TIMEOUT\n }.merge(options)\n \n @stats = {}\n end", "def create_dhcp_server(network, options)\n end", "def create(options = {})\n response = request(:post, \"/settings/hypervisor_zones.json\", :query => {:pack => options})\n end", "def initialize(info = {})\n super\n register_options(\n [\n OptAddressRange.new('RHOSTS', [true, 'The multicast address or CIDR range of targets to query', '224.0.0.252']),\n Opt::RPORT(5355),\n # TODO: allow more than one\n OptString.new('NAME', [true, 'The name to query', 'localhost']),\n OptString.new('TYPE', [true, 'The query type (name, # or TYPE#)', 'A'])\n ],\n self.class\n )\n end", "def find_or_create_host(opts)\n\t\treport_host(opts.merge({:wait => true}))\n\tend", "def initialize(address, options = {})\n @address = Address.new(address)\n @options = options\n @mutex = Mutex.new\n @monitor = Monitor.new(self, options)\n @description = Description.new(self)\n end", "def create options = {}\n raise ArgumentError.new \"Bad options passed #{options}\" unless options[ :id ]\n case options[:type]\n when :zmq\n Zmq.new options\n else\n Redis.new options\n end\n end", "def initialize(options = {})\n @formatter = Format.new\n @options = options\n @limit = options[:limit]\n @interval = options[:interval] || 5\n case options[:type]\n when :icmp\n @ping_handler = Net::Ping::External.new(@options[:host], @options[:port])\n when :udp\n @ping_handler = Net::Ping::UDP.new(@options[:host], @options[:port])\n when :tcp\n @ping_handler = Net::Ping::TCP.new(@options[:host], @options[:port])\n when :http, :https\n @ping_handler = Net::Ping::HTTP.new(@options[:url])\n when :ldap, :ldaps\n @ping_handler = Net::Ping::LDAP.new(@options[:url])\n end\n end", "def create(opts)\n opts = check_params(opts,[:search_base_dns,:servers])\n super(opts)\n end", "def create(options = {})\n response = request(:post, \"/network_zones.json\", :query => {:pack => options})\n end", "def initialize(options = {})\n @registry = options.fetch(:registry) { Restforce::DB::Registry }\n @interval = options.fetch(:interval) { DEFAULT_INTERVAL }\n @delay = options.fetch(:delay) { DEFAULT_DELAY }\n @verbose = options.fetch(:verbose) { false }\n @logger = options.fetch(:logger)\n @tracker = options.fetch(:tracker)\n @runner = options.fetch(:runner)\n @exit = options.fetch(:run_once)\n @history = options.fetch(:history)\n\n DB.reset\n DB.configure { |config| config.parse(options[:config]) }\n end", "def new(options) \n Client.get(\"/patterns/new\", :query => options)\n end", "def add(options = {})\n entry = Entry.new(\n ip_address: options[:ip_address],\n hostname: options[:hostname],\n aliases: options[:aliases],\n comment: options[:comment],\n priority: options[:priority],\n )\n\n @entries << entry\n remove_existing_hostnames(entry) if options[:unique]\n end", "def initialize(options={})\n @options = options\n @nsqd_tcp_addresses = s_to_a(options[:nsqd_tcp_addresses])\n @lookupd_tcp_addresses = s_to_a(options[:lookupd_tcp_addresses])\n @lookupd_poll_interval = options[:lookupd_poll_interval] || 120\n @long_id = options[:long_id] || Socket.gethostname\n @short_id = options[:short_id] || @long_id.split('.')[0]\n NSQ.logger = options[:logger] if options[:logger]\n NSQ.logger.level = options[:logger_level] if options[:logger_level]\n\n @selector = 
::NIO::Selector.new\n @timer = Timer.new(@selector)\n @topic_count = Hash.new(0)\n @subscribers = {}\n @subscriber_mutex = Monitor.new\n @name = \"#{@long_id}:#{@short_id}\"\n\n raise 'Must pass either option :nsqd_tcp_addresses or :lookupd_http_addresses' if @nsqd_tcp_addresses.empty? && @lookupd_http_addresses.empty?\n\n @conns = {}\n @last_lookup = nil\n\n @logger.info(\"starting reader for topic '%s'...\" % self.topic) if @logger\n end", "def initialize(options = {})\n default_options = { :port => 6543,\n :status_port => 6544,\n :connection_type => :playback,\n :protocol_version => MythTV::DEFAULT_PROTOCOL_VERSION }\n \n options = default_options.merge(options)\n\n # Set up a local logging object\n @log = MythTV::Utils.setup_logging(options)\n \n # We cannot start unless we've been given a host to connect to\n raise ArgumentError, \"You must specify a :host key and value to initialize()\" unless options.has_key?(:host)\n\n @host = options[:host]\n @port = options[:port]\n @status_port = options[:status_port]\n @protocol_version = options[:protocol_version]\n\n @socket = TCPSocket.new(@host, @port)\n \n check_proto\n \n if options[:connection_type] == :playback\n announce_playback()\n elsif options[:connection_type] == :filetransfer\n announce_filetransfer(options[:filename])\n else\n raise ArgumentError, \"Unknown connection type '#{options[:connection_type]}'\"\n end\n end", "def start_server(options = {})\n\n # Backward compatibility\n if options.is_a? String\n url = options\n port = nil\n logfile = nil\n else\n url = options[:url]\n port = options[:port]\n logfile = options[:logfile]\n end\n\n url = ENV['TALKSHOW_REMOTE_URL'] if ENV['TALKSHOW_REMOTE_URL']\n port = ENV['TALKSHOW_PORT'] if ENV['TALKSHOW_PORT']\n logfile = ENV['TALKSHOW_LOG'] if ENV['TALKSHOW_LOG']\n\n Talkshow::Server.set_port port if port\n Talkshow::Server.set_logfile logfile if logfile\n \n if !url\n @type = :thread\n @question_queue = ::Queue.new\n @answer_queue = ::Queue.new\n @thread = Thread.new do\n Talkshow::Server.question_queue(@question_queue)\n Talkshow::Server.answer_queue(@answer_queue)\n Talkshow::Server.run!\n end\n else\n @type = :remote\n @question_queue = Talkshow::Queue.new(url)\n @answer_queue = Talkshow::Queue.new(url)\n end\n \n end", "def initialize( options )\n\t\tLoggability.format_as( :color ) if $stderr.tty?\n\t\t@options = options\n\n\t\tif @options.debug\n\t\t\t$DEBUG = true\n\t\t\t$VERBOSE = true\n\t\t\tLoggability.level = Logger::DEBUG\n\t\telsif @options.loglevel\n\t\t\tLoggability.level = @options.loglevel\n\t\tend\n\n\t\tMongrel2::Config.configure( :configdb => @options.config )\n\tend", "def initialize(options={})\n @options = options\n super(nil)\n @logdev = LogDevice.new(self)\n @logdev.run_socket_thread\n\n @formatter = proc do |severity, time, progname, msg|\n if msg.is_a?(Exception)\n \"#{severity}: #{msg.message} (#{msg.class})\\n\" + (msg.backtrace || []).join(\"\\n\")\n else\n \"#{severity}: #{msg}\"\n end\n end\n end", "def initialize(options)\n @observer_queue = 'rpm_worker_observer'\n @observer_class = 'AbfWorker::RpmWorkerObserver'\n super options\n @runner = DockerRpmWorker::Runners::Rpm.new(self, options)\n init_live_logger(\"abfworker::rpm-worker-#{@build_id}\")\n init_file_logger(ENV['HOME'] + '/script_output.log')\n initialize_live_inspector options['time_living']\n end", "def initialize(options = {})\n expect! 
options => { :adapter => String }\n\n adapter_url = options[:adapter]\n\n EM.next_tick do\n @adapter = RubPubSub::Adapter.create(adapter_url)\n end\n end", "def initialize(options, args)\n defaults = {\n interval: 1\n }\n @options = defaults.merge((options || {}).each { |k, v| { k => v } })\n return unless args.first\n\n resolve_addressing args.first\n normalise_options\n end", "def initialize(options = nil)\n @config = DEFAULTS.merge(options || { })\n \n merge_config_options\n assign_extracted_host_port\n end", "def create(options = {})\n raise ArgumentError.new(\"You must provide :cores and :ram\") if options[:ram].nil? or options[:cores].nil?\n raise ArgumentError.new(\":ram has to be at least 256MiB and a multiple of it\") if options[:ram].to_i < 256 or (options[:ram].to_i % 256) > 0\n raise ArgumentError.new(\":availability_zone has to be either 'AUTO', 'ZONE_1', or 'ZONE_2'\") if options[:availability_zone] and !['AUTO', 'ZONE_1', 'ZONE_2'].include? options[:availability_zone]\n raise ArgumentError.new(\":os_type has to be either 'WINDOWS' or 'OTHER'\") if options[:os_type] and !['WINDOWS', 'OTHER'].include? options[:os_type]\n options[:server_name] = options.delete :name if options[:name]\n response = Profitbricks.request :create_server, options\n self.find(:id => response[:server_id])\n end", "def initialize options\n # stats\n\n @stats = {}\n\n clear_stats\n\n if options[:host]\n port = options[:port]\n port = DEFAULT_PORT if port.nil?\n @socket = TCPSocket.new options[:host], port\n elsif options[:socket]\n @socket = options[:socket]\n end\n\n @number = 0\n\n set_options options\n end", "def create(options = {})\n options[:name] ||= SecureRandom.hex\n\n create_options = { p: port }\n create_options[:c] = options[:name] if options[:name]\n create_options[:d] = options[:dir] if options[:dir]\n exec(\"create\", create_options)\n\n options[:name]\n end", "def create\n begin\n enterMaintenanceMode\n rescue\n Puppet.err 'Could not find Host system.Either Host is not exist or disconnected'\n end\n end", "def initialize(options = {})\n find_xvfb\n\n @display = options.fetch(:display, 99).to_i\n @reuse_display = options.fetch(:reuse, true)\n @dimensions = options.fetch(:dimensions, '1280x1024x24')\n\n #TODO more logic here, autopicking the display number\n if @reuse_display\n launch_xvfb unless read_pid\n elsif read_pid\n raise Exception.new(\"Display :#{display} is already taken and reuse=false\")\n else\n launch_xvfb\n end\n\n raise Exception.new(\"Xvfb did not launch - something's wrong\") unless read_pid\n end", "def initialize(options={})\n @host = options[:host] or raise ArgumentError, \"#{self.class.name} requires :host\"\n @port = options[:port] or raise ArgumentError, \"#{self.class.name} requires :port\"\n @node_id = options[:node_id]\n @protocol = options[:protocol] || DEFAULT_PROTOCOL\n @logger = options[:logger] || Logger.new($stdout)\n @health = :good\n end", "def initialize(options = {})\n @port = 7\n @service_check = false\n @data_must_match = true\n @data = \"net-ping2\\n\"\n @timeout = 10\n @bind_host = nil\n @bind_port = nil\n super(options)\n end", "def create_server(options = {})\n begin\n server = connection.servers.create(options[:server_def])\n rescue Excon::Errors::BadRequest => e\n response = Chef::JSONCompat.from_json(e.response.body)\n if response['badRequest']['code'] == 400\n message = \"Bad request (400): #{response['badRequest']['message']}\"\n ui.fatal(message)\n else\n message = \"Unknown server error (#{response['badRequest']['code']}): 
#{response['badRequest']['message']}\"\n ui.fatal(message)\n end\n raise CloudExceptions::ServerCreateError, message\n end\n\n msg_pair(\"Instance Name\", server.name)\n msg_pair(\"Instance ID\", server.id)\n\n print \"\\n#{ui.color(\"Waiting for server [wait time = #{options[:server_create_timeout]}]\", :magenta)}\"\n\n # wait for it to be ready to do stuff\n server.wait_for(Integer(options[:server_create_timeout])) { print \".\"; ready? }\n\n puts(\"\\n\")\n server\n end", "def initialize options\n @count = options[:count] || Float::INFINITY\n @drb_config = DRb::DRbServer.make_config\n @incoming_packets = Queue.new\n @incomplete_streams = {}\n @incomplete_timestamps = {}\n @loader = DRbDump::Loader.new @drb_config\n @quiet = options[:quiet]\n @resolver = Resolv if options[:resolve_names]\n @run_as_directory = options[:run_as_directory]\n @run_as_user = options[:run_as_user]\n\n initialize_devices options[:devices]\n\n @capps = []\n @drb_streams = {}\n @running = false\n @statistics = DRbDump::Statistics.new\n end", "def initialize(options = {})\n # merge default options with options hash\n o = {\n :domain => 'http://www.dev.usys.org',\n :key => '',\n :user_agent => 'FsCommunicator/0.1 (Ruby)', # should be overridden by options user_agent\n :session => nil,\n :handle_throttling => false,\n :logger => nil,\n :timeout => nil\n }.merge(options)\n @domain = o[:domain]\n @key = o[:key]\n @user_agent = o[:user_agent]\n @session = o[:session]\n @handle_throttling = o[:handle_throttling]\n @logger = o[:logger]\n @timeout = o[:timeout]\n end", "def initialize( options={} )\n\t\toptions = DEFAULT_OPTIONS.merge( options )\n\n\t\t@host = options[:host]\n\t\t@port = options[:port]\n\t\t@connect_type = options[:connect_type]\n\t\t@results_class = options[:results_class]\n\n\t\t@conn = nil\n\t\t@bound_user = nil\n\n\n\t\t@object_conversions = DEFAULT_OBJECT_CONVERSIONS.dup\n\t\t@attribute_conversions = DEFAULT_ATTRIBUTE_CONVERSIONS.dup\n\t\t@registered_controls = []\n\n\t\t@base_dn = options[:base_dn] || self.get_default_base_dn\n\t\t@base = nil\n\n\t\t# Immediately bind if credentials are passed to the initializer.\n\t\tif ( options[:bind_dn] && options[:pass] )\n\t\t\tself.bind( options[:bind_dn], options[:pass] )\n\t\tend\n\tend", "def create(options)\n new(options).create\n end", "def initialize(watchers = [], options = {})\n super\n @standalone = options[:standalone].nil? ? true : options[:standalone]\n @port = options[:port] || 3000\n @env = options[:env] || 'development'\n ping_opt = unless options[:touch].nil?\n UI.info \"Warning: The :touch option has been replaced by the :ping option, usage is still the same.\"\n options[:touch]\n else\n options[:ping]\n end\n @ping = ping_opt.eql?(true) ? 
'/' : ping_opt\n end", "def use(host, options)\r\n @servers[host] = Server.new(self, host, options)\r\n end", "def create_host_only_network(options)\n end", "def initialize(options) # :yield: mesg\n @options = options\n @options[\"Host\"] = \"localhost\" unless @options.has_key?(\"Host\")\n @options[\"Port\"] = 23 unless @options.has_key?(\"Port\")\n @options[\"Prompt\"] = /[$%#>] \\z/n unless @options.has_key?(\"Prompt\")\n @options[\"Timeout\"] = 10 unless @options.has_key?(\"Timeout\")\n @options[\"Waittime\"] = 0 unless @options.has_key?(\"Waittime\")\n unless @options.has_key?(\"Binmode\")\n @options[\"Binmode\"] = false\n else\n unless (true == @options[\"Binmode\"] or false == @options[\"Binmode\"])\n raise ArgumentError, \"Binmode option must be true or false\"\n end\n end\n\n unless @options.has_key?(\"Telnetmode\")\n @options[\"Telnetmode\"] = true\n else\n unless (true == @options[\"Telnetmode\"] or false == @options[\"Telnetmode\"])\n raise ArgumentError, \"Telnetmode option must be true or false\"\n end\n end\n\n @telnet_option = { \"SGA\" => false, \"BINARY\" => false }\n\n if @options.has_key?(\"Output_log\")\n @log = File.open(@options[\"Output_log\"], 'a+')\n @log.sync = true\n @log.binmode\n end\n\n if @options.has_key?(\"Dump_log\")\n @dumplog = File.open(@options[\"Dump_log\"], 'a+')\n @dumplog.sync = true\n @dumplog.binmode\n def @dumplog.log_dump(dir, x) # :nodoc:\n len = x.length\n addr = 0\n offset = 0\n while 0 < len\n if len < 16\n line = x[offset, len]\n else\n line = x[offset, 16]\n end\n hexvals = line.unpack('H*')[0]\n hexvals += ' ' * (32 - hexvals.length)\n hexvals = format(\"%s %s %s %s \" * 4, *hexvals.unpack('a2' * 16))\n line = line.gsub(/[\\000-\\037\\177-\\377]/n, '.')\n printf \"%s 0x%5.5x: %s%s\\n\", dir, addr, hexvals, line\n addr += 16\n offset += 16\n len -= 16\n end\n print \"\\n\"\n end\n end\n\n if @options.has_key?(\"Proxy\")\n if @options[\"Proxy\"].kind_of?(Net::Telnet)\n @sock = @options[\"Proxy\"].sock\n elsif @options[\"Proxy\"].kind_of?(IO)\n @sock = @options[\"Proxy\"]\n else\n raise \"Error: Proxy must be an instance of Net::Telnet or IO.\"\n end\n else\n message = \"Trying \" + @options[\"Host\"] + \"...\\n\"\n yield(message) if block_given?\n @log.write(message) if @options.has_key?(\"Output_log\")\n @dumplog.log_dump('#', message) if @options.has_key?(\"Dump_log\")\n\n begin\n if @options[\"Timeout\"] == false\n @sock = TCPSocket.open(@options[\"Host\"], @options[\"Port\"])\n else\n Timeout.timeout(@options[\"Timeout\"], Net::OpenTimeout) do\n @sock = TCPSocket.open(@options[\"Host\"], @options[\"Port\"])\n end\n end\n rescue Net::OpenTimeout\n raise Net::OpenTimeout, \"timed out while opening a connection to the host\"\n rescue\n @log.write($ERROR_INFO.to_s + \"\\n\") if @options.has_key?(\"Output_log\")\n @dumplog.log_dump('#', $ERROR_INFO.to_s + \"\\n\") if @options.has_key?(\"Dump_log\")\n raise\n end\n @sock.sync = true\n @sock.binmode\n\n message = \"Connected to \" + @options[\"Host\"] + \".\\n\"\n yield(message) if block_given?\n @log.write(message) if @options.has_key?(\"Output_log\")\n @dumplog.log_dump('#', message) if @options.has_key?(\"Dump_log\")\n end\n\n end", "def initialize(host, options = T.unsafe(nil)); end", "def dash(options={})\n set RGhost::Dash.new(options)\n end", "def daemon=(options = {})\n if options.respond_to?(:keys)\n daemon.bind = options[:bind] if options[:bind]\n daemon.port = options[:port] if options[:port]\n daemon.log_file = options[:log_file] if options[:log_file]\n daemon.timer = 
options[:timer] if options[:timer]\n end\n end", "def initialize(info = {})\n super\n register_options(\n [\n Opt::RPORT(123),\n ], self.class)\n\n register_advanced_options(\n [\n OptInt.new('VERSION', [true, 'Use this NTP version', 2]),\n OptInt.new('IMPLEMENTATION', [true, 'Use this NTP mode 7 implementation', 3])\n ], self.class)\n end", "def initialize( options )\n @notification = options.fetch(:notification)\n @output = options.fetch(:output) { STDOUT }\n end", "def initialize\n @hostname = Socket.gethostname()\n @dns_port = 53\n @ttl = 7200\n @priority = 1\n @weight = 5\n @resolver = nil\n @ipv4 = nil\n @ipv6 = nil\n @sleep_time = 60\n @max_dns_response_time=10\n @zone = \"\"\n @transport = :udp\n end", "def create(name, type)\n configure [\"aaa group server #{type} #{name}\", 'exit']\n end", "def initialize(options = nil)\n super() # MonitorMixin\n options ||= {}\n @logger = options[:logger]\n # Initializes each instance variable of the same name as option\n # keyword.\n default_options.each_pair { |key, default|\n instance_variable_set(\"@#{key}\", options.fetch(key, default))\n }\n end", "def create_network_switch(options)\n load_path = Pathname.new(File.expand_path(\"../scripts\", __FILE__))\n script_path = load_path.join('create_switch.ps1')\n execute(script_path, options)\n end", "def create_monitor(_mon_name, mon_params)\n res = @dog.monitor(mon_params['type'], mon_params['query'], mon_params)\n if res[0] == '200'\n logger.info \"\\tMonitor #{res[1]['id']} created successfully\"\n return res[1]['id']\n else\n logger.error \"\\tError creating monitor: #{res}\"\n end\n end", "def create(options={})\n unless options[:ostemplate]\n # We need at least a valid ostemplate\n raise ArgumentError, \"Create requires argument :ostemplate.\"\n end\n\n cmd = \"#{@vzctl} create #{@ctid}\"\n\n options.each do |opt,val|\n cmd << \" --#{opt}\"\n cmd << \" #{val}\"\n end\n\n execute(cmd)\n\n Log.debug(\"Reading new container configuration file: #{@configfile}\")\n @config = Config.new(load_config_file)\n @config.add_observer(self)\n end", "def initialize(hosts, options)\n @hosts = hosts\n @options = options\n @logger = options[:logger]\n @perf_timestamp = Time.now\n @hosts.map { |h| setup_perf_on_host(h) }\n end", "def initialize(options)\n @options =\n if options.kind_of?(String)\n Hash[ [:host, :port].zip(options.split(\":\")) ]\n else\n options.dup\n end.tap { |o| o[:port] = o[:port].to_i }\n\n @pool = Pool.new(\n host: @options[:host],\n port: @options[:port],\n size: @options.fetch(:pool_size, 8),\n ttl: 60\n )\n\n assert_valid_pool unless @options[:verify] == false\n end", "def create_server opts\n Server.create opts.merge :account => self\n end", "def initialize(options={})\n @timers = []\n @settings = options[:settings]\n @logger = options[:logger]\n @redis = options[:redis]\n @options = @settings.to_hash.fetch(:tessen, {})\n end", "def initialize(options, monitoring, cluster)\n @options = options\n @monitoring = monitoring\n @cluster = cluster\n end", "def create_host_only_network(options)\n # Create the interface\n execute_prlsrvctl('net', 'add', options[:network_id], '--type', 'host-only')\n\n # Configure it\n args = ['--ip', \"#{options[:adapter_ip]}/#{options[:netmask]}\"]\n if options[:dhcp]\n args.concat(['--dhcp-ip', options[:dhcp][:ip],\n '--ip-scope-start', options[:dhcp][:lower],\n '--ip-scope-end', options[:dhcp][:upper]])\n end\n\n execute_prlsrvctl('net', 'set', options[:network_id], *args)\n\n # Return the details\n {\n name: options[:network_id],\n ip: 
options[:adapter_ip],\n netmask: options[:netmask],\n dhcp: options[:dhcp]\n }\n end", "def initialize(info = {})\n super\n\n register_options(\n [\n Opt::LHOST,\n Opt::LPORT(4444)\n ], Msf::Handler::ReverseUdp)\n\n # XXX: Not supported by all modules\n register_advanced_options(\n [\n OptAddress.new('ReverseListenerBindAddress', [ false, 'The specific IP address to bind to on the local system']),\n OptInt.new('ReverseListenerBindPort', [ false, 'The port to bind to on the local system if different from LPORT' ]),\n OptString.new('ReverseListenerComm', [ false, 'The specific communication channel to use for this listener']),\n OptBool.new('ReverseListenerThreaded', [ true, 'Handle every connection in a new thread (experimental)', false])\n ] +\n Msf::Opt::stager_retry_options,\n Msf::Handler::ReverseUdp)\n\n self.conn_threads = []\n end", "def initialize(options = {})\n super\n @options = Options.with_defaults(options) \n @notifier = Notifier.new(@options) \n end", "def initialize options = {}\n options[:DoNotListen] = true\n sockets_key = options.delete(:LaunchdSockets) || 'WEBrickSockets'\n\n super\n\n launch_checkin\n\n servers = launch_sockets sockets_key, TCPServer\n\n listeners.replace servers\n\n trap 'TERM' do shutdown end\n end", "def initialize(options = {}, &block)\n options = options.dup\n @server = options.delete(:server)\n @udp_flags = options.delete(:udp_flags) || 0\n raise(ArgumentError, 'Missing mandatory argument: :server') unless @server\n\n super(options, &block)\n reopen\n end", "def create_server(options = {})\n begin\n add_custom_attributes(options[:server_def])\n server = connection.servers.create(options[:server_def])\n rescue Excon::Error::BadRequest => e\n response = Chef::JSONCompat.from_json(e.response.body)\n if response[\"badRequest\"][\"code\"] == 400\n message = \"Bad request (400): #{response[\"badRequest\"][\"message\"]}\"\n ui.fatal(message)\n else\n message = \"Unknown server error (#{response[\"badRequest\"][\"code\"]}): #{response[\"badRequest\"][\"message\"]}\"\n ui.fatal(message)\n end\n raise CloudExceptions::ServerCreateError, message\n rescue Fog::Errors::Error => e\n raise CloudExceptions::ServerCreateError, e.message\n end\n\n print \"\\n#{ui.color(\"Waiting for server [wait time = #{options[:server_create_timeout]}]\", :magenta)}\"\n\n # wait for it to be ready to do stuff\n server.wait_for(Integer(options[:server_create_timeout])) { print \".\"; ready? }\n\n puts(\"\\n\")\n server\n end", "def setup(options)\n @nick = options['nick'] if options['nick']\n @server = options['server'] if options['server']\n @port = options['port'].to_i if options['port']\n connect\n end", "def create\n Puppet.debug \"starting create #{self.class.to_s}\"\n dns_service = get_dns_service(get_fqdn)\n dns_service.create_record(get_fqdn, get_type, get_ip) if dns_service != nil\n Puppet.debug \"done with create #{self.class.to_s}\"\n end", "def create_db(options)\n info \"Created database '#{options[:name]}'\"\n end", "def initialize(info = {})\n super(update_info(info,\n 'Name' => 'Backdoor Sniffer',\n 'Description' => %q{\n This module will attempt to sniff out well known backdoors hidden in popular directories and setup/backup files \n that should not be public facing. 
The list of directories and page names are taken from various repositories of\n backdoors.\n },\n 'Author' =>\n [\n 'h4cklife',\n ],\n 'License' => MSF_LICENSE\n )\n )\n\n deregister_options('RHOST', 'DigestAuthIIS', 'NTLM::SendLM',\n 'NTLM::SendNTLM', 'VHOST', 'RPORT', 'NTLM::SendSPN', 'NTLM::UseLMKey',\n 'NTLM::UseNTLM2_session', 'NTLM::UseNTLMv2')\n\n register_options(\n [\n\t\t\t\tOptString.new('DOMAIN', [true, 'Target domain']),\n OptString.new('OUTFILE', [false, 'A filename to store the results']),\n\t\t\t\tOptBool.new('DATABASE', [false, 'Add search results to thea loot database', false]),\n\t\t\t\tOptBool.new('SSL', [false, 'Use SSL', false]),\n\t\t\t\tOptBool.new('VERBOSE', [false, 'Verbose mode', false]),\n ], self.class)\n\n end", "def create_new_record(options = {})\n @mongo_record = LogMessage.new(options.merge({\n :messages => [],\n :time => Time.now.getutc,\n :progname => resolve_application_name,\n :combined => true\n }))\n end", "def initialize( options={} )\n end", "def initialize(info = {})\n\t\tsuper\n\n\t\t# Register our options, overriding the RHOST/RPORT from TCP\n\t\tregister_options(\n\t\t\t[\n\t\t\t\tOpt::RPORT(25),\n\t\t\t\tOptString.new('MAILFROM', [ true, 'The FROM address of the e-mail', '[email protected]']),\n\t\t\t\tOptString.new('MAILTO', [ true, 'The TO address of the e-mail']),\n\t\t\t\tOptString.new('VERBOSE', [ false, 'Display verbose information']),\n\t\t\t], Msf::Exploit::Remote::SMTPDeliver)\n\t\tregister_autofilter_ports([ 25, 465, 587, 2525, 25025, 25000])\n\t\tregister_autofilter_services(%W{ smtp smtps})\n\tend", "def initialize(options={})\n @config = DEFAULTS.merge! options\n @config[:console_id] = CONSOLE_MAP[@config[:console]] if CONSOLE_MAP.has_key? @config[:console]\n\n setup_connection\n setup_resources\n end", "def find_or_create_host(opts)\n host = get_host(opts.clone)\n return host unless host.nil?\n\n report_host(opts)\n end", "def initialize(info = {})\n\t\tsuper\n\t\tregister_options(\n\t\t\t[\n\t\t\t\tOpt::LPORT(4444),\n\t\t\t\t#OptAddress.new('RHOST', [false, 'The target address', '']),\n\t\t\t], Msf::Handler::BeEFBind)\n\tend", "def initialize(options={})\n @server_list = ServerList.new\n @groups = Hash.new { |h,k| h[k] = ServerList.new }\n @gateway = nil\n @open_groups = []\n @connect_threads = []\n @on_error = :fail\n @default_user = ENV['USER'] || ENV['USERNAME'] || \"unknown\"\n\n @open_connections = 0\n @pending_sessions = []\n @session_mutex = Mutex.new\n\n options.each { |opt, value| send(\"#{opt}=\", value) }\n end", "def initialize(aOptions)\n\t\t@options = aOptions\n\t\t@thread = nil\n\t\t@name = aOptions[:name] || random_word(8,8)\n\t\tif not @logger = aOptions[:logger]\n\t\t\t@logger = Logger.new(STDERR)\n\t\t\[email protected] = Logger::DEBUG\n\t\tend\n\t\tself.start() if aOptions[:auto_start]\n\tend", "def initialize(host, options={})\n @host = host\n @options = DEFAULT_CONFIGURATION.merge(options)\n end", "def create(opts)\n opts = check_params(opts,[:servers])\n super(opts)\n end", "def vps_create(options)\n #puts \"#{@beanstalk.last_conn.addr}: create #{options['code']}\\n\"\n puts options.inspect\n vps = Antir::VPS.new\n\n code = options['code']\n vps.id = code\n vps.name = code\n vps.ip = \"10.10.1.#{code}\"\n puts vps.inspect\n vps.create\n\n #@report.send_string(\"created #{options['code']}\")\n end", "def new\n @host = Host.new\n end", "def initialize(options)\n @options = options\n @subscribers = []\n @discovered_nodes = []\n end", "def initialize(options = {})\n @daemonize = options[:daemonize].nil? ? 
true : options[:daemonize]\n @pid_file = options[:pid_file].nil? ? nil : File.absolute_path(options[:pid_file])\n end", "def create(name)\n configure [\"interface #{name}\", 'no ip address', 'switchport']\n end", "def initialize(options={})\n @options = {:host => nil,\n :vendor => \"Dell\",\n :model => nil,\n :version => \"2c\",\n :community => \"public\"\n }\n @options.merge!(options) if options.is_a?(Hash)\n @debug = false\n end", "def initialize(info = {})\n\t\tsuper\n\n\t\tregister_options(\n\t\t\t[\n\t\t\t\tOpt::LPORT(4444),\n\t\t\t\tOptAddress.new('RHOST', [false, 'The target address', '']),\n\t\t\t], Msf::Handler::BindTcp)\n\n\t\tself.conn_threads = []\n\t\tself.listener_threads = []\n\t\tself.listener_pairs = {}\n\tend", "def initialize(options)\n Resque.redis = \"#{options[:server]}:#{options[:port]}\"\n end", "def create(options = {})\n nil\n end", "def create(options)\n API::request(:post, 'background_checks', options)\n end", "def initialize(info = {})\n super\n\n # Register the options that all SNMP exploits may make use of.\n register_options(\n [\n Opt::RHOST,\n Opt::RPORT(161),\n OptString.new('COMMUNITY', [ true, 'SNMP Community String', 'public' ]),\n OptString.new('VERSION', [ true, 'SNMP Version <1/2c>', '1' ]),\n OptInt.new('TIMEOUT', [ true, 'SNMP Timeout', 1 ]),\n OptInt.new('RETRIES', [ true, 'SNMP Retries', 1 ])\n ], Msf::Exploit::Remote::SNMPClient)\n end", "def initialize(options = {})\n @options = options\n @group = options.delete(:group)\n @watchers = options.delete(:watchers) { [] }\n @callbacks = options.delete(:callbacks) { [] }\n _register_callbacks\n end", "def initialize(options = {})\n @seq = 0\n @bind_port = 0\n @bind_host = nil\n @data_size = 56\n\n @ping_id = (Thread.current.object_id ^ Process.pid) & 0xffff\n\n super(options)\n end", "def initialize(options); end", "def initialize(options)\n @options = options\n @verbose = options[:Verbose]\n @boxes = options[:Boxes]\n Net::IMAP.debug = options[:Debug]\n end", "def sparkRegisterMonitorCmdParser(className)\n logNormal($sparkPrefix + \" sparkRegisterMonitorCmdParser \" + className + \"\\n\")\n sparkGetMonitorServer()\n sparkCreate(className, $serverPath+'monitor/SparkMonitor/'+className)\nend", "def create_datagram_socket(options=nil)\n if !block_given? 
&& options == nil\n return ::Vertx::Util::Utils.safe_create(@j_del.java_method(:createDatagramSocket, []).call(),::Vertx::DatagramSocket)\n elsif options.class == Hash && !block_given?\n return ::Vertx::Util::Utils.safe_create(@j_del.java_method(:createDatagramSocket, [Java::IoVertxCoreDatagram::DatagramSocketOptions.java_class]).call(Java::IoVertxCoreDatagram::DatagramSocketOptions.new(::Vertx::Util::Utils.to_json_object(options))),::Vertx::DatagramSocket)\n end\n raise ArgumentError, \"Invalid arguments when calling create_datagram_socket(options)\"\n end", "def initialize logger, options\n @logger = logger\n @options = options\n raise AgentError, \"no configuration specified.\" unless @options\n @locker = Monitor.new\n @threads = nil\n end", "def create_gdom_disk(options)\n client_disk = options['q_struct']['gdom_disk'].value\n disk_size = options['q_struct']['gdom_size'].value\n disk_size = disk_size.downcase\n vds_disk = options['name']+\"_vdisk0\"\n if not client_disk.match(/\\/dev/)\n if not File.exist?(client_disk)\n message = \"Information:\\tCreating guest domain disk \"+client_disk+\" for client \"+options['name']\n command = \"mkfile -n #{disk_size} #{client_disk}\"\n output = execute_command(options,message,command)\n end\n end\n message = \"Information:\\tChecking Virtual Disk Server device doesn't already exist\"\n command = \"ldm list-services |grep 'primary-vds0' |grep '#{vds_disk}'\"\n output = execute_command(options,message,command)\n if not output.match(/#{options['name']}/)\n message = \"Information:\\tAdding disk device to Virtual Disk Server\"\n command = \"ldm add-vdsdev #{client_disk} #{vds_disk}@primary-vds0\"\n output = execute_command(options,message,command)\n end\n return\nend", "def initialize(options={})\n @logger = options.delete(:logger) || self.class.default_logger\n @options = options\n options.host = '127.0.0.1' if options.host.nil? || options.host == 'localhost'\n options.port ||= 9312\n\n # If you use localhost, MySQL insists on a socket connection, but Sphinx\n # requires a TCP connection. Using 127.0.0.1 fixes that.\n # so does passing in the socket file e.g. 
socket:'/usr/local/sphinx/var/run/sphinx.sock'\n # nb: sphinx.conf listen definition must specify mysql41 as the protocol, e.g.,\n # listen = localhost:9312:mysql41\n\n super( { symbolize_keys: true, database_timezone: :utc, application_timezone: :local }.merge( options ) )\n end", "def initialize( options )\n\t\tTreequel.logger.formatter = Treequel::ColorLogFormatter.new( Treequel.logger )\n\n\t\tif options.debug\n\t\t\t$DEBUG = true\n\t\t\t$VERBOSE = true\n\t\t\tTreequel.logger.level = Logger::DEBUG\n\t\telsif options.loglevel\n\t\t\tTreequel.logger.level = Treequel::LOG_LEVELS[ options.loglevel ]\n\t\tend\n\n\t\t@options = options\n\t\tif @options.all?\n\t\t\t@options[:attrtypes] =\n\t\t\t\t@options[:objectclasses] =\n\t\t\t\t@options[:syntaxes] =\n\t\t\t\t@options[:matching_rules] =\n\t\t\t\t@options[:matching_rule_uses] =\n\t\t\t\ttrue\n\t\tend\n\n\t\t@directory = Treequel.directory( options.ldapurl )\n\t\t@prompt = HighLine.new\n\n\t\[email protected]_at = @prompt.output_cols - 10\n\n\t\tself.log.debug \"Created new treewhat command object for %s\" % [ @directory ]\n\tend", "def connect_to_gdom_console(options)\n check_cdom_vntsd()\n check_gdom_exists(options)\n check_gdom_is_running(options)\n vcc_port = get_gdom_console_port(options)\n vcc_port = vcc_port.chomp\n handle_output(options,\"\") \n handle_output(options,\"To connect to console of Guest Domain #{options['name']} type the following command: \")\n handle_output(options,\"\") \n handle_output(options,\"telnet localhost #{vcc_port}\")\n handle_output(options,\"\") \n return\nend", "def create(_options)\n raise NotImplementedError\n end", "def create_nic(options)\n Nic.create(options.merge(:server_id => self.id))\n end", "def create(name)\n configure([\"interface #{name}\", 'no switchport'])\n end", "def create\n name, type = resource[:name].split('/')\n rdata = resource[:rdata]\n ttl = resource[:ttl]\n case type\n when 'MX'\n Array(rdata).each_with_index do |exchange, index|\n preference = Array(resource[:preference])[index]\n nsupdate(\"server #{server}\n update add #{name} #{ttl} MX #{preference} #{exchange}\n send\")\n end\n when 'SRV'\n Array(rdata).each_with_index do |target, index|\n port = Array(resource[:port])[index]\n weight = Array(resource[:weight])[index]\n priority = Array(resource[:priority])[index]\n nsupdate(\"server #{server}\n update add #{name} #{ttl} SRV #{priority} #{weight} #{port} #{target}\n send\")\n end\n else\n nsupdate(\"server #{server}\n update add #{name} #{ttl} #{type} #{Array(rdata).first}\n send\")\n end\n end" ]
[ "0.6064956", "0.60221183", "0.55477023", "0.5499154", "0.54941326", "0.5460354", "0.5424016", "0.5369096", "0.5354829", "0.5324311", "0.5321214", "0.53147143", "0.5308993", "0.53043556", "0.52905476", "0.5269478", "0.52592635", "0.52573234", "0.5248966", "0.5223063", "0.5221611", "0.5220858", "0.5213802", "0.52058554", "0.5205364", "0.51932114", "0.5191507", "0.51735294", "0.5165734", "0.5153798", "0.5149431", "0.51354903", "0.5095465", "0.509328", "0.50929666", "0.50927573", "0.5091255", "0.507079", "0.506457", "0.5061077", "0.5060642", "0.5054271", "0.50507104", "0.5044791", "0.50392056", "0.50353813", "0.5035171", "0.5032984", "0.5023865", "0.50207424", "0.502039", "0.5013796", "0.5010311", "0.50102305", "0.50017893", "0.50010335", "0.49913245", "0.49908116", "0.4977871", "0.49768302", "0.49623886", "0.49580303", "0.4956458", "0.49557495", "0.4949359", "0.49483094", "0.49438676", "0.49438375", "0.4938587", "0.49380857", "0.49379385", "0.4936633", "0.4934832", "0.49204355", "0.48957562", "0.48938686", "0.4881119", "0.4870183", "0.4869755", "0.4869641", "0.48664793", "0.48630217", "0.48618576", "0.48613864", "0.48562896", "0.48512077", "0.4841142", "0.48410386", "0.48211", "0.48091358", "0.48068908", "0.48035598", "0.4787475", "0.47865716", "0.47855008", "0.47849363", "0.47834885", "0.47831285", "0.478176", "0.47812665" ]
0.5793687
2
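Illustrative note (not part of the dataset rows): the row above pairs the query "Create a new DNS monitor with the given +options+." with an initializer that merges DEFAULT_OPTIONS, assigns each option through public_send, and builds a Resolv::DNS resolver. The following minimal Ruby sketch shows that pattern in a runnable form; the DNSMonitor class name, the DEFAULT_OPTIONS values, and the timeout/record_type attributes are assumptions added for illustration only and are not taken from the row itself.

require 'resolv'

# Hypothetical wrapper class; only the initializer pattern mirrors the row above.
class DNSMonitor
  DEFAULT_OPTIONS = { timeout: 5, record_type: 'A' }.freeze  # assumed defaults

  attr_accessor :timeout, :record_type
  attr_reader :resolver

  def initialize( options = DEFAULT_OPTIONS )
    # Merge caller options over the defaults, tolerating a nil argument.
    options = DEFAULT_OPTIONS.merge( options || {} )
    # Assign each option via its writer, e.g. self.timeout = 2.
    options.each do |name, value|
      self.public_send( "#{name}=", value )
    end
    @resolver = Resolv::DNS.new
  end
end

monitor = DNSMonitor.new( timeout: 2 )
# monitor.resolver can now issue lookups, e.g. via getresources.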
Run the domain check for each of the specified Hash of +nodes+ and return a Hash of updates for them based on their DNS domain record's status.
def run( nodes ) self.log.debug "Got %d nodes to check with %p" % [ nodes.length, self ] lookups = self.create_lookups( nodes ) return self.wait_for_responses( lookups, nodes ) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run( nodes )\n\t\t\tself.log.debug \"Got nodes to check with %p: %p\" % [ self, nodes ]\n\n\t\t\trecords = nodes.each_with_object( {} ) do |(identifier, node), hash|\n\t\t\t\tself.log.debug \"Looking up whois info for %p (%p)\" % [ identifier, node ]\n\t\t\t\thash[ identifier ] = self.client.lookup( node['name'] )\n\t\t\tend\n\n\t\t\treturn records.each_with_object( {} ) do |(identifier, record), hash|\n\t\t\t\tparser = record.parser\n\t\t\t\thash[ identifier ] = self.parse_record( parser, identifier )\n\t\t\tend\n\n\t\tend", "def find_applying_nodes(hosts, statuses = [])\n Log.debug(\"checking applying status of #{hosts.inspect}\")\n @client.filter[\"identity\"].clear\n hosts.each do |host|\n @client.identity_filter(host)\n end\n\n results = @client.status\n\n hosts.each do |host|\n result = results.select { |r| r[:sender] == host }.first\n status = statuses.select { |s| s[:name] == host }.first\n\n unless status\n status = make_status(host)\n statuses << status\n end\n\n if result\n # check the value of applying as defined in the agent ddl\n if result[:data][:applying] == true\n # we're applying\n if result[:data][:initiated_at]\n # it's a new agent, we can record when it started\n Log.debug(\"#{host} run was started at #{result[:data][:initiated_at]}\")\n status[:initiated_at] = result[:data][:initiated_at]\n else\n Log.debug(\"#{host} run started\")\n end\n else\n # Here we check the \"asked to run but not yet started\" state.\n if result[:data][:lastrun].to_i >= status[:initiated_at]\n Log.debug(\"#{host} run completed\")\n # The node has finished applying, remove from the running set\n statuses.reject! { |s| s[:name] == host }\n next\n else\n # We haven't started yet that we can see, increment the check counter\n status[:checks] += 1\n Log.debug(\"#{host} starting, checks #{status[:checks]}\")\n end\n end\n else\n # We didn't get a result from this host, log and record a check happened\n log(\"Host #{host} did not respond to the status action.\")\n status[:no_response] += 1\n end\n\n if status[:no_response] >= 5\n # If we missed many responses to status, assume it's a dead node\n log(\"Host #{host} failed to respond multiple times. Skipping.\")\n statuses.reject! { |s| s[:name] == host }\n end\n\n if status[:checks] >= 5\n # If we hit more than 5 checks, assume it couldn't start\n log(\"Host #{host} did not move into an applying state. Skipping.\")\n statuses.reject! 
{ |s| s[:name] == host }\n end\n end\n\n return statuses\n end", "def wait_for_responses( lookups, nodes )\n\t\t\tupdate = {}\n\n\t\t\tuntil lookups.empty?\n\n\t\t\t\tlookups.keys.each do |thr|\n\t\t\t\t\tnext if thr.alive?\n\n\t\t\t\t\tidentifier = lookups.delete( thr )\n\t\t\t\t\tbegin\n\t\t\t\t\t\trecords = thr.value\n\n\t\t\t\t\t\tif !records\n\t\t\t\t\t\t\tupdate[ identifier ] = { error: \"Lookup failed (timeout).\" }\n\t\t\t\t\t\telsif records.empty?\n\t\t\t\t\t\t\tupdate[ identifier ] = { error: \"Lookup failed (no records returned).\" }\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tnode_data = nodes[ identifier ]\n\t\t\t\t\t\t\tupdate[ identifier ] = self.compare_values( records, node_data )\n\t\t\t\t\t\tend\n\t\t\t\t\trescue SystemCallError => err\n\t\t\t\t\t\tmsg = \"%p: %s\" % [ err.class, err.message ]\n\t\t\t\t\t\tself.log.error \"%s while looking up %s\" % [ msg, identifier ]\n\t\t\t\t\t\tupdate[ identifier ] = { error: msg }\n\t\t\t\t\tend\n\t\t\t\tend\n\n\t\t\tend\n\n\t\t\treturn update\n\t\tend", "def check\n begin\n domains = File.read(@params[:domains_path]).split\n rescue Errno::ENOENT \n STDERR.puts \"File #{@params[:domains_path]} does not exist\"\n exit 1 \n end\n\n checks = domains.map do |domain|\n rdap = Net::HTTP.get(URI(\"#{@params[:rdap_url]}/domain/#{domain}\"))\n db.check domain, rdap \n end\n\n message = checks.map {|check| check.status}.to_json\n STDOUT.puts message\n\n if @params[:gchat] \n # GChat gets every status update\n GChat.new(@params[:gchat]).message(message) \n end\n if @params[:mandrill_key] && @params[:mandrill_email] && checks.any?(&:changed?)\n # We only email changed domains\n Mandrill.new(@params[:mandrill_key], @params[:mandrill_email]).message(message)\n end\n end", "def dns_check\n gen_host_records # These are the hosts we have\n load_all_subnets # These are the DNS entries\n \n # We want a standard layout, with the hypervisor API entries being \n @host_record.each do |hr| # Array of host record Hash's\n hn = hr[:hostname]\n shn = hn.split('.',2)[0] # Remove the domain\n forward_hr = @forward_host_record[hn] # Find Host Record\n if forward_hr.nil?\n # We have no IPAM entry for this hostname\n if (rhr = @reverse_host_records[hr[:ip]])\n puts \"Only Reverse IPAM entry for #{shn}: #{rhr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n else\n puts \"No IPAM entry for hostrecord: #{hr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n end\n else\n # We have an IPAM record for this hostname\n if forward_hr[:ip] != hr[:ip]\n puts \"IP mismatch #{shn} #{hr[:ip]} != #{forward_hr[:ip]} for IPAM: #{forward_hr}\"\n elsif forward_hr[:hostname] != hn\n # Reference must be via ALIASES or CNAMES\n if forward_hr[:aliases].include?(shn)\n puts \"Hostname #{shn} is an ALIAS. IPAM: #{forward_hr}\"\n elsif forward_hr[:cnames].include?(hn)\n puts \"Hostname #{shn} is a CNAME. 
IPAM: #{forward_hr}\"\n end\n end\n end\n end\n \n # We want to find IPAM entries, not matching existing @host_record entries\n @reverse_host_records.each do |ip, ahr| # Hash to array of host records from IPAM, indexed by IP\n ahr.each do |hr| # One IP can have multiple host records, with associated ALIAS and CNAME records\n local_hr = @host_record_index[hr[:hostname]]\n if local_hr.nil?\n puts \"No local entry #{hr[:hostname]} for #{hr}\"\n end\n end\n end\nend", "def run( nodes )\n\t\t\tresults = {}\n\t\t\thydra = Typhoeus::Hydra.new( self.runner_settings )\n\n\t\t\tnodes.each do |identifier, node|\n\t\t\t\tself.log.debug \"Making request for node %s\" % [ identifier ]\n\t\t\t\trequest = self.request_for_node( node )\n\t\t\t\trequest.on_complete do |response|\n\t\t\t\t\tself.log.debug \"Handling response for %s\" % [ identifier ]\n\t\t\t\t\tresults[ identifier ] =\n\t\t\t\t\t\tself.make_response_results( response, node )\n\t\t\t\tend\n\t\t\t\thydra.queue( request )\n\t\t\tend\n\n\t\t\thydra.run\n\n\t\t\treturn results\n\t\tend", "def transform_hosts(hosts)\n require 'time'\n\n node_data = []\n\n hosts.each do |host|\n if host[:report_timestamp].nil?\n # This can happen in weird cases. Mark as an expired node, so\n # the expired logic doesn't try to do math on a nil timestamp.\n last_checkin = nil\n formatted_checkin = 'N/A'\n host[:expired] = nil\n else\n last_checkin = Time.now - Time.parse(host[:report_timestamp])\n formatted_checkin = sprintf(\"%#{@options.round_to}f\",(last_checkin * @options.divisor).abs)\n end\n node_data << {\n :last_checkin => last_checkin,\n :expired => host[:expired].nil? ? false : host[:expired],\n :certname => host[:certname],\n :environment => host[:report_environment].nil? ? 'N/A' : host[:report_environment],\n :status => host[:latest_report_status].nil? ? 'N/A' : host[:latest_report_status],\n :formatted_checkin => formatted_checkin\n }\n end\n\n unless @options.environments.empty?\n node_data.delete_if {|node| not @options.environments.include? node[:environment] }\n end\n unless @options.statuses.empty?\n node_data.delete_if {|node| not @options.statuses.include? node[:status] }\n end\n\n node_data\n end", "def parse_dns(nodeList)\n find_if_exists = 0\n domain = Hash.new{|hsh,key| hsh[key] = []}\n address = Hash.new{|hsh,key| hsh[key] = []}\n cname = Hash.new{|hsh,key| hsh[key] = []}\n\n nodeArr = []\n # To remove null values for '.split' method to work\n nodeList.each do |node|\n if node == ''|| node.empty? 
|| node == \"\\n\"\n next\n end\n nodeArr.push(node.strip.split(','))\n end\n # Creating the key value Hash\n nodeArr.each do |(type,domain,source)|\n if type == \"CNAME\"\n cname[domain.strip.to_sym].push(source.strip)\n elsif type == \"A\"\n address[domain.strip.to_sym].push(source.strip)\n end\n end\n # Adding CNAME hash and ADDRESS hash into domain hash\n domain[:CNAME].push(cname)\n domain[:ADDRESS].push(address)\n return domain\nend", "def update_from_host_store!\n\t\tputs \"Invoke internal procedures to update the sub-domain list from the host store.\"\n\t\t# Step 1 - obtain the latest sub-domains\n\t\tmy_tracker = Wmap::HostTracker.instance\n\t\tmy_tracker.data_dir = @data_dir\n\t\tmy_tracker.hosts_file = my_tracker.data_dir + \"/\" + \"hosts\"\n\t\tmy_tracker.load_known_hosts_from_file(my_tracker.hosts_file)\n\t\tsubs = my_tracker.dump_sub_domains - [nil,\"\"]\n\t\tmy_tracker = nil\n\t\t# Step 2 - update the sub-domain list\n\t\tunless subs.empty?\n\t\t\t#subs.map { |x| self.add(x) unless domain_known?(x) }\n\t\t\tself.bulk_add(subs,@max_parallel)\n\t\tend\n\t\tputs \"Update discovered sub-domains into the store: #{@known_internet_sub_domains}\"\n\t\tself.save!(file_domains=@file_sub_domains, domains=@known_internet_sub_domains)\n\trescue Exception => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\t\treturn nil\n\tend", "def compare_domain(args)\r\n server_list = args[:server_list]\r\n domain = args[:domain_name]\r\n rtype = args[:rtype]\r\n rdata = args[:actual_rdata]\r\n rdata = (rtype == \"NAPTR\") ? rdata : rdata.downcase\r\n r = \"\"\r\n failed_rlist = []\r\n @timeout = 30\r\n sleep 15 if args[:sleepfirst]\r\n server_list.each do |server|\r\n dig_pass = \"succeed to dig @#{server} #{domain} #{rtype} => #{rdata}\"\r\n dig = `dig @#{server} #{domain} #{rtype}`\r\n if dig.include?(rdata)\r\n puts dig_pass\r\n else\r\n puts \"dig @#{server} #{domain} #{rtype} failed as expected!\" if args[:expected_dig_fail]\r\n return \"succeed\" if args[:expected_dig_fail]\r\n begin\r\n Timeout::timeout(@timeout){\r\n while !dig.include?(rdata)\r\n sleep 5\r\n dig_retry = `dig @#{server} #{domain} #{rtype}`\r\n puts dig_pass if dig_retry.include?(rdata)\r\n break if dig_retry.include?(rdata)\r\n end\r\n }\r\n rescue Timeout::Error\r\n puts \"Error => dig @#{server} #{domain} #{rtype} timed out!\"\r\n failed_rlist << \"failed\"\r\n end\r\n end\r\n end\r\n failed_rlist.empty? ? 
'succeed' : 'failed'\r\n end", "def update_dns()\n #\n # Handle each host in the config file at a time\n #\n @config['hosts'].each {|h|\n #\n # Skip update if current public IP matches the IP for the host in the cache file\n #\n if @cache[h['host']] && @myip.eql?(@cache[h['host']]['ip'])\n @logger.info \"Skipping #{h['host']} - Already pointing to #{@myip}\"\n else\n url = \"https://domains.google.com/nic/update?hostname=#{h['host']}&myip=#{@myip}\"\n @logger.info \"Updating host [#{h['host']}] - #{url}\"\n\n #\n # Access Google Domains API to update IP\n #\n open(url,\n :http_basic_authentication => [h['username'],h['password']],\n \"User-Agent\" => \"#{@options[:user_agent]}\") {|r|\n if r.status[0] == \"200\"\n r.each_line {|line|\n if (/(?<sts>(good|nochg))\\s+(?<ip>(\\d+\\.\\d+\\.\\d+\\.\\d+)?)/ =~ line)\n #\n # Cache if API call was successful\n #\n @cache[h['host']] = {'ip' => ip}\n @logger.debug \"[#{@responses[sts][0]}][#{sts}] : [#{@responses[sts][1]}]\"\n else\n @logger.warn \"[#{@responses[line][0]}][#{line}] : [#{@responses[line][1]}]\"\n end\n }\n else\n @logger.error \"Error status returned #{r.status.inspect}\"\n end\n }\n write_cache_file\n end\n }\n end", "def check_domains(sld_or_domains, tlds=nil)\n query_args = {'Command' => 'Check'}\n if sld_or_domains.is_a? Array\n query_args['DomainList'] = sld_or_domains.join(',')\n else\n query_args['SLD'] = sld_or_domains\n end\n unless tlds == nil\n if tlds.is_a? Array\n query_args['TLDList'] = tlds.join(',')\n else\n query_args['TLD'] = tlds\n end\n end\n query_push(query_args)\n get_response\n # Set up a hash of domains\n checked_domains = {}\n if query_args['DomainList']\n sld_or_domains.each {|domain| checked_domains[domain] = nil}\n elsif query_args['TLDList']\n tlds.each {|tld| checked_domains[sld_or_domains + '.' + tld] = nil}\n else\n checked_domains[sld_or_domains + '.' + tlds] = nil\n end\n # RRPCodes: 210 = available, 211 = not available\n if checked_domains.length > 1\n # If we have multiple domains, run a loop to fill in results\n x = 1\n @result['DomainCount'].to_i.times do\n domain = @result['Domain' + x.to_s]\n if @result['RRPCode' + x.to_s].to_i == 210\n checked_domains[domain] = true\n elsif @result['RRPCode' + x.to_s].to_i == 211\n checked_domains[domain] = false\n end\n x += 1\n end\n else\n if @result['RRPCode'].to_i == 210\n checked_domains[sld_or_domains + '.' + tlds] = true\n elsif @result['RRPCode'].to_i == 211\n checked_domains[sld_or_domains + '.' + tlds] = false\n end\n end\n puts checked_domains.to_yaml\n end", "def gather_pollable_domains\n @logger.info 'CsyncJob Generate: Gathering current domain(s) data'\n Nameserver.select(:hostname_puny, :domain_id).all.each do |ns|\n %i[secure insecure].each do |i|\n @input_store[i][ns.hostname_puny] = [] unless @input_store[i].key? ns.hostname_puny\n end\n\n append_domains_to_list(ns)\n end\n end", "def update_node_info_cache()\n @all_stats = []\n @nodes.each { |node|\n ip = node.private_ip\n acc = AppControllerClient.new(ip, @@secret)\n\n begin\n @all_stats << acc.get_stats()\n rescue FailedNodeException\n Djinn.log_warn(\"Failed to get status update from node at #{ip}, so \" +\n \"not adding it to our cached info.\")\n end\n }\n end", "def domain_check(*domains)\n domains.flatten!\n response = send_request(domain_check_xml(*domains))\n\n get_result(:xml => response, :callback => :domain_check_process)\n end", "def refresh_ip_sites\n\t\tputs \"Refresh all entries that contain an IP address instead of a FQDN ... 
\"\n\t\tsites=get_ip_sites\n\t\tlive_sites=sites.delete_if { |x| @known_sites[x]['code'] == 10000 or @known_sites[x]['code'] == 20000 }\n\t\tchanges=Hash.new\n\t\tchanges=bulk_refresh(live_sites)\n\t\t@known_sites.merge!(changes)\n\t\tputs \"Done refresh IP sites.\"\n\t\treturn changes\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def refresh_all\n\t\tputs \"Refresh all the entries in the local host repository in one shot.\"\n\t\tchanges=Hash.new\n\t\thosts=@known_hosts.keys\n\t\t@known_hosts=Hash.new\n\t\tchanges=bulk_add(hosts)\n\t\t@known_hosts.merge!(changes)\n\t\t#@known_hosts.keys.map do |key|\n\t\t#\tunless is_ip?(key)\n\t\t#\t\thost=refresh(key)\n\t\t#\t\tchanges.push(host) unless host.nil?\n\t\t#\tend\n\t\t#end\n\t\tputs \"\\n#{changes.size} Entries Refreshed:\" if changes.size>0\n\t\t#changes.map { |x| puts x }\n\t\tputs \"Done refreshing the local hosts.\"\n\t\treturn changes\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\"\n\tend", "def ddns_update(ip)\n ddns_update_he(HEuser, HEpw, HEtid, ip)\n {\n \"canishe.com\" => [ \"mail\", \"www\", \"@\", \"*\" ],\n \"gaelan.me\" => [ \"www\", \"@\" ],\n \"psd-chinese.net\" => [ \"www\", \"@\" ],\n }.each do |zone, hosts|\n hosts.each do |host|\n ddns_update_iwmn(host, zone, ip, IWMNuser, IWMNpw)\n end\n end\n\t{\n \"marimbaboise.com\" => [ \"www\", \"@\" ],\n }.each do |zone, hosts|\n hosts.each do |host|\n ddns_update_enom(host, zone, ip, BRpw)\n end\n end\nend", "def check_hostnames\n all_good = true\n \n @check_groups.each do |group|\n group.checks.each do |check|\n unless check.hostname && Dnsruby::Resolv.getaddress(check.hostname)\n puts \"Error: check #{check.name} has invalid hostname '#{check.hostname}'\"\n all_good = false\n end\n end\n end\n \n all_good\n end", "def domains_slaves_sync(domains, masterip)\n existing_domains = domains_slaves_list\n\n # Add any missing domains\n responses = []\n domains.each do |domain|\n unless existing_domains.include?(domain)\n response = domains_slaves_add(domain, masterip)\n response[:domain] ||= domain\n responses << response\n end\n end\n\n # Now check the existing domains\n existing_domains.keys.sort.each do |domain|\n if domains.include?(domain)\n # Update the masterip if it isn't correct\n if existing_domains[domain] != masterip\n response = domains_slaves_updatemasterip(domain, masterip)\n response[:domain] ||= domain\n responses << response\n end\n else\n # Delete domain; not on list\n response = domains_slaves_delete(domain)\n response[:domain] ||= domain\n responses << response\n end\n end\n\n responses\n end", "def create_lookups( nodes )\n\t\t\treturn nodes.each_with_object( {} ) do |(identifier, node), hash|\n\t\t\t\tself.log.debug \"Creating lookup for node: %p\" % [ node ]\n\t\t\t\tname = node['name'] or next\n\t\t\t\trecord_type = node['record_type'] || 'A'\n\t\t\t\trecord_class = Resolv::DNS::Resource::IN.const_get( record_type ) or\n\t\t\t\t\traise \"Unsupported record type %p!\" % [ record_type ]\n\n\t\t\t\tself.log.debug \"Looking up %s record for %s (%s)\" % [ record_type, name, identifier ]\n\t\t\t\tthr = Thread.new do\n\t\t\t\t\tself.resolver.getresources( name, record_class )\n\t\t\t\tend\n\t\t\t\thash[ thr ] = identifier\n\t\t\tend\n\t\tend", "def domain_update(domain, fields)\n unless ([ :admin, :tech, :billing, :nservers ] - fields.keys).empty?\n raise ArgumentError, \"Required fields not found\"\n end\n query :domain_modify, {\n domain: domain,\n admin_c: fields[:admin],\n tech_c: fields[:tech],\n 
billing_c: fields[:billing],\n ns_list: fields[:nservers].join(':')\n }\n end", "def hostnames(nodes)\n @referenced_nodes ||= ObjectList.new\n nodes = listify(nodes)\n nodes.each_node do |node|\n @referenced_nodes[node.name] ||= node\n end\n return nodes.values.collect {|node| node.domain.name}\n end", "def get_host_keys(_nodes, metadata)\n updated_metadata = {}\n # Get the list of nodes, per hostname (just in case several nodes share the same hostname)\n # Hash<String, Array<String> >\n hostnames = Hash.new { |hash, key| hash[key] = [] }\n metadata.each do |node, node_metadata|\n if node_metadata[:host_ip]\n hostnames[node_metadata[:host_ip]] << node\n elsif node_metadata[:hostname]\n hostnames[node_metadata[:hostname]] << node\n end\n end\n unless hostnames.empty?\n host_keys_for(*hostnames.keys).each do |hostname, ip|\n hostnames[hostname].each do |node|\n updated_metadata[node] = ip\n end\n end\n end\n updated_metadata\n end", "def process_domains\n domains.each do |domain|\n params = options\n params[:host] = configuration.host\n params[:server] = servers[domain][\"server\"]\n compiler = YMDP::Compiler::Base.new(domain, git_hash, params)\n \n compiler.process_all\n end\n end", "def updates\n @report = {}\n Server.find_each do |server|\n # Go through each package. In some cases (gems) there may be multiple\n # versions of a package on the machine.\n packages = {}\n server.servers_to_packages.find_each do |package_map|\n next unless package_map.status == 'pending'\n package = Package.find(package_map.package_id)\n\n new = {}\n new['provider'] = package.provider\n new['version'] = package.version\n packages[package.name] = [] unless packages.key?(package.name)\n packages[package.name] << new\n end\n @report[server.hostname] = packages unless packages.empty?\n end\n end", "def get_and_update_node_state!(node, attribute_names)\n ret = {}\n external_ref = node[:external_ref]\n external_ref_changed = false\n attribute_names.each do |attribute_name|\n case attribute_name\n when :host_addresses_ipv4\n external_ref_changed = true if NodeState.update_host_addresses_ipv4!(ret, external_ref, node) \n when :fqdn\n external_ref_changed = true if NodeState.update_fqdn!(ret, external_ref, node) \n else\n Log.error(\"Not treating update of BOSH node attribute '#{attribute_name}'\")\n end\n end\n node.update(external_ref: external_ref) if external_ref_changed\n ret \n end", "def fetch_nodes(nodes, dns_cache)\n ret = []\n nodes.each_with_index do |item, index|\n ip, port = item\n host = dns_cache.fetch(ip) {\n |missing_ip|\n host = Resolv.getname(missing_ip)\n dns_cache[ip] = host\n host\n }\n name = \"#{host}:#{port}\"\n role = index == 0 ? 'master' : 'slave'\n node = {\n :host => host, :port => port,\n :name => name, :ip => ip,\n :role => role\n }\n ret << node\n end\n ret\n end", "def health_check\n ret = {}\n unready = []\n NodeObject.all.each do |node|\n unready << node.name unless node.ready?\n end\n ret[:nodes_not_ready] = unready unless unready.empty?\n failed = Proposal.all.select { |p| p.active? && p.failed? 
}\n ret[:failed_proposals] = failed.map(&:display_name) unless failed.empty?\n ret\n end", "def apply(nodes)\n node_manager.assert_known(nodes)\n for node in nodes\n node_manager.find(node).apply\n end\n end", "def compare_ns_records( records, hosts )\n\t\t\trecord_hosts = Set.new( records.map(&:name) )\n\t\t\thosts = Set.new( hosts.map {|name| Resolv::DNS::Name.create(name + '.')} )\n\n\t\t\tself.log.debug \"Comparing %p to %p\" % [ record_hosts, hosts ]\n\n\t\t\tstatus = nil\n\t\t\tif ( record_hosts ^ hosts ).empty?\n\t\t\t\tstatus = { ns_record: record_hosts.map(&:to_s) }\n\t\t\telsif !( subset = record_hosts - hosts ).empty?\n\t\t\t\tstatus = { error: \"missing NS records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\telsif !( subset = hosts - record_hosts ).empty?\n\t\t\t\tstatus = { error: \"extra NS records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\tend\n\n\t\t\treturn status\n\t\tend", "def query_files_hosts(hostlist, hosts)\n report_dir = get_report_dir\n\n existing_nodes = hostlist.map{|x| x[:certname]}\n\n local_host_template = {\n :deactivated=>false,\n :latest_report_hash=>nil,\n :facts_environment=>nil,\n :cached_catalog_status=>\"not_used\",\n :report_environment=>nil,\n :latest_report_corrective_change=>nil,\n :catalog_environment=>nil,\n :facts_timestamp=>nil,\n :latest_report_noop=>nil,\n :expired=>false,\n :latest_report_noop_pending=>nil,\n :report_timestamp=>nil,\n :certname=>nil,\n :catalog_timestamp=>nil,\n :latest_report_job_id=>nil,\n :latest_report_status=>nil\n }.freeze\n\n local_host_reports = []\n\n if File.directory?(report_dir)\n @logger.debug(\"Processing Report Directory: #{report_dir}\")\n\n Dir.glob(\"#{report_dir}/*\").each do |node_dir|\n @logger.debug(\"Processing Node Directory: #{node_dir}\")\n\n latest_report = Dir.glob(\"#{node_dir}/*.yaml\").sort.last\n if latest_report\n @logger.debug(\"Processing YAML Report: #{latest_report}\")\n\n begin\n require 'puppet'\n\n transaction_report = YAML.load_file(latest_report)\n\n unless (hosts.empty? 
|| hosts.include?(transaction_report.host))\n @logger.debug(\"Skipping #{transaction_report.host} since it is not in the host list\")\n next\n end\n\n if existing_nodes.include?(transaction_report.host)\n @logger.debug(\"Skipping #{transaction_report.host} since it already exists\")\n next\n end\n\n local_host_data = Marshal.load(Marshal.dump(local_host_template))\n local_host_data[:latest_report_hash] = transaction_report.catalog_uuid\n local_host_data[:facts_environment] = transaction_report.environment\n local_host_data[:report_environment] = transaction_report.environment\n local_host_data[:latest_report_corrective_change] = transaction_report.corrective_change\n local_host_data[:catalog_environment] = transaction_report.environment\n local_host_data[:facts_timestamp] = transaction_report.time.to_s\n local_host_data[:latest_report_noop] = transaction_report.noop\n local_host_data[:latest_report_noop_pending] = transaction_report.noop_pending\n local_host_data[:report_timestamp] = transaction_report.time.to_s\n local_host_data[:certname] = transaction_report.host\n local_host_data[:catalog_timestamp] = transaction_report.time.to_s\n local_host_data[:latest_report_job_id] = transaction_report.catalog_uuid\n local_host_data[:latest_report_status] = transaction_report.status\n\n hostlist << local_host_data\n\n @logger.debug(\"Processed Host Report: #{local_host_data}\")\n rescue => e\n @logger.warn \"Error processing report at '#{latest_report}': #{e}\"\n end\n else\n @logger.debug \"Could not find latest report in '#{node_dir}'\"\n end\n end\n else\n @logger.debug \"Could not find report directory at '#{report_dir}'\"\n end\n end", "def resolve_ip_sites\n\t\tputs \"Resolve sites that contain an IP address. Update the site cache table once a hostname is found in the local host table.\" if @verbose\n\t\tupdates=Array.new\n\t\tsites=get_ip_sites\n\t\thost_tracker=Wmap::HostTracker.instance\n\t\thost_tracker.data_dir=@data_dir\n\t\thost_tracker.hosts_file = host_tracker.data_dir + \"/\" + \"hosts\"\n\t\thost_tracker.load_known_hosts_from_file\n\t\tsites.map do |site|\n\t\t\tputs \"Work on resolve the IP site: #{site}\" if @verbose\n\t\t\tip=url_2_host(site)\n\t\t\thostname=host_tracker.local_ip_2_host(ip)\n\t\t\tif hostname.nil?\n\t\t\t\tputs \"Can't resolve #{ip} from the local host store. 
Skip #{site}\" if @verbose\n\t\t\telse\n\t\t\t\tputs \"Host-name found for IP #{ip}: #{hostname}\" if @verbose\n\t\t\t\tupdates.push(site)\n\t\t\t\trefresh(site)\n\t\t\tend\n\t\tend\n\t\tupdates.sort!\n\t\tputs \"The following sites are now refreshed: #{updates}\" if @verbose\n\t\thost_tracker=nil\n\t\treturn updates\n\trescue Exception => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def verify_prior_nodes_live(node)\n if node.workorder.has_key?(\"rfcCi\")\n ci = node.workorder.rfcCi\n actionName = node.workorder.rfcCi.rfcAction\n else\n ci = node.workorder.ci\n actionName = node.workorder.actionName\n end\n \n timeout_sec = node['solr_api_timeout_sec'].to_i\n \n #get the map with all cloud's id & deployment order in the form |key,value| => |cloudId, deployment_order|\n #ex {34951930=>\"7\", 35709237=>\"8\", 34951924=>\"4\"}\n cloudIdsWithDpmtOrderMap = cloud_deployment_order(node)\n Chef::Log.info(\"Cloud id & deployment order map : #{cloudIdsWithDpmtOrderMap.to_json}\")\n \n #get array of solrcloud nodes for the action selected\n #get list of all solrcloud nodes which are selected for this action in the deployment\n nodesInAction = get_solrcloud_instances_by_action(node, actionName)\n \n thisNodeCiName = ci[:ciName]\n Chef::Log.info(\"This solrcloud node's ciName : #{thisNodeCiName}\")\n \n #get the node cloud id \"solrcloud-34951924-1\" => \"34951924\"\n thisCloudId = thisNodeCiName.split('-')[1]\n \n #get the deployment order of this node's cloud\n thisNodeCloudDpmtOrder = cloudIdsWithDpmtOrderMap.fetch(thisCloudId.to_i)\n Chef::Log.info(\"This node's cloud deployment order : #{thisNodeCloudDpmtOrder}\")\n \n #get all cloud ids having deployment order <= node_cloud_deployment_order. This is required to make sure that all prior cloud deployment was completed\n #ex From all clouds {34951930=>\"7\", 35709237=>\"8\", 34951924=>\"4\"} for node_cloud_deployment_order = 7 => {34951930=>\"7\", 34951924=>\"4\"}\n #same node_cloud_id is inclusive because there may be multiple nodes in node's cloud.\n #This list is used to make sure that all nodes across this cloud list are deployed first\n cloudIdsTobeDeployedPrior = []\n cloudIdsWithDpmtOrderMap.each do |k, v|\n if v.to_i <= thisNodeCloudDpmtOrder.to_i\n cloudIdsTobeDeployedPrior.push k\n end\n end\n Chef::Log.info(\"Cloud ids to be deployed before : #{cloudIdsTobeDeployedPrior.to_json}\")\n \n #get solrcloud nodes for cloud ids to be deployed prior\n nodeIndexesTobeDeployedPrior = []\n nodesInAction.each do |n|\n ciName = n['ciName']\n cloudId = ciName.split('-')[1]\n index = ciName.split('-', 2)[1]\n if cloudIdsTobeDeployedPrior.include? cloudId.to_i\n # prefx the cloud deployment order to determine the order of solr instace in the deployment\n # User might select the lower cloudId with higher deployment order and vice-versa so deployment order will be useful\n nodeIndexesTobeDeployedPrior.push cloudIdsWithDpmtOrderMap[cloudId.to_i]+\"-\"+index\n end\n end\n \n #sort solrcloud_nodes_tobe_deployed_prior by ciName (cloudId & compute index)\n nodeIndexesTobeDeployedPrior.sort! 
{|a, b| b <=> a}\n #default sorting is in descending order, we want to process the deployment in the ascending order of compute number\n #so reverse the order\n nodeIndexesTobeDeployedPrior.reverse!\n Chef::Log.info(\"solrclouds to executed before #{nodeIndexesTobeDeployedPrior.to_json}\")\n \n computeCloudIdIpMap = get_compute_number_to_ip_map(node)\n Chef::Log.info(\"compute number to ip map : #{computeCloudIdIpMap.to_json}\")\n # prefx the cloud deployment order to determine the order of solr instace in the deployment\n # User might select the lower cloudId with higher deployment order and vice-versa so deployment order will be useful\n thisNodeIndex = thisNodeCloudDpmtOrder+\"-\"+thisNodeCiName.split('-',2)[1]\n Chef::Log.info(\"This node index : #{thisNodeIndex}\")\n \n # select only the nodes with lower index & this node index\n nodeIndexesTobeDeployedPrior = nodeIndexesTobeDeployedPrior.select {|cloudIdIndex| cloudIdIndex <= thisNodeIndex}\n \n index = nodeIndexesTobeDeployedPrior.index {|id| id == thisNodeIndex}\n Chef::Log.info(\"index = #{index}\")\n \n wait_time = index * 10;\n Chef::Log.info(\"Allowing #{wait_time} seconds for prior nodes to start the deployment before\")\n sleep wait_time\n \n nodeIpsTobeDeployedPrior = []\n nodeIndexesTobeDeployedPrior.each do |nodeIndex|\n if !nodeIndex.eql? thisNodeIndex\n nodeIndexWithoutDpmtOrder = nodeIndex.split('-',2)[1]\n Chef::Log.info(\"nodeIndexWithoutDpmtOrder = #{nodeIndexWithoutDpmtOrder}\")\n nodeIpsTobeDeployedPrior.push computeCloudIdIpMap[nodeIndexWithoutDpmtOrder]\n end\n end\n \n # No need to check for other nodes to confirm those are live before processing this node as there are no prior nodes in the list\n if nodeIpsTobeDeployedPrior.empty?\n return\n end\n \n Chef::Log.info(\"nodeIpsTobeDeployedPrior = #{nodeIpsTobeDeployedPrior.to_json}\")\n host = nodeIpsTobeDeployedPrior[0]\n cluster_state = get_cluster_state(host, node['port_no'], timeout_sec)\n nodes_up_status = nodes_live?(host, node['port_no'], nodeIpsTobeDeployedPrior, timeout_sec)\n Chef::Log.info(\"Node live status : #{nodes_up_status}\")\n if !nodes_up_status \n error = \"Some of the prior nodes from list #{nodeIpsTobeDeployedPrior.to_json} in the deployment are not live.\"\n puts \"***FAULT:FATAL=#{error}\"\n raise error\n end\n end", "def parse_dns(dns_raw)\n dns = []\n dns_records = {}\n record_type_A = []\n record_type_A_IP = []\n record_type_CNAME = []\n record_type_CNAME_alias = []\n\n #adds each line to dns array and splipt them with \",\"\n dns_raw.each do |lines_in_files|\n dns.push([lines_in_files.split(\",\")])\n end\n\n #Checks for recordA,IP or recordCNAME and adds them to the respected array\n dns.each do |words_in_files|\n if words_in_files[0][0] == \"A\"\n record_type_A.push(words_in_files[0][1].strip)\n record_type_A_IP.push(words_in_files[0][2].strip)\n elsif words_in_files[0][0] == \"CNAME\"\n record_type_CNAME.push(words_in_files[0][1].strip)\n record_type_CNAME_alias.push(words_in_files[0][2].strip)\n end\n end\n\n #record_A hash stores values of recordA\n record_A = {\n :source => record_type_A,\n :ip => record_type_A_IP,\n }\n\n #recordCNAME hash stores values of recordCNAME\n record_CNAME = {\n :source => record_type_CNAME,\n :alias => record_type_CNAME_alias,\n }\n\n #dns_records gets both Hashes\n dns_records = {\n :A => record_A,\n :CNAME => record_CNAME,\n }\n\n #returns record dns_record with two hashes.\n return dns_records\nend", "def cert_needs_updating?(node)\n if !file_exists?([:node_x509_cert, node.name], [:node_x509_key, 
node.name])\n return true\n else\n cert = load_certificate_file([:node_x509_cert, node.name])\n if cert.not_after < Time.now.advance(:months => 2)\n log :updating, \"cert for node '#{node.name}' because it will expire soon\"\n return true\n end\n if cert.subject.common_name != node.domain.full\n log :updating, \"cert for node '#{node.name}' because domain.full has changed (was #{cert.subject.common_name}, now #{node.domain.full})\"\n return true\n end\n cert.openssl_body.extensions.each do |ext|\n if ext.oid == \"subjectAltName\"\n ips = []\n dns_names = []\n ext.value.split(\",\").each do |value|\n value.strip!\n ips << $1 if value =~ /^IP Address:(.*)$/\n dns_names << $1 if value =~ /^DNS:(.*)$/\n end\n dns_names.sort!\n if ips.first != node.ip_address\n log :updating, \"cert for node '#{node.name}' because ip_address has changed (from #{ips.first} to #{node.ip_address})\"\n return true\n elsif dns_names != dns_names_for_node(node)\n log :updating, \"cert for node '#{node.name}' because domain name aliases have changed\\n from: #{dns_names.inspect}\\n to: #{dns_names_for_node(node).inspect})\"\n return true\n end\n end\n end\n end\n return false\n end", "def run\n super\n res = []\n entity_name = _get_entity_name\n entity_type = _get_entity_type_string\n\n # skip cdns\n if !get_cdn_domains.select{ |x| entity_name =~ /#{x}/}.empty? || \n !get_internal_domains.select{ |x| entity_name =~ /#{x}/}.empty?\n _log \"This domain resolves to a known cdn or internal host, skipping\"\n return\n end\n\n # check that it resolves\n resolves_to = resolve_names entity_name\n unless resolves_to.first\n _log \"No resolution for this record, unable to check\"\n return \n end\n\n # We use their DNS servers to query\n nameservers= ['185.228.168.168', '185.228.168.169']\n _log \"Querying #{nameservers}\"\n dns_obj = Resolv::DNS.new(nameserver: nameservers)\n \n # Try twice, just in case (avoid FP's)\n res = dns_obj.getaddresses(entity_name)\n res.concat(dns_obj.getresources(entity_name, Resolv::DNS::Resource::IN::CNAME)).flatten\n\n # Detected only if there's no resolution\n if res.any?\n _log \"Resolves to #{res.map{|x| \"#{x.to_s}\" }}. Seems we're good!\"\n else\n source = \"CleanBrowsing\"\n description = \"The Cleanbrowsing DNS security filter focuses on restricting access \" + \n \"to malicious activity. 
It blocks phishing, spam and known malicious domains.\"\n \n _create_linked_issue(\"blocked_by_dns\", {\n status: \"confirmed\",\n additional_description: description,\n source: source, \n proof: \"Resolved to the following address(es) outside of #{source} (#{nameservers}): #{resolves_to.join(\", \")}\",\n to_reproduce: \"dig #{entity_name} @#{nameservers.first}\",\n references: [{ type: \"remediation\", uri: \"https://cleanbrowsing.org/\" }]\n }) \n \n # Also store it on the entity \n blocked_list = @entity.get_detail(\"suspicious_activity_detected\") || [] \n @entity.set_detail(\"suspicious_activity_detected\", blocked_list.concat([{source: source}]))\n\n end\n\n end", "def convert_fqdns_to_ips(nodes)\n if is_cloud?\n Djinn.log_debug(\"In a cloud deployment, so converting FQDNs -> IPs\")\n else\n Djinn.log_debug(\"Not in a cloud deployment, so not converting FQDNs -> IPs\")\n return nodes\n end\n\n if @creds[\"hostname\"] =~ /#{FQDN_REGEX}/\n begin\n @creds[\"hostname\"] = HelperFunctions.convert_fqdn_to_ip(@creds[\"hostname\"])\n rescue Exception => e\n Djinn.log_fatal(\"Failed to convert main hostname #{@creds['hostname']}\")\n HelperFunctions.log_and_crash(\"Failed to convert main hostname #{@creds['hostname']}\")\n end\n end\n \n nodes.each { |node|\n # Resolve the private FQDN to a private IP, but don't resolve the public\n # FQDN, as that will just resolve to the private IP.\n\n pri = node.private_ip\n if pri =~ /#{FQDN_REGEX}/\n begin\n node.private_ip = HelperFunctions.convert_fqdn_to_ip(pri)\n rescue Exception => e\n node.private_ip = node.public_ip\n end\n end\n }\n\n return nodes\n end", "def expiration_results\n domains = config[:domain].split(',')\n warning_days = config[:warning].to_i\n critical_days = config[:critical].to_i\n max_retries = 4\n\n results = {\n critical: {},\n warning: {},\n ok: {},\n unknown: {}\n }\n whois = Whois::Client.new(timeout: config[:timeout])\n\n domains.each do |domain|\n begin\n tries ||= 0\n whois_result = whois.lookup(domain).parser\n rescue Timeout::Error, Errno::ECONNRESET, Whois::ConnectionError\n tries += 1\n if tries < max_retries\n retry\n else\n results[:unknown][domain] = 'Connection error' unless config[:'ignore-errors']\n next\n end\n end\n\n begin\n expires_on = DateTime.parse(whois_result.expires_on.to_s)\n domain_result = (expires_on - DateTime.now).to_i\n if domain_result <= critical_days\n results[:critical][domain] = domain_result\n elsif domain_result <= warning_days\n results[:warning][domain] = domain_result\n else\n results[:ok][domain] = domain_result\n end\n rescue StandardError\n results[:unknown][domain] = 'Parsing error' unless config[:'ignore-errors']\n end\n end\n results\n end", "def analyze(sites, checks)\n success = Hash.new { |h, k| h[k] = Hash.new(&h.default_proc) }\n counts = Hash.new { |h, k| h[k] = Hash.new(&h.default_proc) }\n checks.each do |nam, check_data|\n success[nam] = sites.select { |_, site| site[nam] =~ check_data[SiteStandards::CHECK_VALIDATE] }.keys\n counts[nam][SITE_PASS] = success[nam].count\n counts[nam][SITE_WARN] = 0 # Reorder output\n counts[nam][SITE_FAIL] = sites.select { |_, site| site[nam].nil? 
}.count\n counts[nam][SITE_WARN] = sites.size - counts[nam][SITE_PASS] - counts[nam][SITE_FAIL]\n end\n\n return [\n counts, {\n SITE_PASS => '# Sites with links to primary ASF page',\n SITE_WARN => '# Sites with link, but not an expected ASF one',\n SITE_FAIL => '# Sites with no link for this topic'\n }, success\n ]\n end", "def update_from_site_store!\n\t\t\t#begin\n puts \"Invoke internal procedures to update the primary host-name table from the site store.\"\n # Step 1 - update the prime host table based on the SSL cert CN fields\n\t\t\t\tcns=Hash.new\n\t\t\t\tchecker=Wmap::UrlChecker.new(:data_dir=>@data_dir)\n my_tracker = Wmap::SiteTracker.instance\n my_tracker.sites_file = @data_dir + \"sites\"\n my_tracker.load_site_stores_from_file\n\t\t\t\tmy_tracker.get_ssl_sites.map do |site|\n\t\t\t\t\tputs \"Exam SSL enabled site entry #{site} ...\"\n\t\t\t\t\tmy_host=url_2_host(site)\n\t\t\t\t\tnext if @known_hosts.key?(my_host) # add the logic to optimize the process\n\t\t\t\t\tputs \"Pull SSL cert details on site: #{site}\"\n\t\t\t\t\tcn=checker.get_cert_cn(site)\n\t\t\t\t\tunless cn.nil? or cns.key?(cn)\n\t\t\t\t\t\tcns[cn]=true\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\tcns.keys.map do |cn|\n\t\t\t\t\tif is_fqdn?(cn)\n\t\t\t\t\t\tnext if @known_hosts.key?(cn)\n\t\t\t\t\t\tself.add(cn)\n\t\t\t\t\t\tputs \"New entry added: #{cn}\\t#{@known_hosts[cn]}\"\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\t# Step 2 - Save the cache into the file\n\t\t\t\tself.save!\n checker=nil\n my_tracker=nil\n\t\t\t#rescue Exception => ee\n\t\t\t#\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n # checker=nil\n # my_tracker=nil\n\t\t\t#\treturn nil\n\t\t\t#end\n\t\tend", "def hosts_file(nodes=nil)\n if nodes.nil?\n if @referenced_nodes && @referenced_nodes.any?\n nodes = @referenced_nodes\n nodes = nodes.merge(nodes_like_me[:services => 'mx']) # all nodes always need to communicate with mx nodes.\n end\n end\n return {} unless nodes\n hosts = {}\n my_location = @node['location'] ? @node['location']['name'] : nil\n nodes.each_node do |node|\n hosts[node.name] = {'ip_address' => node.ip_address, 'domain_internal' => node.domain.internal, 'domain_full' => node.domain.full}\n node_location = node['location'] ? node['location']['name'] : nil\n if my_location == node_location\n if facts = @node.manager.facts[node.name]\n if facts['ec2_public_ipv4']\n hosts[node.name]['ip_address'] = facts['ec2_public_ipv4']\n end\n end\n end\n host_pub_key = Util::read_file([:node_ssh_pub_key,node.name])\n if host_pub_key\n hosts[node.name]['host_pub_key'] = host_pub_key\n end\n end\n hosts\n end", "def hosts_file(nodes=nil)\n if nodes.nil?\n if @referenced_nodes && @referenced_nodes.any?\n nodes = @referenced_nodes\n nodes = nodes.merge(nodes_like_me[:services => 'mx']) # all nodes always need to communicate with mx nodes.\n end\n end\n return {} unless nodes\n hosts = {}\n my_location = @node['location'] ? @node['location']['name'] : nil\n nodes.each_node do |node|\n hosts[node.name] = {'ip_address' => node.ip_address, 'domain_internal' => node.domain.internal, 'domain_full' => node.domain.full}\n node_location = node['location'] ? 
node['location']['name'] : nil\n if my_location == node_location\n if facts = @node.manager.facts[node.name]\n if facts['ec2_public_ipv4']\n hosts[node.name]['ip_address'] = facts['ec2_public_ipv4']\n end\n end\n end\n host_pub_key = Util::read_file([:node_ssh_pub_key,node.name])\n if host_pub_key\n hosts[node.name]['host_pub_key'] = host_pub_key\n end\n end\n hosts\n end", "def hosts(touchAndPrune=false)\n hosts=@vp_lock.synchronize{@hostname2vp.keys}\n if touchAndPrune\n check_up_hosts(hosts)\n else\n hosts\n end\n end", "def hash_nodes(statements, nodes, grounded_hashes)\n hashes = grounded_hashes.dup\n ungrounded_hashes = {}\n hash_needed = true\n\n # We may have to go over the list multiple times. If a node is marked as\n # grounded, other nodes can then use it to decide their own state of\n # grounded.\n while hash_needed\n starting_grounded_nodes = hashes.size\n nodes.each do | node |\n unless hashes.member? node\n grounded, hash = node_hash_for(node, statements, hashes)\n if grounded\n hashes[node] = hash\n end\n ungrounded_hashes[node] = hash\n end\n end\n\n # after going over the list, any nodes with a unique hash can be marked\n # as grounded, even if we have not tied them back to a root yet.\n uniques = {}\n ungrounded_hashes.each do |node, hash|\n uniques[hash] = uniques.has_key?(hash) ? false : node\n end\n uniques.each do |hash, node|\n hashes[node] = hash if node\n end\n hash_needed = starting_grounded_nodes != hashes.size\n end\n [hashes, ungrounded_hashes]\n end", "def dns_update(zone, records)\n update = Dnsruby::Update.new(zone)\n records.each do |r|\n if r.type.upcase == 'ADD'\n s = \"#{Domain} 3600 #{Type} #{RDATA}\"\n rr = Dnsruby::RR.create(s)\n update.add(rr)\n else\n update.delete(r['Domain'], r['Type'], r['RDATA'])\n end\n end\n update\n end", "def check_domains_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DomainApi.check_domains ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling DomainApi.check_domains\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling DomainApi.check_domains\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/domain/check_all'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'Array<Array>'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"DomainApi.check_domains\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, 
status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DomainApi#check_domains\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def poll\n unless memcached_nodes.empty?\n memcached_nodes.each do | hostname_port |\n stats_text = issue_stats hostname_port \n if stats_text.present?\n @last_stats[hostname_port] = parse_and_report_stats hostname_port, stats_text\n else\n @last_stats[hostname_port] = {}\n end \n end\n\n aggregate_stats\n logger.debug \"Done with aggs\" \n end\n end", "def frwdlp(session,hostlst,domain,dest)\n\tdest = dest + \"-DNS-forward-lookup.txt\"\n\tprint_status(\"Performing DNS Forward Lookup for hosts in #{hostlst} for domain #{domain}\")\n\tfilewrt(dest,\"DNS Forward Lookup for hosts in #{hostlst} for domain #{domain}\")\n\tresult = []\n\tthreads = []\n\ttmpout = []\n\tbegin\n\tif ::File.exists?(hostlst)\n\t\t::File.open(hostlst).each {|line|\n \t\t\tthreads << ::Thread.new(line) { |h|\n \t\t\t#print_status(\"checking #{h.chomp}\")\n\t\t \tr = session.sys.process.execute(\"nslookup #{h.chomp}.#{domain}\", nil, {'Hidden' => true, 'Channelized' => true})\n \t\t \twhile(d = r.channel.read)\n \t\t\tif d =~ /(Name)/\n \t\t\t\td.scan(/Name:\\s*\\S*\\s*Address\\w*:\\s*.*?.*?.*/) do |n|\n \t\t\t\ttmpout << n.split\n \t\t\tend\n \t\t\tbreak\n \t\tend\n end\n\n r.channel.close\n r.close\n\t\t\t}\n\t\t}\n\tthreads.each { |aThread| aThread.join }\n\ttmpout.uniq.each do |t|\n \tprint_status(\"\\t#{t.join.sub(/Address\\w*:/, \"\\t\")}\")\n \tfilewrt(dest,\"#{t.join.sub(/Address\\w*:/, \"\\t\")}\")\n end\n\n\telse\n\t\tprint_status(\"File #{hostlst}does not exists!\")\n\t\texit\n\tend\n\trescue ::Exception => e\n \t\tprint_status(\"The following Error was encountered: #{e.class} #{e}\")\n\tend\nend", "def bulk_vhost_update(dbcheck, tenant_array)\n #tenant_status['vhost_updated'] = false\n command = \"chef-client -o prom-classfront::classflow-conf\"\n\n # list of environments that require vhost updates\n tenvs = [] \n tenant_array.each do |tenant|\n tenant_config = tenant[0]\n tenant_status = tenant[1]\n tenvs << tenant_config['env']\n end\n tenant_envlist = tenvs.uniq\n tenant_envlist.each do |tenant_env|\n front_end = dbcheck.lookup_frontend_servers(tenant_env)\n front_end.each do |instance|\n logger.info \"Updating vhost through chef-client run on #{instance}.\"\n begin\n run_ssh_command(instance, command)\n \n # Provides error details on connection failure\n rescue SocketError => e\n logger.error \"Unable to update vhost on instance #{instance}\"\n logger.error \"#{e}\"\n end\n end\n end\n \n # Mark each tenant as updated\n tenant_array.each do |tenant|\n tenant_status = tenant[1]\n tenant_status['vhost_updated'] = true\n end\n end", "def refresh_all\n\t\tputs \"Refresh all the entries within the local site store ... 
\"\n\t\tchanges=Hash.new\n\t\tchanges=bulk_refresh(@known_sites.keys)\n\t\t@known_sites.merge!(changes)\n\t\tputs \"Done refresh all entries.\"\n\t\treturn changes\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def find_nodes_to_verify(references)\n nodes = {}\n\n references.each do |uri, _digest_value|\n uri = uri.sub(/^#/, '')\n node = find_node_by_uri(uri)\n\n nodes[uri] = calculate_digest(node)\n end\n\n nodes\n end", "def check_domains(opts = {})\n data, _status_code, _headers = check_domains_with_http_info(opts)\n data\n end", "def run_checks(key_ttl)\n # We also need to perform the auditing checks against the config\n # Checks to be performed :\n # b) Warn if number of prepublished ZSKs < ZSK:Standby\n # Do this by [alg, alg_length] - so only select those keys which match the config\n @config.keys.zsks.each {|zsk|\n prepublished_zsk_count = @cache.prepublished.keys.select {|k|\n k.zone_key? && !k.sep_key? && (k.algorithm == zsk.algorithm) &&\n (k.key_length == zsk.alg_length)\n }.length\n if (prepublished_zsk_count < zsk.standby)\n msg = \"Not enough prepublished ZSKs! Should be #{zsk.standby} but have #{prepublished_zsk_count}\"\n @parent.log(LOG_WARNING, msg)\n end\n }\n @cache.inuse.each {|key, time|\n timestamp = time[0]\n first_timestamp = time[1]\n # Ignore this check if the key was already in use at the time at which the lifetime policy was changed.\n # How do we know to which AnyKey group this key belongs? Can only take a guess by [algorithm, alg_length] tuple\n # Also going to have to put checks in place where key protocol/algorithm is checked against policy :-(\n # - no we don't! These are only checked when we are loading a new key - not one we've seen before.\n # and of course, a new key should be created with the correct values!\n key_group_policy_changed = false\n # First, find all the key groups which this key could belong to\n keys = @config.changed_config.zsks\n if (key.sep_key?)\n keys = @config.changed_config.ksks\n end\n possible_groups = keys.select{|k| (k.algorithm == key.algorithm) &&\n (k.alg_length == key.key_length)}\n # Then, find the latest timestamp (other than 0)\n key_group_policy_changed_time = 0\n if (possible_groups.length == 0)\n # Can't find the group this key belongs to\n if (@config.changed_config.kasp_timestamp < first_timestamp)\n # @TODO@ o if there has been no change in any of the configured keys then error (the key shouldn't exist)\n # Shouldn't this be caught by something else?\n end\n # o if there has been a change since the key was first seen, then don't raise any errors for this key\n else\n possible_groups.each {|g|\n if (g.timestamp > key_group_policy_changed_time)\n key_group_policy_changed_time = g.timestamp\n key_group_policy_changed = true\n end\n }\n next if (key_group_policy_changed && (first_timestamp < key_group_policy_changed_time))\n end\n\n if (key.zone_key? && !key.sep_key?)\n # d) Warn if ZSK inuse longer than ZSK:Lifetime + Enforcer:Interval\n # Get the ZSK lifetime for this type of key from the config\n zsks = @config.keys.zsks.select{|zsk|\n (zsk.algorithm == key.algorithm) &&\n (zsk.alg_length == key.key_length)}\n next if (zsks.length == 0)\n # Take the \"safest\" value - i.e. 
the longest one in this case\n zsk_lifetime = 0\n zsks.each {|z|\n zsk_lifetime = z.lifetime if (z.lifetime > zsk_lifetime)\n }\n lifetime = zsk_lifetime + @enforcer_interval + @validity\n if timestamp < (Time.now.to_i - lifetime)\n msg = \"ZSK #{key.key_tag} in use too long - should be max #{lifetime} seconds but has been #{Time.now.to_i-timestamp} seconds\"\n @parent.log(LOG_WARNING, msg)\n end\n else\n # c) Warn if KSK inuse longer than KSK:Lifetime + Enforcer:Interval\n # Get the KSK lifetime for this type of key from the config\n ksks = @config.keys.ksks.select{|ksk| (ksk.algorithm == key.algorithm) &&\n (ksk.alg_length == key.key_length)}\n next if (ksks.length == 0)\n # Take the \"safest\" value - i.e. the longest one in this case\n ksk_lifetime = 0\n ksks.each {|k|\n ksk_lifetime = k.lifetime if (k.lifetime > ksk_lifetime)\n }\n lifetime = ksk_lifetime + @enforcer_interval + @validity\n if timestamp < (Time.now.to_i - lifetime)\n# msg = \"KSK #{key.key_tag} in use too long - should be max #{lifetime} seconds but has been #{Time.now.to_i-timestamp} seconds\"\n msg = \"KSK #{key.key_tag} reaching end of lifetime - should be max #{lifetime} seconds but has been #{Time.now.to_i-timestamp} seconds, not including time taken for DS to be seen\"\n @parent.log(LOG_WARNING, msg)\n end\n end\n }\n if (@config.audit_tag_present)\n check_inuse_keys_history(key_ttl)\n end\n end", "def resolve(dns_records, lookup_chain, domain)\n record_A = dns_records[:A]\n record_CNAME = dns_records[:CNAME]\n\n #if domain in recordA\n #add destination to lookup_chain\n if record_A[:source].include?(domain)\n lookup_chain.push(record_A[:ip][record_A[:source].index(domain)])\n\n #if domain in recordCNAME\n #add destination to lookup_chain\n #update domain with destination\n #call the funtion again with new domain(Recursion)\n elsif record_CNAME[:source].include?(domain)\n lookup_chain.push(record_CNAME[:alias][record_CNAME[:source].index(domain)])\n domain = record_CNAME[:alias][record_CNAME[:source].index(domain)]\n resolve(dns_records, lookup_chain, domain)\n else\n return lookup_chain\n end\nend", "def check_up_hosts(hostlisthash, settings={ :retry => true, :maxalert => NO_EMAIL, :timeout => 30})\n if hostlisthash.class==Array\n hostlisthash=hostlisthash.to_h(true)\n end\n if not settings.include?(:timeout)\n settings[:timeout]=30\n end\n if not settings.include?(:retry)\n settings[:retry]=true\n end\n if not settings.include?(:maxalert)\n settings[:maxalert]=NO_EMAIL\n end\n results, unsuccessful_hosts=issue_command_on_hosts(hostlisthash,settings){|h,p| h.backtic(\"hostname --fqdn\").chomp(\"\\n\").strip.downcase}\n uphosts=[]\n results.each{|vp|\n uphosts << ($rename_vp.has_key?(vp.at(0)) ? 
$rename_vp[vp.at(0)] : vp.at(0))\n if vp.at(0) != vp.at(1)\n log { \"check_up_hosts(): vp.at(0) != vp.at(1): #{vp.join(\" \")}\" }\n end\n }\n # if prune\n # unsuccessful_hosts.each{|h|\n # self.unregister_host(h)\n # }\n # end\n return uphosts\n end", "def evaluate_checks\n log.info(\"Evaluating Checks: '#{@config['checks'].length}'\")\n\n @config['checks'].each do |check|\n check_name = check['check']\n check_cfg = check['cfg']\n\n collect_metrics(check_name, check_cfg).each do |metric|\n status = 0\n\n # on service it will come with \"state_required\" flag\n if check_name == 'service'\n # adding defaults in case they are not set\n check_cfg = check_cfg.merge(\n 'state' => 'active',\n 'state_required' => 1\n )\n # giving a service hint by adding it's name\n check_name = \"service_#{check_cfg['name']}\"\n status = equals(metric['value'], check_cfg['state_required'])\n else\n # normal threshold evaluation\n status = evaluate(\n metric['value'],\n check_cfg['warn'],\n check_cfg['crit']\n )\n end\n\n template_variables = metric\n template_variables['cfg'] = check_cfg\n\n append_event(\n \"check_#{check_name}\",\n @tmpl.render(check['check'], template_variables),\n status,\n metric['source']\n )\n end\n end\n end", "def run_batch(batch)\n\n\t\t@results = {}\n\t\t@aliases = {}\n\n\t\tprint_status(\"Sending probes to #{batch[0]}->#{batch[-1]} (#{batch.length} hosts)\")\n\n\t\tbegin\n\t\t\tudp_sock = nil\n\t\t\tidx = 0\n\n\t\t\t# Create an unbound UDP socket if no CHOST is specified, otherwise\n\t\t\t# create a UDP socket bound to CHOST (in order to avail of pivoting)\n\t\t\tudp_sock = Rex::Socket::Udp.create( { 'LocalHost' => datastore['CHOST'] || nil, 'Context' => {'Msf' => framework, 'MsfExploit' => self} })\n\t\t\tadd_socket(udp_sock)\n\n\t\t\t# Try three times since NTP servers can be a bit busy\n\t\t\t1.upto(3) do\n\t\t\tbatch.each do |ip|\n\t\t\t\tnext if @results[ip]\n\n\t\t\t\tbegin\n\t\t\t\t\tdata = probe_pkt_ntp(ip)\n\t\t\t\t\tudp_sock.sendto(data, ip, datastore['RPORT'].to_i, 0)\n\t\t\t\trescue ::Interrupt\n\t\t\t\t\traise $!\n\t\t\t\trescue ::Rex::HostUnreachable, ::Rex::ConnectionTimeout, ::Rex::ConnectionRefused\n\t\t\t\t\tnil\n\t\t\t\tend\n\n\t\t\t\tif (idx % 30 == 0)\n\t\t\t\t\twhile (r = udp_sock.recvfrom(65535, 0.1) and r[1])\n\t\t\t\t\t\tparse_reply(r)\n\t\t\t\t\tend\n\t\t\t\tend\n\n\t\t\t\tidx += 1\n\t\t\tend\n\t\t\tend\n\n\t\t\twhile (r = udp_sock.recvfrom(65535, 10) and r[1])\n\t\t\t\tparse_reply(r)\n\t\t\tend\n\n\t\trescue ::Interrupt\n\t\t\traise $!\n\t\trescue ::Exception => e\n\t\t\tprint_status(\"Unknown error: #{e.class} #{e}\")\n\t\tend\n\n\t\[email protected] do |k|\n\n\t\t\treport_service(\n\t\t\t\t:host => k,\n\t\t\t\t:proto => 'udp',\n\t\t\t\t:port => datastore['RPORT'].to_i,\n\t\t\t\t:name => 'NTP'\n\t\t\t)\n\n\t\t\treport_note(\n\t\t\t\t:host => k,\n\t\t\t\t:proto => 'udp',\n\t\t\t\t:port => datastore['RPORT'].to_i,\n\t\t\t\t:type => 'ntp.monlist',\n\t\t\t\t:data => {:monlist => @results[k]}\n\t\t\t)\n\n\t\t\tif (@aliases[k] and @aliases[k].keys[0] != k)\n\t\t\t\treport_note(\n\t\t\t\t\t:host => k,\n\t\t\t\t\t:proto => 'udp',\n\t\t\t\t\t:port => datastore['RPORT'].to_i,\n\t\t\t\t\t:type => 'ntp.addresses',\n\t\t\t\t\t:data => {:addresses => @aliases[k].keys}\n\t\t\t\t)\n\t\t\tend\n\n\t\t\tif (datastore['StoreNTPClients'])\n\t\t\t\tprint_status(\"#{k} Storing #{@results[k].length} NTP client hosts in the database...\")\n\t\t\t\t@results[k].each do |r|\n\t\t\t\t\tmaddr,mport,mserv = r\n\t\t\t\t\treport_note(:host => maddr, :type => 'ntp.client.history', :data => 
{:address => maddr, :port => mport, :server => mserv})\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\tend", "def postprocess(msgs, _results)\n groups = {}\n msgs.each do |m|\n os = m.metadata[:originserver]\n\n next if os.nil? || os[:address].nil?\n\n address = IPAddr.new os[:address]\n range = address.ipv4? ? 16..32 : 48..128\n range.each do |mask|\n groups[address.mask(mask)] ||= []\n groups[address.mask(mask)] << m.digest_as_hex\n end\n end\n\n { address_ranges: groups }\n end", "def for_tree_nodes(tree_nodes, check_root_record = true)\n return {} if tree_nodes.empty?\n assert_same_type!(tree_nodes)\n\n # E.g. ArchivalObject\n node_model = tree_nodes[0].class.node_model\n\n # E.g. Resource\n root_model = tree_nodes[0].class.root_model\n\n # Initialize our result map to true -- assuming \"published\" by default.\n result = Hash[tree_nodes.map {|node| [node, true]}]\n\n if check_root_record\n # If we're the top-level call, we'll check the repository and root\n # record's publication status. There's no point doing this at every\n # level of the tree, but do it up front to save some potential work.\n root_record_id_to_child = {}\n tree_nodes.each do |node|\n if repository_published?(node[:repository_id])\n root_record_id_to_child[node.root_record_id] ||= []\n root_record_id_to_child[node.root_record_id] << node\n else\n result[node] = false\n end\n end\n\n return result if root_record_id_to_child.empty?\n\n root_model\n .filter(:id => root_record_id_to_child.keys)\n .filter(Sequel.|({:publish => 0},\n {:suppressed => 1}))\n .select(:id)\n .each do |root_record|\n root_record_id_to_child.fetch(root_record.id).each do |node|\n result[node] = false\n end\n end\n end\n\n parent_id_to_child = {}\n tree_nodes.each do |node|\n if result[node] && node.publish == 1 && node.suppressed == 0\n # OK so far, but check the ancestors.\n if node.parent_id\n parent_id_to_child[node.parent_id] ||= []\n parent_id_to_child[node.parent_id] << node\n end\n else\n # Unpublished/suppressed. 
Nothing more to check.\n result[node] = false\n end\n end\n\n unless parent_id_to_child.empty?\n parent_ids = parent_id_to_child.keys\n parent_publication_status = for_tree_nodes(node_model.filter(:id => parent_ids)\n .select(:id, :parent_id, :root_record_id, :publish, :suppressed)\n .all,\n false)\n\n parent_publication_status.each do |parent, published|\n # If the parent was unpublished, that overrides our previous result.\n parent_id_to_child.fetch(parent.id).each do |node|\n result[node] &&= published\n end\n end\n end\n\n result\n end", "def intelligent_nodeps(mapping, package, nodeps_sym = :delete, deps_sym = :cant_delete, active_criteria = false)\n vnfds, nsds, testds, files, cant_delete_vnfds = [], [], [], [], []\n cant_delete_nsds, cant_delete_testds, cant_delete_files = [], [], []\n mapping.each do |content|\n # next if content['content-type'].split('.')[-2] == 'osm'\n if content['content-type'].split('.')[-1] == 'vnfd'\n if check_dependencies( content, package.pd, active_criteria)\n logger.info 'VNFD ' + content['id'][:name] + ' has more than one dependency'\n cant_delete_vnfds << content['id']\n else\n vnfds << content['id']\n end\n elsif content['content-type'].split('.')[-1] == 'nsd'\n if check_dependencies(content, package.pd, active_criteria)\n logger.info 'NSD ' + content['id'][:name] + ' has more than one dependency'\n cant_delete_nsds << content['id']\n else\n nsds << content['id']\n end\n elsif content['content-type'].split('.')[-1] == 'tstd'\n if check_dependencies(content, package.pd, active_criteria)\n logger.info 'TESTD ' + content['id'][:name] + ' has more than one dependency'\n cant_delete_testds << content['id']\n else\n testds << content['id']\n end\n elsif content['content-type'].split('.')[-1] != 'ref'\n if check_dependencies_files(content, package.pd, active_criteria)\n logger.info 'File with {uuid =>' + content[:uuid] + '} has more than one dependency'\n cant_delete_files << {uuid: content[:uuid]}\n else\n files << {uuid: content[:uuid]}\n end\n end\n end\n { nodeps_sym => { vnfds: vnfds, nsds: nsds, testds: testds, files: files },\n deps_sym => { vnfds: cant_delete_vnfds, nsds: cant_delete_nsds,\n testds: cant_delete_testds, files: cant_delete_files} }\n end", "def domain_update(args)\n if args.key?(:chg) && args[:chg].key?(:registrant)\n raise ArgumentError, 'You need to do a trade or recover operation to change the registrant'\n end\n has_contacts = args.key?(:add) && args[:add].key?(:contacts) || args.key?(:add) && args[:add].key?(:contacts)\n has_ns = args.key?(:add) && args[:add].key?(:ns) || args.key?(:add) && args[:add].key?(:ns)\n has_other = args.key?(:add) && args[:add].key?(:status) || args.key?(:add) && args[:add].key?(:status) || args.key?(:chg) && args[:chg].key?(:authInfo)\n if [has_contacts, has_ns, has_other].count { |v| v } > 1\n raise ArgumentError, \"You can't update all that at one time\"\n end\n [:add, :rem].each do |ar|\n if args.key?(ar) && args[ar].key?(:ns) && args[ar][:ns].first.is_a?(String)\n args[ar][:ns] = args[ar][:ns].map { |ns| { :hostName => ns } }\n end\n end\n super\n end", "def get_njobs(nodes)\n\n # Reset job count on each input node\n nodes.each do |wn|\n wn[:njobs] = -1\n end\n\n begin\n pbsnodes_xml = REXML::Document.new( %x[ #{$cmd_pbsnodes} 2> /dev/null ] )\n rescue\n return\n end\n\n return if pbsnodes_xml.elements.empty?\n\n pbsnodes_xml.elements.each('//Data/Node') do |node_xml|\n\n name = node_xml.elements['name'].text\n is_offline = node_xml.elements['state'].text.include?('offline')\n is_down = 
node_xml.elements['state'].text.include?('down')\n\n jobs_xml = node_xml.elements['jobs']\n if jobs_xml\n njobs = jobs_xml.text.split(' ').length\n else\n njobs = 0\n end\n\n # Find matching input nodes: FQDN must be set, node must be up and offline\n nodes.each do |wn|\n next unless wn[:fqdn] and wn[:fqdn] == name and is_offline and !is_down\n wn[:njobs] = njobs\n end\n\n end\n\nend", "def update_node_search_domains_with_http_info(node_search_domains_properties, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: NsxComponentAdministrationApi.update_node_search_domains ...\"\n end\n # verify the required parameter 'node_search_domains_properties' is set\n if @api_client.config.client_side_validation && node_search_domains_properties.nil?\n fail ArgumentError, \"Missing the required parameter 'node_search_domains_properties' when calling NsxComponentAdministrationApi.update_node_search_domains\"\n end\n # resource path\n local_var_path = \"/node/network/search-domains\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(node_search_domains_properties)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'NodeSearchDomainsProperties')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: NsxComponentAdministrationApi#update_node_search_domains\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def pdb_get_facts(node_ip_hostname)\n keyed_facts = {}\n\n if test_env\n response = \"[{\\\"certname\\\":\\\"host-name-01.domain.com\\\",\\\"name\\\":\\\"trusted\\\",\\\"value\\\":{\\\"authenticated\\\":\\\"remote\\\",\\\"certname\\\":\\\"host-name-01.domain.com\\\",\\\"domain\\\":\\\"domain.com\\\",\\\"extensions\\\":{\\\"company_trusted_swimlane\\\":\\\"n/a\\\",\\\"pp_datacenter\\\":\\\"mtv\\\",\\\"pp_environment\\\":\\\"qa\\\",\\\"pp_product\\\":\\\"test\\\",\\\"pp_role\\\":\\\"rabbit_mq\\\"},\\\"hostname\\\":\\\"host-name-01\\\"},\\\"environment\\\":\\\"tier2\\\"},{\\\"certname\\\":\\\"puppet.upguard.org\\\",\\\"environment\\\":\\\"production\\\",\\\"name\\\":\\\"virtual\\\",\\\"value\\\":\\\"#{TEST_OS_VIRT_PLATFORM}\\\"},{\\\"certname\\\":\\\"puppet.upguard.org\\\",\\\"environment\\\":\\\"production\\\",\\\"name\\\":\\\"operatingsystemmajrelease\\\",\\\"value\\\":\\\"#{TEST_OS_MAJOR_RELEASE}\\\"},{\\\"certname\\\":\\\"puppet.upguard.org\\\",\\\"environment\\\":\\\"production\\\",\\\"name\\\":\\\"operatingsystem\\\",\\\"value\\\":\\\"#{TEST_OS}\\\"}]\"\n else\n response = `curl -X GET #{PUPPETDB_URL}/pdb/query/v4/nodes/#{node_ip_hostname}/facts -d 'query=[\"or\", [\"=\",\"name\",\"trusted\"], [\"=\",\"name\",\"virtual\"], [\"=\",\"name\",\"operatingsystem\"], [\"=\",\"name\",\"operatingsystemmajrelease\"]]' --tlsv1 --cacert /etc/puppetlabs/puppet/ssl/certs/ca.pem --cert /etc/puppetlabs/puppet/ssl/certs/#{COMPILE_MASTER_PEM} --key 
/etc/puppetlabs/puppet/ssl/private_keys/#{COMPILE_MASTER_PEM}`\n Puppet.info(\"#{log_prefix} trusted facts for #{node_ip_hostname} is: response=#{response}\")\n end\n\n if response.nil?\n return nil\n end\n facts = JSON.load(response)\n if !facts.is_a?(Array) && !facts.any?\n return nil\n end\n facts.each do |fact|\n keyed_facts[fact['name']] = fact\n end\n keyed_facts\n end", "def update!(**args)\n @dns_search_domains = args[:dns_search_domains] if args.key?(:dns_search_domains)\n @dns_servers = args[:dns_servers] if args.key?(:dns_servers)\n @ntp_servers = args[:ntp_servers] if args.key?(:ntp_servers)\n end", "def health_checks\n SERVICES.each do |service_name, service_info|\n puts \"Health Checking this service URL: #{service_info[:health_check_url]}\"\n response = RestClient::Request.execute(\n method: :get,\n url: service_info[:health_check_url]\n )\n puts JSON.parse(response)\n end\n end", "def facts_for_node(certnames)\n return {} if certnames.empty? || certnames.nil?\n\n certnames.uniq!\n name_query = certnames.map { |c| [\"=\", \"certname\", c] }\n name_query.insert(0, \"or\")\n\n @logger.debug(\"Querying certnames\")\n result = make_query(name_query, 'inventory')\n\n result&.each_with_object({}) do |node, coll|\n coll[node['certname']] = node['facts']\n end\n end", "def hash\n [host_list, total_matching, total_returned].hash\n end", "def update_job_servers\n # Check if it's been > TIME_BETWEEN_CHECKS or we have no good servers\n return unless time_to_check_connections || @job_servers.empty?\n\n logger.debug \"Found #{@bad_servers.size} zombie connections, checking pulse.\"\n @servers_mutex.synchronize do\n @bad_servers.each do |connection|\n begin\n message = \"Testing server #{connection}...\"\n if connection.is_healthy?\n logger.debug \"#{message} Connection is healthy, putting back into service\"\n activate_connection(connection)\n else\n logger.debug \"#{message} Still down.\"\n end\n end\n end\n end\n\n # Sleep for a few to allow a chance for the world to become sane\n if @job_servers.empty?\n logger.warn \"No job servers available, sleeping for #{SLEEP_TIME} seconds\"\n sleep(SLEEP_TIME)\n end\n\n @last_check_time = Time.now\n end", "def all(domain,username,password,dc_ip)\n\n enum_dom_users(domain,username,password,dc_ip)\n\n enum_dom_groups(domain,username,password,dc_ip)\n\n groupArr = [\"Domain Admins\",\"Domain Computers\",\"Enterprise Admins\",\"Administrators\"]\n\n groupArr.each {|group| enum_group_membership(domain,username,password,dc_ip,group)}\n\n priv_groups(domain,username,password,dc_ip)\n\n grab_attr(domain,username,password,dc_ip)\nend", "def update_nodes\n mongo_driver = Kymera::MongoDriver.new(address, port, database, 'nodes')\n @registered_nodes = mongo_driver.get_collection('nodes')\n end", "def update_status\n @servers.each do |type|\n begin\n @server_status.send(\"#{type}_reinitialize\") # re-ping the server\n @status[type] = @server_status.send(\"#{type}_all_info\")\n\n # All sorts of invalid input can potentially cause an error. 
Whatever it is, just make sure we return a valid object.\n rescue Exception => e\n warn \"[#{Time.now}] #{e.inspect}\"\n e.backtrace.each do |msg|\n warn \"[#{Time.now}] #{msg}\"\n end\n @status[type] = {}\n end\n end\n end", "def parse_dns(raw)\n # Filtering Lines with Comments and Empty Lines\n dns_filter = raw.select { |x| x[0] != \"#\" && x != \"\\n\" }.map {|x| x.split(\", \")}\n\n # Building the Hash\n dns_hash = {}\n dns_filter.each do |x| \n dns_hash[x[1]] = {\n :type => x[0],\n :target => x[2]\n }\n end\n \n dns_hash\nend", "def checkHosts(layer)\n @host_facts.each do |f|\n # each host has a list of facts\n f[1].each do |l|\n if l['deploop_category'] == layer\n up = @mchandler.ifHostUp f[0]\n if @opt.verbose\n puts \"checking host #{f[0]} is up: \" \n puts up\n end\n if !up\n msg = \"ERROR: host \\'#{f[0]}\\' is unreachable. Aboring.\"\n @outputHandler.msgError msg\n end\n deplUp = @mchandler.checkIfDeploopHost f[0]\n if @opt.verbose\n puts \"checking Deploop enabled host #{f[0]}: \" \n puts deplUp\n end\n if !deplUp\n msg = \"ERROR: host \\'#{f[0]}\\' is not Deploop enabled, fix this. Aborting.\"\n @outputHandler.msgError msg\n end\n end\n end\n\n end # @host_facts.each\n msg = \"The layer \\'#{layer}\\' has all host Deploop enabled\"\n @outputHandler.msgOutput msg\n end", "def parse_dns(raw)\n # Filtering Lines with Comments and Empty Lines\n dns_filter = raw.select {|x| x[0]!= \"#\" && x != \"\\n\" }\n\n # Creating a List with 3 Columns\n dns_filter_list = []\n dns_filter.each {|x| dns_filter_list.push(x.split(\", \"))}\n\n # Creating the List each DNS for Hash\n record_type_list = []\n source_list = []\n destination_list = []\n\n dns_filter_list.each do |x|\n record_type_list.push(x[0])\n source_list.push(x[1])\n destination_list.push(x[2])\n end\n\n # Building the Hash\n dns_hash = {\n \"RECORDTYPE\".to_sym => record_type_list,\n \"SOURCE\".to_sym => source_list,\n \"DESTINATION\".to_sym => destination_list,\n }\n return dns_hash\nend", "def get_prim_uniq_sites\n\t\tputs \"Retrieve and prime unique sites in the site store. \" if @verbose\n\t\thost_tracker=Wmap::HostTracker.instance\n\t\thost_tracker.data_dir=@data_dir\n\t\tprimary_host_tracker=Wmap::HostTracker::PrimaryHost.instance\n\t\tprimary_host_tracker.data_dir=@data_dir\n\t\tprimary_host_tracker.hosts_file = primary_host_tracker.data_dir + \"/\" + \"prime_hosts\"\n\t\tprimary_host_tracker.known_hosts=primary_host_tracker.load_known_hosts_from_file(@hosts_file)\n\t\t# Step 1. Retrieve the unique site list first\n\t\tsites=get_uniq_sites\n\t\tprim_uniq_sites=Array.new\n\t\t# Step 2. 
Iterate on the unique site list, spit out the site in the primary host format one at a time\n\t\tsites.map do |site|\n\t\t\tputs \"Work on priming unique site: #{site}\" if @verbose\n\t\t\thost=url_2_host(site)\n\t\t\t# case#1, for the IP only site, do nothing (presuming 'refresh_ip_sites' or 'refresh_all' method already take care of the potential discrepancy here).\n\t\t\tif is_ip?(host)\n\t\t\t\tprim_uniq_sites.push(site)\n\t\t\t\tnext\n\t\t\tend\n\t\t\tip=@known_sites[site]['ip']\n\t\t\t# case#2, for site with an unique IP, do nothing\n\t\t\tputs \"Local hosts table entry count for #{ip}: #{host_tracker.alias[ip]}\" if @verbose\n\t\t\tif host_tracker.alias[ip] == 1\n\t\t\t\tprim_uniq_sites.push(site)\n\t\t\t\tnext\n\t\t\tend\n\t\t\t# case#3, case of multiple IPs for A DNS record, where the site IP may have 0 alias count, do nothing\n\t\t\tif host_tracker.alias[ip] == nil\n\t\t\t\tprim_uniq_sites.push(site)\n\t\t\t\tnext\n\t\t\tend\n\t\t\t# case#4, for the site has a duplicate IP with others, we try to determine which one is the primary site\n\t\t\t# raise \"Error: inconsistency detected on record: #{site}. Please run the following shell command to refresh it first: \\n\\srefresh #{site}\" if tracker1.alias[ip].nil?\n\t\t\tif ( primary_host_tracker.known_hosts.key?(ip) and (host_tracker.alias[ip] > 1) )\n\t\t\t\tnew_host=primary_host_tracker.prime(host)\n\t\t\t\tputs \"Host: #{host}, New host:#{new_host}\" if @verbose\n\t\t\t\tunless host==new_host\n\t\t\t\t\tnew_site=site.sub(host,new_host)\n\t\t\t\t\traise \"Site not found in the site tracking data repository: #{new_site}. You may need to add it into the site store first. Execute the following shell command before trying again: \\n\\twadd #{new_site}\\n\" unless @known_sites.key?(new_site)\n\t\t\t\t\tnew_ip=@known_sites[new_site]['ip']\n\t\t\t\t\tif new_ip==ip\t\t# consistency check\n\t\t\t\t\t\tsite=new_site\n\t\t\t\t\telse\n\t\t\t\t\t\t# TBD - case of multiple IPs for A DNS record\n\t\t\t\t\t\t#raise \"Inconsistency found on prime host entrance: #{new_ip}, #{ip}; #{new_site}, #{site}. Please refresh your entries by running the following shell command: \\n\\s refresh #{new_site}\"\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\t\tprim_uniq_sites.push(site)\n\t\tend\n\t\tprimary_host_tracker=nil\n\t\thost_tracker=nil\n\t\treturn prim_uniq_sites\n\t#rescue => ee\n\t#\tputs \"Exception on method #{__method__}: #{ee}\"\n\tend", "def health_check\n ret = {}\n unready = []\n # We are ignoring the ceph nodes, as they should already be in crowbar_upgrade state\n NodeObject.find(\"NOT roles:ceph-*\").each do |node|\n unready << node.name unless node.ready?\n end\n ret[:nodes_not_ready] = unready unless unready.empty?\n failed = Proposal.all.select { |p| p.active? && p.failed? }\n ret[:failed_proposals] = failed.map(&:display_name) unless failed.empty?\n ret\n end", "def wanted_records(records, domains)\n _records = records.select { |m| domains.include?(m[1]) }\n _records = _records.each_cons(2).select { |a, b| a.last == b.last }\n _records = _records.group_by(&:last).keys.map do |v|\n { record_id: v.first, ip_address: v.last }\n end\n _records\n end", "def deploy_dns host_instance\n # Before we deploy puppet, we need to (possibly generate) and read out the nsupdate key(s)\n domain_list = [@deployment.dns.app_domain]\n if @deployment.dns.register_components?\n domain_list << @deployment.dns.component_domain\n end\n domain_list.each do |dns_domain|\n print \"* Checking for #{dns_domain} DNS key... 
\"\n key_filepath = \"/var/named/K#{dns_domain}*.key\"\n key_check = host_instance.exec_on_host!(\"ls #{key_filepath}\")\n if key_check[:exit_code] == 0\n puts 'found.'\n else\n # No key; build one.\n puts 'not found; attempting to generate.'\n key_gen = host_instance.exec_on_host!(\"dnssec-keygen -a HMAC-MD5 -b 512 -n USER -r /dev/urandom -K /var/named #{dns_domain}\")\n if key_gen[:exit_code] == 0\n puts '* Key generation successful.'\n else\n display_error_info(host_instance, key_gen, 'Could not generate a DNS key.')\n return false\n end\n end\n\n # Copy the public key info to the config file.\n key_text = host_instance.exec_on_host!(\"cat #{key_filepath}\")\n if key_text[:exit_code] != 0 or key_text[:stdout].nil? or key_text[:stdout] == ''\n display_error_info(host_instance, key_text, \"Could not read DNS key data from #{key_filepath}.\")\n return false\n end\n\n # Format the public key correctly.\n key_vals = key_text[:stdout].strip.split(' ')\n nsupdate_key = \"#{key_vals[6]}#{key_vals[7]}\"\n if dns_domain == @deployment.dns.app_domain\n @puppet_global_config['bind_key'] = nsupdate_key\n else\n @puppet_global_config['dns_infrastructure_key'] = nsupdate_key\n end\n end\n\n # Make sure BIND is enabled.\n dns_restart = host_instance.exec_on_host!('service named restart')\n if dns_restart[:exit_code] == 0\n puts '* BIND DNS enabled.'\n else\n display_error_info(host_instance, dns_restart, \"Could not enable BIND DNS on #{host_instance.host}.\")\n return false\n end\n return true\nend", "def replace_nameservers(hostname, new_attributes, domains: [])\n transaction do\n domain_scope = domains.dup\n domain_list = []\n failed_list = []\n\n nameservers.where(hostname: hostname).find_each do |origin|\n idn = origin.domain.name\n puny = origin.domain.name_puny\n next unless domains.include?(idn) || domains.include?(puny) || domains.empty?\n\n if domain_not_updatable?(hostname: new_attributes[:hostname], domain: origin.domain)\n failed_list << idn\n next\n end\n\n new_nameserver = Nameserver.new\n new_nameserver.domain = origin.domain\n new_nameserver.attributes = new_attributes\n new_nameserver.save!\n\n domain_scope.delete_if { |i| i == idn || i == puny }\n domain_list << idn\n\n origin.destroy!\n end\n\n self.domains.where(name: domain_list).find_each(&:update_whois_record) if domain_list.any?\n [domain_list.uniq.sort, (domain_scope + failed_list).uniq.sort]\n end\n end", "def parse_dns(dns_raw)\n dns_records = {}\n dns_raw.each do |rec|\n rec=rec.chomp\n unless rec[0] == \"#\" || rec.empty?\n records = rec.split(/,/)\n records = records.map {|recd| recd.strip()}\n unless dns_records.has_key?(records[0])\n dns_records.store(records[0],[[records[1],records[2]]])\n else\n dns_records[records[0]].push([records[1],records[2]])\n end\n end\n end\n return dns_records\nend", "def check_passenger_monitoring\n passenger_plugins = [\n {\"plugin_name\"=>\"passenger\", \"plugin_type\"=>\"passenger_instances\",\"field\"=>\"value\"},\n {\"plugin_name\"=>\"passenger\", \"plugin_type\"=>\"passenger_processes\",\"field\"=>\"max\"},\n {\"plugin_name\"=>\"passenger\", \"plugin_type\"=>\"passenger_queued\",\"field\"=>\"value\"},\n {\"plugin_name\"=>\"passenger\", \"plugin_type\"=>\"passenger_requests\",\"field\"=>\"value\"}\n ]\n sleep 60 # wait for some data to be available\n @servers.each do |server|\n unless server.multicloud\n#passenger commands to generate data for collectd to return\n# for ii in 1...100\n# # how do we force there to be data?? 
For now just check that the graph exists - cause the\n# # bug was missing graphs.\n# end\n passenger_plugins.each do |plugin|\n monitor = obj_behavior(server, :get_sketchy_data, {'start' => -60,\n 'end' => -20,\n 'plugin_name' => plugin['plugin_name'],\n 'plugin_type' => plugin['plugin_type']})\n value = monitor['data'][\"#{plugin['field']}\"]\n puts \"Checking #{plugin['plugin_name']}-#{plugin['plugin_type']}: value #{value}\"\n raise \"No #{plugin['plugin_name']}-#{plugin['plugin_type']} data\" unless value.length > 0\n# # Need to check for that there is at least one non 0 value returned.\n# for nn in 0...value.length\n# if value[nn] > 0\n# break\n# end\n# end\n# raise \"No #{plugin['plugin_name']}-#{plugin['plugin_type']} time\" unless nn < value.length\n puts \"Monitoring is OK for #{plugin['plugin_name']}-#{plugin['plugin_type']}\"\n end\n end\n end\n end", "def parse_dns(raw)\n # Filtering Lines with Comments and Empty Lines\n dns_filter = raw.select { |x| x[0] != \"#\" && x != \"\\n\" }\n\n # Creating a List with 3 Columns\n dns_filter_list = []\n dns_filter.each { |x| dns_filter_list.push(x.split(\", \")) }\n\n # Creating the List each DNS for Hash\n record_type_list = []\n source_list = []\n destination_list = []\n\n dns_filter_list.each do |x|\n record_type_list.push(x[0])\n source_list.push(x[1])\n destination_list.push(x[2])\n end\n\n # Building the Hash\n dns_hash = {\n \"RECORDTYPE\".to_sym => record_type_list,\n \"SOURCE\".to_sym => source_list,\n \"DESTINATION\".to_sym => destination_list,\n }\n return dns_hash\nend", "def updateTable(cmd)\n # puts \"TRYING TO UPDATE TABLE\"\n sentFrom = cmd.shift\n curr_edge_time = nil\n new_edge_time = nil\n new_edge_cost = nil\n node = $node_info.new\n arr = nil\n hops = nil\n lis = nil\n loop{\n new_edge_time = cmd[3].to_i\n new_edge_cost = cmd[2].to_i\n\n $lock.synchronize{\n curr_edge_time = $network.get_time(cmd[0],cmd[1])\n }\n\n if curr_edge_time == 0\n #name of srcNode,name of destNode,cost of edge,time of Edge\n $lock.synchronize{\n $network.undir_connection(cmd[0], cmd[1], new_edge_time, new_edge_cost)\n }\n if ($rt.has_key?(cmd[0]) != true)\n node.src = $hostname\n node.dst = cmd[0]\n node.cost = nil #do dijsktras\n node.nexthop = nil #do dijsktras\n $lock.synchronize{\n $rt[cmd[0]] = node\n }\n end \n if($rt.has_key?(cmd[1]) != true)\n node.src = $hostname\n node.dst = cmd[1]\n node.cost = nil #do dijsktras\n node.nexthop = nil #do dijsktras\n $lock.synchronize{\n $rt[cmd[1]] = node\n }\n \n end\n\n elsif curr_edge_time < new_edge_time\n $lock.synchronize{\n $network.update_cost(cmd[0], cmd[1], new_edge_time, new_edge_cost)\n }\n end \n \n cmd.shift(4)\n break if cmd.length < 4\n \n # puts \"ABOUT TO RUN DIJKSTRAS\"\n $lock.synchronize{\n arr = $network.dijkstra($hostname) \n }\n $full_path = arr[0]\n #puts \"THIS IS THE RETURN OF DIJKSTRAS #{arr}\" \n $lock.synchronize{\n $rt.each_key {|key|\n update = $node_info.new \n # puts \"Key IS #{key}\"\n hops = arr[0]\n lis = arr[1]\n prevs = hops[key]\n update.src = $hostname\n update.dst = key\n update.cost = lis[key]\n update.nexthop = prevs[1]\n $rt[key] = update\n # puts \"ROUTING TABLE #{$rt}\"\n }\n }\n }\nend", "def known_host_hash?(hostlist, entries); end", "def update\n\t\trespond_to do |format|\n\t\t\tif @domain.update_attributes(:hostname => params[:hostname])\n\t\t\t\tformat.json { head :no_content, status: :ok }\n\t\t\telse\n\t\t\t\tformat.json { render json: @domain.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\n\t\t# Fetch the hostname IP address and 
update the record in a new thread\n\t\tt1=Thread.new{fetch_origin_ip()}\n\t\tt1.join\n\tend", "def auto_discover_nodes!\n @servers = execute(:all_nodes)\n end", "def nodetool_status()\n out = `/opt/cassandra/bin/nodetool status`\n raise 'nodetool status failed' if $? != 0\n rows = out.split(\"\\n\")\n hash = {}\n dc_exp = /Datacenter: (.*)/\n #vnode\n #-- Address Load Tokens Owns Host ID Rack\n #non-vnode\n #-- Address Load Owns Host ID Token Rack\n #node_ex = /^(?<status>[UD\\?][NLJM]) +(?<address>(?:[0-9]{1,3}\\.){3}[0-9]{1,3}) +(?<load>(\\d+\\.?\\d* (TB|GB|MB|KB|bytes))|\\?) +(?<tokens>\\d+) +(?<owns>(\\d+\\.?\\d*%|\\?)) +(?<hostid>[a-z0-9\\-]+) +(?<rack>.*)$/\n node_ex = /^([UD\\?][NLJM]) +((?:[0-9]{1,3}\\.){3}[0-9]{1,3}) +((?:\\d+\\.?\\d* (?:TB|GB|MB|KB|bytes))|\\?) +(\\d+) +(\\d+\\.?\\d*%|\\?) +([a-z0-9\\-]+) +(.*)$/\n datacenter = nil\n rows.each do |row|\n m = dc_exp.match(row)\n if m\n datacenter = m[1]\n next\n end\n m = node_ex.match(row)\n next if m == nil\n node = {'datacenter' => datacenter}\n hash[m[2]] = node\n i = 0\n %w(status address load tokens owns hostid rack).each do |name|\n node[name] = m[i += 1]\n end\n # m.names.each do |name|\n # node[name] = m[name]\n # end\n end\n return hash\n end", "def check_flags(hosts)\n flags = {}\n hosts.keys.each do |host|\n flags[host] = {}\n\n errors = check_sources(hosts[host])\n next if errors == ''\n flags[host]['text'] = errors\n end\n flags\n end", "def parse_old\n\t@services=Hash.new\n\tf_site=File.open(ARGV[0],'r')\n\tf_site.each do |line|\n\t\tsite=line.chomp.strip\n\t\tsite=Wmap::HostTracker.instance.url_2_site(site)\n\t\tabort \"Error on processing site: #{site}\" if site.nil?\n\t\thost=Wmap::HostTracker.instance.url_2_host(site)\n\t\tabort \"Error on processing host: #{host}\" if host.nil?\n\t\tip=Wmap::HostTracker.instance.local_host_2_ip(host)\n\t\tip=Wmap::HostTracker.instance.host_2_ip(host) if ip.nil?\n\t\tnext if ip.nil?\n\t\tnext unless Wmap::HostTracker.instance.is_ip?(ip)\n\t\tport=Wmap::HostTracker.instance.url_2_port(site)\n\t\tkey=ip+\":\"\n\t\tkey+=port.to_s\n\t\t@services[key]=true unless @services.key?(key)\n\tend\n\tf_site.close\nend", "def update_node_search_domains(node_search_domains_properties, opts = {})\n data, _status_code, _headers = update_node_search_domains_with_http_info(node_search_domains_properties, opts)\n return data\n end", "def common_nodes(remote, opts={:heads => nil, :force => nil, :base => nil})\n # variable prep!\n node_map = changelog.node_map\n search = []\n unknown = []\n fetch = {}\n seen = {}\n seen_branch = {}\n opts[:base] ||= {}\n opts[:heads] ||= remote.heads\n \n # if we've got nothing...\n if changelog.tip == NULL_ID\n opts[:base][NULL_ID] = true # 1 is stored in the Python\n \n return [NULL_ID], [NULL_ID], opts[:heads].dup unless opts[:heads] == [NULL_ID]\n return [NULL_ID], [], [] # if we didn't trip ^, we're returning this\n end\n \n # assume we're closer to the tip than the root\n # and start by examining heads\n UI::status 'searching for changes'\n \n opts[:heads].each do |head|\n if !node_map.include?(head)\n unknown << head\n else\n opts[:base][head] = true # 1 is stored in the Python\n end\n end\n \n opts[:heads] = unknown # the ol' switcheroo\n return opts[:base].keys, [], [] if unknown.empty? 
# BAIL\n \n # make a hash with keys of unknown\n requests = Hash.with_keys unknown\n count = 0\n \n # Search through the remote branches\n # a branch here is a linear part of history, with 4 (four)\n # parts:\n #\n # head, root, first parent, second parent\n # (a branch always has two parents (or none) by definition)\n #\n # Here's where we start using the Hashes instead of Arrays\n # trick. Keep an eye out for opts[:base] and opts[:heads]!\n unknown = remote.branches(*unknown)\n until unknown.empty?\n r = []\n \n while node = unknown.shift\n next if seen.include?(node[0])\n UI::debug \"examining #{short node[0]}:#{short node[1]}\"\n \n if node[0] == NULL_ID\n # Do nothing...\n elsif seen_branch.include? node\n UI::debug 'branch already found'\n next\n elsif node_map.include? node[1]\n UI::debug \"found incomplete branch #{short node[0]}:#{short node[1]}\"\n search << node[0..1]\n seen_branch[node] = true # 1 in the python\n else\n unless seen.include?(node[1]) || fetch.include?(node[1])\n if node_map.include?(node[2]) and node_map.include?(node[3])\n UI::debug \"found new changset #{short node[1]}\"\n fetch[node[1]] = true # 1 in the python\n end # end if\n \n node[2..3].each do |p|\n opts[:base][p] = true if node_map.include? p\n end\n end # end unless\n \n node[2..3].each do |p|\n unless requests.include?(p) || node_map.include?(p)\n r << p\n requests[p] = true # 1 in the python\n end # end unless\n end # end each\n end # end if\n \n seen[node[0]] = true # 1 in the python\n end # end while\n \n unless r.empty?\n count += 1\n \n UI::debug \"request #{count}: #{r.map{|i| short i }}\"\n \n (0 .. (r.size-1)).step(10) do |p|\n remote.branches(r[p..(p+9)]).each do |b|\n UI::debug \"received #{short b[0]}:#{short b[1]}\"\n unknown << b\n end\n end\n end # end unless\n end # end until\n \n # sorry for the ambiguous variable names\n # the python doesn't name them either, which\n # means I have no clue what these are\n find_proc = proc do |item1, item2|\n fetch[item1] = true\n opts[:base][item2] = true\n end\n \n # do a binary search on the branches we found\n search, new_count = *binary_search(:find => search,\n :repo => remote,\n :node_map => node_map,\n :on_find => find_proc)\n count += new_count # keep keeping track of the total\n \n # sanity check, because this method is sooooo fucking long\n fetch.keys.each do |f|\n if node_map.include? 
f\n raise RepoError.new(\"already have changeset #{short f[0..3]}\")\n end\n end\n \n if opts[:base].keys == [NULL_ID]\n if opts[:force]\n UI::warn 'repository is unrelated'\n else\n raise RepoError.new('repository is unrelated')\n end\n end\n \n UI::debug \"found new changesets starting at #{fetch.keys.map{|f| short f }.join ' '}\"\n UI::debug \"#{count} total queries\"\n \n # on with the show!\n [opts[:base].keys, fetch.keys, opts[:heads]]\n end", "def domain_update(args)\n response = send_request(domain_update_xml(args))\n\n get_result(response)\n end", "def update_subdomains\n backup_subdomains\n parse_subdomains\n end", "def call\n result = {}\n\n # loop on local remotes\n @local_campaigns.each do |local_campaign|\n discrepancies = []\n\n # find remote campaign using external reference\n remote_campaign = remote_campaign_by_local_reference(local_campaign.external_reference)\n\n if remote_campaign\n DISCREPANCY_ATTRIBUTES.each do |local_attr, remote_attr|\n if local_campaign[local_attr] != remote_campaign[remote_attr]\n discrepancies << discrepancy_hash(remote_attr, remote_campaign[remote_attr], local_campaign[local_attr])\n end\n end\n else\n @missing_remote_campaigns << new_campaign_hash(local_campaign)\n end\n\n unless discrepancies.empty?\n @changed_campaigns << changed_campaign_hash(local_campaign.external_reference, discrepancies)\n end\n end\n\n result[:changed_campaigns] = @changed_campaigns unless @changed_campaigns.empty?\n result[:missing_remote_campaigns] = @missing_remote_campaigns unless @missing_remote_campaigns.empty?\n\n result\n end", "def parse_old\n\thost_tracker=Wmap::HostTracker.instance\n\t@services=Hash.new\n\tf_site=File.open(ARGV[0],'r')\n\tf_site.each do |line|\n\t\tsite=line.chomp.strip\n\t\tsite=host_tracker.url_2_site(site)\n\t\tabort \"Error on processing site: #{site}\" if site.nil?\n\t\thost=host_tracker.url_2_host(site)\n\t\tabort \"Error on processing host: #{host}\" if host.nil?\n\t\tip=host_tracker.local_host_2_ip(host)\n\t\tip=host_tracker.host_2_ip(host) if ip.nil?\n\t\tnext if ip.nil?\n\t\tnext unless host_tracker.is_ip?(ip)\n\t\tport=host_tracker.url_2_port(site)\n\t\tkey=ip+\":\"\n\t\tkey+=port.to_s\n\t\t@services[key]=true unless @services.key?(key)\n\tend\n\tf_site.close\n\thost_tracker=nil\nend", "def refresh_uniq_sites\n\t\tputs \"Refresh unique site entries in the site store. \" if @verbose\n\t\tchanges=Hash.new\n\t\tsites=get_uniq_sites\n\t\tif sites.size > 0\n\t\t\tchanges=bulk_refresh(sites)\n\t\telse\n\t\t\tputs \"Error: no entry is refreshed. 
Please check your site store and try again.\"\n\t\tend\n\t\treturn changes\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def query_couchbase_servers\n\n couchbase_servers = Hash.new\n \n r=rightscale_server_collection 'couchbase_cluster_nodes' do\n tags [\"couchbase:cluster_ip=#{cluster_ip}\"]\n secondary_tags [\"server:uuid=*\", \"couchbase:listen_ip=*\"]\n action :nothing\n end\n r.run_action(:load)\n \n node[:server_collection]['couchbase_cluster_nodes'].to_hash.values.each do |tags|\n uuid = RightScale::Utils::Helper.get_tag_value('server:uuid', tags)\n ip = RightScale::Utils::Helper.get_tag_value('couchbase:listen_ip', tags)\n couchbase_servers[uuid] = {}\n couchbase_servers[uuid][:ip] = ip\n end\n \n couchbase_servers\n \n end", "def checkNodes(deployed_containers,nova_ip,quantum_ip,token)\n novaIP = URI.parse(nova_ip)\n nova = Ropenstack::Nova.new(novaIP, token)\n\n quantumIP = URI.parse(quantum_ip)\n quantum = Ropenstack::Quantum.new(quantumIP, token)\n\n deployed_containers.each do |deployed_container|\n logger.info \"Checking nodes for deployed container:\" + deployed_container.id.to_s\n # Check all VMs in the container\n deployed_container.deployed_vms.each do |vm|\n begin\n # Ask openstack for the server details\n server = nova.servers(vm.openstack_id)\n logger.info \"Server \" + vm.openstack_id + \" is up.\"\n rescue\n # If openstack returns an error, delete the vm\n logger.info \"Server \" + vm.openstack_id + \" is down. Deleting from deployed container.\"\n vm.destroy()\n end\n end\n\n # Check all networks in the container\n deployed_container.deployed_networks.each do |network|\n begin\n # Ask openstack for the network details\n net = quantum.networks(network.openstack_id)\n logger.info \"Network \" + network.openstack_id + \" is up.\"\n rescue\n # If openstack returns an error, delete the network\n logger.info \"Network \" + network.openstack_id + \" is down. Deleting from deployed container.\"\n network.destroy()\n end\n end\n\n # Check all routers in the container\n deployed_container.deployed_routers.each do |router|\n begin\n # Ask openstack for the router details\n r = quantum.routers(router.openstack_id)\n logger.info \"Router \" + router.openstack_id + \" is up.\"\n rescue\n # If openstack returns an error, delete the router\n logger.info \"Router \" + router.openstack_id + \" is down. Deleting from deployed container.\"\n router.destroy()\n end\n end\n end\n end" ]
[ "0.675163", "0.59737647", "0.59626085", "0.58956754", "0.57425475", "0.5713824", "0.56919813", "0.5617458", "0.5540912", "0.55262655", "0.5474444", "0.54590106", "0.54089713", "0.53691214", "0.53557086", "0.53376764", "0.53181106", "0.52742827", "0.5227042", "0.5222256", "0.51900923", "0.51652694", "0.5130863", "0.51302844", "0.512649", "0.51199454", "0.51156604", "0.5104164", "0.50755167", "0.5070298", "0.5019904", "0.5014401", "0.49922612", "0.4980031", "0.49574998", "0.4939231", "0.49183133", "0.4890204", "0.4883723", "0.48800507", "0.48672166", "0.4860614", "0.4860614", "0.48567855", "0.48403022", "0.48395556", "0.48383382", "0.48367897", "0.48270208", "0.48246422", "0.4816585", "0.48159578", "0.48080322", "0.48071817", "0.4793177", "0.4793104", "0.47883922", "0.47796252", "0.47613707", "0.4749689", "0.47398195", "0.47301605", "0.47095507", "0.47015205", "0.47007436", "0.46892026", "0.4689026", "0.46884277", "0.46851262", "0.46850613", "0.4682543", "0.46705887", "0.46640602", "0.4659189", "0.46549731", "0.46485987", "0.46485522", "0.46476847", "0.46391046", "0.46375316", "0.46374074", "0.46339238", "0.4629583", "0.46283916", "0.4628288", "0.46268195", "0.4625736", "0.4624579", "0.4624443", "0.46231252", "0.46208444", "0.46117902", "0.4605749", "0.46025214", "0.46004647", "0.45971292", "0.45959598", "0.45918384", "0.45865962", "0.4577508" ]
0.64395267
1
Create lookups for all the names in the specified +nodes+ and return a Hash of node identifiers keyed by the lookup Thread that is fetching the record.
def create_lookups( nodes )
		return nodes.each_with_object( {} ) do |(identifier, node), hash|
			self.log.debug "Creating lookup for node: %p" % [ node ]
			name = node['name'] or next
			record_type = node['record_type'] || 'A'
			record_class = Resolv::DNS::Resource::IN.const_get( record_type ) or
				raise "Unsupported record type %p!" % [ record_type ]
			self.log.debug "Looking up %s record for %s (%s)" % [ record_type, name, identifier ]
			thr = Thread.new do
				self.resolver.getresources( name, record_class )
			end
			hash[ thr ] = identifier
		end
	end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_host_keys(_nodes, metadata)\n updated_metadata = {}\n # Get the list of nodes, per hostname (just in case several nodes share the same hostname)\n # Hash<String, Array<String> >\n hostnames = Hash.new { |hash, key| hash[key] = [] }\n metadata.each do |node, node_metadata|\n if node_metadata[:host_ip]\n hostnames[node_metadata[:host_ip]] << node\n elsif node_metadata[:hostname]\n hostnames[node_metadata[:hostname]] << node\n end\n end\n unless hostnames.empty?\n host_keys_for(*hostnames.keys).each do |hostname, ip|\n hostnames[hostname].each do |node|\n updated_metadata[node] = ip\n end\n end\n end\n updated_metadata\n end", "def run( nodes )\n\t\t\tself.log.debug \"Got nodes to check with %p: %p\" % [ self, nodes ]\n\n\t\t\trecords = nodes.each_with_object( {} ) do |(identifier, node), hash|\n\t\t\t\tself.log.debug \"Looking up whois info for %p (%p)\" % [ identifier, node ]\n\t\t\t\thash[ identifier ] = self.client.lookup( node['name'] )\n\t\t\tend\n\n\t\t\treturn records.each_with_object( {} ) do |(identifier, record), hash|\n\t\t\t\tparser = record.parser\n\t\t\t\thash[ identifier ] = self.parse_record( parser, identifier )\n\t\t\tend\n\n\t\tend", "def fetch_nodes(nodes, dns_cache)\n ret = []\n nodes.each_with_index do |item, index|\n ip, port = item\n host = dns_cache.fetch(ip) {\n |missing_ip|\n host = Resolv.getname(missing_ip)\n dns_cache[ip] = host\n host\n }\n name = \"#{host}:#{port}\"\n role = index == 0 ? 'master' : 'slave'\n node = {\n :host => host, :port => port,\n :name => name, :ip => ip,\n :role => role\n }\n ret << node\n end\n ret\n end", "def collect_node_nei_hashes\n @log.info(\"#{__method__.to_s} started[#{self.class.to_s}]\")\n\n node_nei_hash = @redis_connector.fetch_relations\n end", "def uuid_or_hostname_to_uuids(nodes)\n # avoid the Cassandra lookup if all the nodes are already UUIDs\n return nodes unless nodes.reject { |node| Hastur::Util.valid_uuid?(node) }.any?\n\n # node registration is daily, bucket the lookup on day boundary if unspecified\n day_start_ts, day_end_ts = get_start_end :one_day\n\n uuid_lookup = Hastur::Cassandra.lookup_by_key(cass_client, \"host-uuid\", day_start_ts, day_end_ts)\n\n nodes.flatten.map do |maybe_uuid|\n if Hastur::Util.valid_uuid?(maybe_uuid)\n maybe_uuid\n else\n uuid_lookup[maybe_uuid]\n end\n end.compact\n end", "def hostnames(nodes)\n @referenced_nodes ||= ObjectList.new\n nodes = listify(nodes)\n nodes.each_node do |node|\n @referenced_nodes[node.name] ||= node\n end\n return nodes.values.collect {|node| node.domain.name}\n end", "def wait_for_responses( lookups, nodes )\n\t\t\tupdate = {}\n\n\t\t\tuntil lookups.empty?\n\n\t\t\t\tlookups.keys.each do |thr|\n\t\t\t\t\tnext if thr.alive?\n\n\t\t\t\t\tidentifier = lookups.delete( thr )\n\t\t\t\t\tbegin\n\t\t\t\t\t\trecords = thr.value\n\n\t\t\t\t\t\tif !records\n\t\t\t\t\t\t\tupdate[ identifier ] = { error: \"Lookup failed (timeout).\" }\n\t\t\t\t\t\telsif records.empty?\n\t\t\t\t\t\t\tupdate[ identifier ] = { error: \"Lookup failed (no records returned).\" }\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tnode_data = nodes[ identifier ]\n\t\t\t\t\t\t\tupdate[ identifier ] = self.compare_values( records, node_data )\n\t\t\t\t\t\tend\n\t\t\t\t\trescue SystemCallError => err\n\t\t\t\t\t\tmsg = \"%p: %s\" % [ err.class, err.message ]\n\t\t\t\t\t\tself.log.error \"%s while looking up %s\" % [ msg, identifier ]\n\t\t\t\t\t\tupdate[ identifier ] = { error: msg }\n\t\t\t\t\tend\n\t\t\t\tend\n\n\t\t\tend\n\n\t\t\treturn update\n\t\tend", "def run( nodes )\n\t\t\tself.log.debug \"Got %d 
nodes to check with %p\" % [ nodes.length, self ]\n\t\t\tlookups = self.create_lookups( nodes )\n\t\t\treturn self.wait_for_responses( lookups, nodes )\n\t\tend", "def node_hash(node_id)\n \n end", "def hosts_file(nodes=nil)\n if nodes.nil?\n if @referenced_nodes && @referenced_nodes.any?\n nodes = @referenced_nodes\n nodes = nodes.merge(nodes_like_me[:services => 'mx']) # all nodes always need to communicate with mx nodes.\n end\n end\n return {} unless nodes\n hosts = {}\n my_location = @node['location'] ? @node['location']['name'] : nil\n nodes.each_node do |node|\n hosts[node.name] = {'ip_address' => node.ip_address, 'domain_internal' => node.domain.internal, 'domain_full' => node.domain.full}\n node_location = node['location'] ? node['location']['name'] : nil\n if my_location == node_location\n if facts = @node.manager.facts[node.name]\n if facts['ec2_public_ipv4']\n hosts[node.name]['ip_address'] = facts['ec2_public_ipv4']\n end\n end\n end\n host_pub_key = Util::read_file([:node_ssh_pub_key,node.name])\n if host_pub_key\n hosts[node.name]['host_pub_key'] = host_pub_key\n end\n end\n hosts\n end", "def hosts_file(nodes=nil)\n if nodes.nil?\n if @referenced_nodes && @referenced_nodes.any?\n nodes = @referenced_nodes\n nodes = nodes.merge(nodes_like_me[:services => 'mx']) # all nodes always need to communicate with mx nodes.\n end\n end\n return {} unless nodes\n hosts = {}\n my_location = @node['location'] ? @node['location']['name'] : nil\n nodes.each_node do |node|\n hosts[node.name] = {'ip_address' => node.ip_address, 'domain_internal' => node.domain.internal, 'domain_full' => node.domain.full}\n node_location = node['location'] ? node['location']['name'] : nil\n if my_location == node_location\n if facts = @node.manager.facts[node.name]\n if facts['ec2_public_ipv4']\n hosts[node.name]['ip_address'] = facts['ec2_public_ipv4']\n end\n end\n end\n host_pub_key = Util::read_file([:node_ssh_pub_key,node.name])\n if host_pub_key\n hosts[node.name]['host_pub_key'] = host_pub_key\n end\n end\n hosts\n end", "def facts_for_node(certnames)\n return {} if certnames.empty? 
|| certnames.nil?\n\n certnames.uniq!\n name_query = certnames.map { |c| [\"=\", \"certname\", c] }\n name_query.insert(0, \"or\")\n\n @logger.debug(\"Querying certnames\")\n result = make_query(name_query, 'inventory')\n\n result&.each_with_object({}) do |node, coll|\n coll[node['certname']] = node['facts']\n end\n end", "def run( nodes )\n\t\t\tresults = {}\n\t\t\thydra = Typhoeus::Hydra.new( self.runner_settings )\n\n\t\t\tnodes.each do |identifier, node|\n\t\t\t\tself.log.debug \"Making request for node %s\" % [ identifier ]\n\t\t\t\trequest = self.request_for_node( node )\n\t\t\t\trequest.on_complete do |response|\n\t\t\t\t\tself.log.debug \"Handling response for %s\" % [ identifier ]\n\t\t\t\t\tresults[ identifier ] =\n\t\t\t\t\t\tself.make_response_results( response, node )\n\t\t\t\tend\n\t\t\t\thydra.queue( request )\n\t\t\tend\n\n\t\t\thydra.run\n\n\t\t\treturn results\n\t\tend", "def recipes(nodes)\n nodes.inject({}) do |memo, node|\n node.recipes.each do |recipe|\n memo[recipe.to_s] ||= 0\n memo[recipe.to_s] += 1\n end\n\n memo\n end\n end", "def make_nodes\n nodes = {}\n @files.each_with_index do |file, ind|\n @ranks.each do |rank|\n symbol = \"#{file}#{rank}\".to_sym\n nodes[symbol] = Node.new([rank - 1, ind])\n end\n end\n nodes\n end", "def find_nodes_to_verify(references)\n nodes = {}\n\n references.each do |uri, _digest_value|\n uri = uri.sub(/^#/, '')\n node = find_node_by_uri(uri)\n\n nodes[uri] = calculate_digest(node)\n end\n\n nodes\n end", "def nodes_hash(nodes)\n nodes.each_with_object({}) do |node, hash|\n model = node.get(:model)\n attributes = model.to_hash\n\n attributes.merge!(node.properties.except(:model, :cc_in, :cc_out))\n\n attributes.each do |key, value|\n attributes[key] = value.to_hash if value.is_a?(ValueObject)\n end\n\n if model.max_demand\n attributes[:max_demand] = model.max_demand\n elsif !model.queries.key?(:max_demand)\n # Keep the Refinery value if it was set by a query.\n attributes.delete(:max_demand)\n end\n\n attributes[:demand] = node.demand.to_f\n attributes[:input] = slots_hash(node.slots.in)\n attributes[:output] = slots_hash(node.slots.out)\n\n attributes.delete(:queries)\n\n hash[node.key] = attributes\n end\n end", "def source_nodename_map\n map = {}\n @prometheus.query('max_over_time(node_uname_info[1d])').each do |result|\n source = result['metric']['instance']\n nodename = result['metric']['nodename'].split('.', 2)[0]\n log.info(\"[node_exporter] instance: '#{source}', nodename: '#{nodename}'\")\n map[source] = nodename\n end\n log.warn('Unable to query the node_exporter intances from Prometheus') \\\n if map.empty?\n map\n end", "def facts_for_node(certnames)\n return {} if certnames.empty? 
|| certnames.nil?\n\n certnames.uniq!\n name_query = certnames.map { |c| [\"=\", \"certname\", c] }\n name_query.insert(0, \"or\")\n result = make_query(name_query, 'inventory')\n\n result&.each_with_object({}) do |node, coll|\n coll[node['certname']] = node['facts']\n end\n end", "def nodes(attrs={})\n if attrs.is_a?(Hash) and attrs.has_key?(:type)\n attrs[:type].constantize.find(session, \"label_node_ids LIKE '%#{id}%'\")\n else\n Ecore::Node.find(session, \"label_node_ids LIKE '%#{id}%'\")\n #.inject(NodeArray.new) do |arr,n|\n # n.session = session\n # arr << n\n #end\n end\n end", "def uuids_for_hostnames(cass_client, hostnames, start_ts, end_ts)\n lookup = Hastur::Cassandra.lookup_by_key(cass_client, \"host-uuid\", start_ts, end_ts)\n\n # just rely on the lookup table and sink most of the logic there in a scheduled job\n out = {}\n hostnames.each do |host|\n out[host] = lookup[host]\n end\n\n out\n end", "def nodes # :nodoc:\n return @nodes if defined?(@nodes) \n node_ids = (1..Configuration.numbers['total_nodes'])\n @nodes = node_ids.map do |id| \n {\n id: id,\n host: \"node_#{id}_#{Worker::VERSION}\"\n }.to_struct\n end\n end", "def network_names_for_uuids(cass_client, uuids, start_ts, end_ts)\n cnames = Hastur::Cassandra.lookup_by_key cass_client, :cnames, start_ts, end_ts, :count => 1_000_000\n ohais = Hastur::Cassandra.get cass_client, uuids, \"info_ohai\", start_ts, end_ts, :count => 1\n regs = Hastur::Cassandra.get cass_client, uuids, \"reg_agent\", start_ts, end_ts, :count => 1\n\n unless ohais.keys.any? or regs.keys.any?\n raise NoDataAvailableError.new \"None of #{uuids} have registered recently. Try restarting the agent(s).\"\n end\n\n out = {}\n uuids.each do |uuid|\n sys = { :hostname => nil, :fqdn => nil, :nodename => nil, :cnames => [] }\n\n # first, try the registration information\n if regs[uuid] and regs[uuid][\"reg_agent\"]\n reg_ts, reg_json = regs[uuid][\"reg_agent\"][\"\"].shift\n reg = MultiJson.load reg_json rescue {}\n\n # we only send the fqdn as hostname right now, need to add uname(2) fields\n # agent currently sends :hostname => Socket.gethostname\n sys[:hostname] = reg[\"hostname\"]\n sys[:nodename] = reg[\"nodename\"]\n\n # /etc/cnames is an Ooyala standard for setting the system's human-facing name\n if reg[\"etc_cnames\"]\n sys[:cnames] = reg[\"etc_cnames\"]\n end\n end\n\n # use ohai to fill in additional info, including EC2 info\n if ohais[uuid] and ohais[uuid][\"info_ohai\"]\n ohai_ts, ohai_json = ohais[uuid][\"info_ohai\"][\"\"].shift\n ohai = MultiJson.load ohai_json rescue {}\n\n # ohai's 'hostname' is useless, it uses hostname -s to get it\n sys[:hostname] ||= ohai[\"fqdn\"]\n sys[:fqdn] ||= ohai[\"fqdn\"]\n\n if ohai[\"ec2\"]\n # use the EC2 info regardless of what the OS says\n sys[:hostname] = ohai[\"ec2\"][\"local_hostname\"]\n sys[:fqdn] = ohai[\"ec2\"][\"public_hostname\"]\n end\n end\n\n # hosts can have any number of cnames\n sys.values.each do |name|\n if cnames.has_key? name\n sys[:cnames] << cnames[name]\n end\n end\n # don't sort! 
etc_cnames values should always come first, alphabetical is useless\n sys[:cnames] = sys[:cnames].uniq\n\n # provide a simple array of all known network names\n # reverse the flattened list so the cnames come first\n sys[:all] = sys.values.flatten.compact.reverse.uniq\n\n out[uuid] = sys\n end\n\n out\n end", "def [](n)\n if n.class == String\n ret = @lookup_name[n]\n ret = @lookup_mac[n] if !ret\n return ret\n end\n @nodes[n]\n end", "def node_names\n @groups.inject(local_node_names) do |acc, g|\n acc.merge(g.node_names)\n end\n end", "def nodes(data)\n\tnodes = \n\t\tdata.collect{ |k,v| [k, v] }.flatten.uniq\n\t\t\t.collect do |data|\n\t\t\t\t{\n\t\t\t\t\t'id' => data\n\t\t\t\t\t# 'r' => data.split(' ')[1][0].to_i, # first digit\n\t\t\t\t}\n\t\t\tend\n\t\n\treturn nodes\nend", "def build_reverse_tag_lookup_cache\n @tags_for_node_cache = ArrayHash.new\n tags.inject(@tags_for_node_cache) do |hash, (tag, tag_node)|\n hash[tag_node] << tag\n hash\n end\n end", "def filter_in_nodes(nodes_list)\n new_nodes_graph = {}\n @nodes_graph.each do |node_name, node_info|\n next unless nodes_list.include?(node_name)\n\n new_nodes_graph[node_name] = node_info.merge(\n connections: node_info[:connections].select { |connected_hostname, _labels| nodes_list.include?(connected_hostname) },\n includes: node_info[:includes] & nodes_list\n )\n end\n @nodes_graph = new_nodes_graph\n end", "def map_nodes_for_events(events_diff, handlers)\n nodes_events = {}\n events_diff.each do |object, events|\n events.each do |event, nodes|\n handlers.keys.each do |ip|\n if nodes.include?(ip)\n nodes_events[ip] ||= {}\n nodes_events[ip][object] ||= []\n nodes_events[ip][object].push(event)\n break\n end\n end\n end\n end\n nodes_events\n end", "def parse_cid_lookup\n @cid_lookup = {}\n barcode_nodes.each do |node|\n barcode = get_barcode(node)\n @cid_lookup[barcode] = get_id(node) unless barcode.nil?\n end\n end", "def nodes\n @nodes ||= {}\n end", "def nodes\n @nodes ||= {}\n end", "def get_ids(host)\n node_uid, site_uid, grid_uid, _tdl = host.split('.')\n cluster_uid, node_num = node_uid.split('-')\n ids = { 'node_uid' => node_uid, 'site_uid' => site_uid, 'grid_uid' => grid_uid, 'cluster_uid' => cluster_uid, 'node_num' => node_num }\n return ids\nend", "def gen_node_infos(cluster_yml)\n master_n = 1\n master_mem = cluster_yml['master_mem']\n master_cpus = cluster_yml['master_cpus']\n slave_n = cluster_yml['slave_n']\n slave_mem = cluster_yml['slave_mem']\n slave_cpus = cluster_yml['slave_cpus']\n master_ipbase = cluster_yml['master_ipbase']\n slave_ipbase = cluster_yml['slave_ipbase']\n\n master_infos = (1..master_n).map do |i|\n { :hostname => \"master#{i}\",\n :ip => master_ipbase + \"#{10+i}\",\n :mem => master_mem,\n :cpus => master_cpus\n }\n end\n slave_infos = (1..slave_n).map do |i|\n { :hostname => \"slave#{i}\",\n :ip => slave_ipbase + \"#{10+i}\",\n :mem => slave_mem,\n :cpus => slave_cpus\n }\n end\n\n return { :master => master_infos, :slave=>slave_infos }\nend", "def gen_node_infos(cluster_yml)\n\n master_n = 1\n master_mem = cluster_yml['master_mem']\n master_cpus = cluster_yml['master_cpus']\n\n slave_n = cluster_yml['slave_n']\n slave_mem = cluster_yml['slave_mem']\n slave_cpus = cluster_yml['slave_cpus']\n\n master_ipbase = cluster_yml['master_ipbase']\n slave_ipbase = cluster_yml['slave_ipbase']\n\n master_infos = (1..master_n).map do |i|\n { :hostname => \"master#{i}\",\n :ip => master_ipbase + \"#{10+i}\",\n :mem => master_mem,\n :cpus => master_cpus\n }\n end\n\n slave_infos = (1..slave_n).map do |i|\n { 
:hostname => \"slave#{i}\",\n :ip => slave_ipbase + \"#{10+i}\",\n :mem => slave_mem,\n :cpus => slave_cpus\n }\n end\n\n analytics_node = cluster_yml['analytics_node']\n analytics_mem = cluster_yml['analytics_mem']\n analytics_cpus = cluster_yml['analytics_cpus']\n\n analytics_info = {\n :node => analytics_node,\n :mem => analytics_mem,\n :cpus => analytics_cpus\n }\n\n\n return { :master => master_infos, :slave=>slave_infos, :analytics=>analytics_info }\nend", "def discover_chef_nodes!\n chef_nodes.each do |chef_node|\n if chef_node[\"cluster_name\"] && chef_node[\"facet_name\"] && chef_node[\"facet_index\"]\n cluster_name = chef_node[\"cluster_name\"]\n facet_name = chef_node[\"facet_name\"]\n facet_index = chef_node[\"facet_index\"]\n elsif chef_node.name\n ( cluster_name, facet_name, facet_index ) = chef_node.name.split(/-/)\n else\n next\n end\n svr = Ironfan::Server.get(cluster_name, facet_name, facet_index)\n svr.chef_node = chef_node\n @aws_instance_hash[ chef_node.ec2.instance_id ] = svr if chef_node && chef_node[:ec2] && chef_node.ec2.instance_id\n end\n end", "def node_ids() ; ext_info[:nodes] ; end", "def known_ips\n # Keep a cache of it\n unless defined?(@known_ips)\n @known_ips = {}\n # Fill info from the metadata\n @nodes_handler.prefetch_metadata_of @nodes_handler.known_nodes, %i[private_ips public_ips]\n @nodes_handler.known_nodes.each do |node|\n %i[private_ips public_ips].each do |ip_type|\n ips = @nodes_handler.metadata_of(node, ip_type)\n next unless ips\n\n ips.each do |ip|\n raise \"Conflict: #{ip} is already associated to #{@known_ips[ip]}. Cannot associate it to #{node}.\" if @known_ips.key?(ip)\n\n @known_ips[ip] = node\n end\n end\n end\n end\n @known_ips\n end", "def job_names\n @nodes.keys\n end", "def create_nodes(nodes)\n nodes.each do |node|\n (raise CloudLB::Exception::MissingArgument, \"Must provide :address for all nodes\") if node[:address].to_s.empty?\n (raise CloudLB::Exception::MissingArgument, \"Must provide :port for all nodes\") if node[:port].to_s.empty?\n node[:condition] ||= \"ENABLED\"\n end\n body = {:nodes => nodes}.to_json\n response = @connection.lbreq(\"POST\", @lbmgmthost, \"#{@lbmgmtpath}/loadbalancers/#{CloudLB.escape(@id.to_s)}/nodes\",@lbmgmtport,@lbmgmtscheme,{},body)\n CloudLB::Exception.raise_exception(response) unless response.code.to_s.match(/^20.$/)\n JSON.parse(response.body)['nodes'].map { |node| get_node(node[\"id\"]) }\n end", "def initialize\n @nodes_hash = Hash.new\n end", "def nodes(tweets)\n tweets.inject({}) do |nodes, tweet|\n node = nodes[tweet.user_name]\n nodes[tweet.user_name] = User.new(tweet.user_name) unless node\n nodes\n end\n end", "def import_nodes(nodes)\n result = self.import(nodes)\n # node_ids = [] # could accumulate the ids and return the array, but this might cause problems when importing thousands of records. 
For now, not doing it.\n nodes.each do |n|\n node_id = n.latest_version_id\n # Must query the database to get the id of the imported node so that can be passed to the NodeIndexer job\n # AND so the resulting document in elasticsearch will have the _bindery_node_id value populated.\n # This adds the cost of a database query but allows us to run the elasticsearch import asynchronously...\n Bindery::Persistence::ElasticSearch::Node::NodeIndexer.perform_async(node_id)\n end\n result\n end", "def get_node_names\n Chef::Node.list.keys\n end", "def id_name_map2(records)\n records.inject({}) do |map, record|\n map.update(record['hostid'] => record['name'])\n end\n end", "def to_hash\n index_hash = Hash.new\n self.each do |node|\n index_hash[node.node_name] = node.to_s\n end\n index_hash\n end", "def keys\n\t\t\treturn @lookup\n\t\tend", "def lookups; end", "def generate_keys(num)\n num.times.inject({}) do |acc, i| \n key = ::Digest::MD5.hexdigest(i.to_s)\n node = redis.node_for(key).client\n hash_key = \"redis://#{node.host}:#{node.port}/#{node.db}\"\n acc[hash_key] = [] if acc[hash_key].nil?\n acc[hash_key] << key\n acc\n end\n end", "def get_responsible_nodes key\n responsible_hash_keys = []\n if @@dynamo_nodes.size <= ENV['REPLICATION'].to_i\n return @@dynamo_nodes\n end\n responsible_node_key = 0\n previous = 0\n\n sorted_hash_keys = @@dynamo_nodes.sort_by { |_k,v| v.first.second.to_i}.map {|_k,v| v.first.second}\n\n sorted_hash_keys.each do |hash_key|\n #log_message('Comparing key '+key.to_i.to_s+' to hash_key '+hash_key.to_i.to_s)\n if key.to_i <= hash_key.to_i && key.to_i > previous.to_i #key.to_i.between?(previous.to_i,hash_key.to_i)\n responsible_node_key = hash_key\n break\n elsif hash_key.to_i == sorted_hash_keys.last.to_i && hash_key.to_i < key.to_i\n responsible_node_key = sorted_hash_keys.first\n else\n previous = hash_key\n end\n end\n\n sorted_hash_keys.each_with_index do |key, index|\n if key == responsible_node_key\n 3.times.each_with_index { |_e, iterator| responsible_hash_keys << sorted_hash_keys[(index - iterator) % sorted_hash_keys.size]}\n end\n end\n\n @@dynamo_nodes.select { |_k, v| v.first.second.in?(responsible_hash_keys) }\n\n end", "def initialize(nodes: [], nodes_by_id: nil, selected_node_ids: nil)\n @nodes_by_id = nodes_by_id\n @nodes_by_id ||= nodes.reduce({}) { |h, n| h.update(n.id => n) }\n @selected_node_ids = selected_node_ids || Set.new(@nodes_by_id.keys)\n end", "def to_node_maps(nodes, config)\n config.map { |from, to| get_node_map(nodes, from, to) }\n end", "def resolve_names(lookup_name, lookup_types=[Dnsruby::Types::A, Dnsruby::Types::CNAME, Dnsruby::Types::PTR])\n\n names = []\n x = resolve(lookup_name, lookup_types)\n x.each {|y| names << y[\"name\"] }\n\n names.uniq\n end", "def resolve_names(lookup_name, lookup_types=[Dnsruby::Types::A, Dnsruby::Types::CNAME, Dnsruby::Types::PTR])\n\n names = []\n x = resolve(lookup_name, lookup_types)\n x.each {|y| names << y[\"name\"] }\n\n names.uniq\n end", "def sorted_keys(nodes)\n keys = []\n nodes.each do |n|\n n.cells.each do |c|\n keys << c.key\n end\n end\n keys.uniq.sort { |a, b| a <=> b }\n end", "def gen_node_key(num)\n \"node#{num}\".to_sym\nend", "def resolve_names(lookup_name, lookup_types=[Dnsruby::Types::AAAA, Dnsruby::Types::A, Dnsruby::Types::CNAME, Dnsruby::Types::PTR])\n\n names = []\n x = resolve(lookup_name, lookup_types)\n x.each {|y| names << y[\"name\"] }\n\n names.uniq\n end", "def help_node_names_data\n\t\t__elasticsearch__.client.cat.nodes(:format => 'json', :h => 'name')\n\tend", "def 
node_names\n @cluster.nodes.map(&:name)\n end", "def build_nodes\n # Create a Hash[Class => Node] for each table/class\n nodes = {}\n @table_names.each do |path|\n table_name = path.tr \"/\", \"_\"\n class_name = @class_names[table_name.to_sym] || table_name.classify\n klass = class_name.constantize\n nodes[klass] = Node.new(path, class_name, klass)\n end\n\n # First iniitalize dependencies from polymorphic associations that we\n # explicitly found in the yaml files.\n @polymorphic_associations.each do |klass, associations|\n associations.each do |association|\n node = nodes[klass]\n next unless node\n next unless nodes.key?(association)\n\n node.dependencies << association\n end\n end\n\n # Compute dependencies between nodes/classes by reflecting on their\n # ActiveRecord associations.\n nodes.each do |_, node|\n klass = node.klass\n klass.reflect_on_all_associations.each do |assoc|\n # We can't handle polymorphic associations, but the concrete types\n # should have been deduced from the yaml files contents\n next if assoc.polymorphic?\n\n # Don't add a dependency if the class is not in the given table names\n next unless nodes.key?(assoc.klass)\n\n # A class might depend on itself, but we don't add it as a dependency\n # because otherwise we'll never make it (the class can probably be created\n # just fine and these dependencies are optional/nilable)\n next if klass == assoc.klass\n\n case assoc.macro\n when :belongs_to\n node.dependencies << assoc.klass\n when :has_one, :has_many\n # Skip `through` association becuase it will be already computed\n # for the related `has_one`/`has_many` association\n next if assoc.options[:through]\n\n nodes[assoc.klass].dependencies << klass\n end\n end\n end\n\n # Finally sort all values by name for consistent results\n nodes.values.sort_by { |node| node.klass.name }\n end", "def resolve_node_hw_id_collision\n # Get all nodes\n nodes = get_data.fetch_all_objects(:node)\n # This will hold all hw_id's (not unique)'\n all_hw_id = []\n # Take each hw_id and add to our all_hw_id array\n nodes.each { |node| all_hw_id += node.hw_id }\n # Loop through each hw_id\n all_hw_id.each do\n |hwid|\n # This will hold nodes that match\n matching_nodes = []\n # loops through each node\n nodes.each do\n |node|\n # If the hwid is in the node.hw_id array then we add to the matching ndoes array\n matching_nodes << node if (node.hw_id & [hwid]).count > 0\n end\n # If we have more than one node we have a conflict\n # We sort by timestamp ascending\n matching_nodes.sort! 
{ |a, b| a.timestamp <=> b.timestamp }\n # We remove the first one, any that remain will be cleaned of the hwid\n matching_nodes.shift\n # We remove the hw_id from each and persist\n matching_nodes.each do\n |node|\n node.hw_id.delete(hwid)\n node.update_self\n end\n end\n nil\n end", "def apply(nodes)\n node_manager.assert_known(nodes)\n for node in nodes\n node_manager.find(node).apply\n end\n end", "def current_node_snapshots\n nodes = {}\n snapshots = Hash.new { |h, k| h[k] = NodeSnapshot.new(k) }\n fetch_node_manager_states.each do |node_manager, states|\n available, unavailable = states.values_at(:available, :unavailable)\n available.each do |node_string, latency|\n node = nodes[node_string] ||= node_from(node_string)\n snapshots[node].viewable_by(node_manager, latency)\n end\n unavailable.each do |node_string|\n node = nodes[node_string] ||= node_from(node_string)\n snapshots[node].unviewable_by(node_manager)\n end\n end\n\n snapshots\n end", "def nodes\n\t\t# Query the database\n\t\tnodeQuery = Node.select(:node_id)\n\t\t# Place the query in an array\n\t\tnodeArray = Array.new\n\t\tnodeQuery.each do |node|\n\t\t\tnodeArray.push [node.node_id]\n\t\tend\n\t\treturn nodeArray\n\tend", "def node_list\n list = {}\n search = Chef::Search::Query.new\n query = config[:query]\n\n ui.msg \"Search nodes '#{query}'\"\n search.search('node', query) do |node|\n if node['chef'] && node['chef']['client_version']\n version = node['chef']['client_version']\n\n list[version] ||= []\n list[version] << node\n end\n end\n ui.msg ''\n\n list\n end", "def lookups(database_record)\n ids = {}\n\n for_mappings(database_record) do |mapping, lookup|\n associated = database_record.association(name).reader\n\n ids[lookup] =\n if associated\n # It's possible to define a belongs_to association in a Mapping\n # for what is actually a one-to-many association on the\n # ActiveRecord object.\n Array(associated).first.send(mapping.lookup_column)\n end\n end\n\n ids\n end", "def keys\n lookup_map.keys\n end", "def nodes(tag)\n if nodes = @cache_nodes[tag]\n return nodes\n end\n hash_key = @hash_key_slice ? perform_hash_key_slice(tag) : tag\n regular_index = @regular_weight_array.size > 0 ? get_index(hash_key, @regular_weight_array.size) : 0\n standby_index = @standby_weight_array.size > 0 ? get_index(hash_key, @standby_weight_array.size) : 0\n nodes = [@regular_weight_array[regular_index], @standby_weight_array[standby_index]].compact\n @cache_nodes[tag] = nodes\n end", "def names_to_references(names)\n names.map { |name| find_or_create(name) }\n end", "def find_node(node_name)\n puts \"*\"*80\n raise(Thor::Error, \"Node not specified.\") if node_name.nil? 
|| node_name.empty?\n return {node_name => nodes_in_stage[node_name]} if nodes_in_stage[node_name]\n puts \"-\"*80\n nodes_in_stage.each do |key, value|\n puts key\n return {key => value} if key.start_with?(node_name)\n end\n raise(Thor::Error, \"Not found: #{node_name} in #{stage_name}.\")\n end", "def auto_discover_nodes!\n @servers = execute(:all_nodes)\n end", "def init_nodemap\n # map of key1 => node, key2 => node, ...\n @keynum = 1\n text_nodes = @doc.search(text_nodes_xpath, tm: TM_NS)\n text_nodes.each { |node| add_node(node) }\n end", "def create_lookup(start_dt, end_dt, tagger)\n db = NewsAnalyzer::News.new(\"dbi:Mysql:newsplus:#{@host}\")\n counter = {}\n titles = db.get_titles(start_dt, end_dt)\n titles.each do |row|\n t = row[\"title\"].force_encoding(Encoding::UTF_8)\n tagger.hatena_keyword(t).each do |word|\n counter[word] ||= 0\n counter[word] += 1\n end\n end\n lookup = {}\n id = 1\n counter.to_a.sort{|a,b|\n b[1] <=> a[1]\n }[0...50_000].each{|pair|\n lookup[pair[0]] = id\n id += 1\n }\n printf $stdout, \"words: %d\\n\", counter.size\n return lookup\nend", "def traverseNodes(nodes, nextNode)\n tVec = Array.new\n #puts \"***********************\\ntraverseNodes :#{nextNode} \\n_______________________________\\n#{nodes}\\n***********************************\"\n for i in 0..nodes.size\n tmp = nodes[i]\n @xmlTool.setCountToZero()\n at = 0\n #tmpNode = @xmlTool.getHashForNameAtPos(tmp, nextNode, at)\n while ((tmpNode [email protected](tmp, nextNode, at) )!= nil)\n at = at.next\n @xmlTool.setCountToZero()\n tVec.push(tmpNode) # alternative is tVec.push(tmpNode)\n #tmpNode = @xmlTool.getHashForNameAtPos(tmp, nextNode, at)\n end\n end\n return tVec\n end", "def hash_nodes(statements, nodes, grounded_hashes)\n hashes = grounded_hashes.dup\n ungrounded_hashes = {}\n hash_needed = true\n\n # We may have to go over the list multiple times. If a node is marked as\n # grounded, other nodes can then use it to decide their own state of\n # grounded.\n while hash_needed\n starting_grounded_nodes = hashes.size\n nodes.each do | node |\n unless hashes.member? node\n grounded, hash = node_hash_for(node, statements, hashes)\n if grounded\n hashes[node] = hash\n end\n ungrounded_hashes[node] = hash\n end\n end\n\n # after going over the list, any nodes with a unique hash can be marked\n # as grounded, even if we have not tied them back to a root yet.\n uniques = {}\n ungrounded_hashes.each do |node, hash|\n uniques[hash] = uniques.has_key?(hash) ? 
false : node\n end\n uniques.each do |hash, node|\n hashes[node] = hash if node\n end\n hash_needed = starting_grounded_nodes != hashes.size\n end\n [hashes, ungrounded_hashes]\n end", "def set_nodegroup_node_assignments(nodes, nodegroups, login, password_callback=PasswordCallback)\n node_ids = []\n nodes.each_pair do |node_name, node|\n if node['id']\n node_ids << node['id']\n else\n warn \"set_nodegroup_node_assignments passed a bogus nodes hash, #{node_name} has no id field\"\n end\n end\n\n nodegroupdata = {}\n node_ids = 'nil' if node_ids.empty?\n nodegroupdata['node_group_node_assignments[nodes][]'] = node_ids\n\n set_objects('node_groups', nodegroups, nodegroupdata, login, password_callback)\n end", "def gen_node_infos(config)\n\n config\n # master_n = config['master_n']\n # master_mem = config['master_mem']\n # slave_n = config['slave_n']\n # slave_mem = config['slave_mem']\n # force_zk = config['force_zk']\n # zk_mem = config['zk_mem']\n # master_ipbase = config['master_ipbase']\n # slave_ipbase = config['slave_ipbase']\n # zk_ipbase = config['zk_ipbase']\n\n # master_infos = (1..master_n).map do |i|\n # { :hostname => \"master#{i}\",\n # :ip => master_ipbase + \"#{10+i}\",\n # :mem => master_mem }\n # end\n # slave_infos = (1..slave_n).map do |i|\n # { :hostname => \"slave#{i}\",\n # :ip => slave_ipbase + \"#{10+i}\",\n # :mem => slave_mem }\n # end\n\n # zk_n = master_n > 1 ? 3 : (force_zk ? 1 : 0)\n # zk_infos = (1..zk_n).map do |i|\n # { :hostname => \"zk#{i}\",\n # :ip => zk_ipbase + \"#{10+i}\",\n # :mem => zk_mem }\n # end\n\n # return { :master => master_infos, :slave=>slave_infos, :zk=>zk_infos }\nend", "def find_probes_from_read_to_node(graph, read_to_node, probe_sequence_ids)\n return probe_sequence_ids.collect do |sequence_id|\n nodes = read_to_node[sequence_id].collect{|node_id| graph.nodes[node_id]}\n pick_best_node_for_read_id(sequence_id, nodes)\n end\n end", "def propfind(nodes)\n properties = { found: [], not_found: [] }\n\n find_properties_from_xml_elements nodes, properties\n\n properties[:found] = properties[:found].uniq.flatten if properties[:found]\n properties\n end", "def subtree_with_all_paths(nodes)\n hash = {}\n nodes.each { |x| hash[x] = true }\n nodes.each_index do |i|\n node1 = nodes[i]\n (0...i).each do |j|\n node2 = nodes[j]\n unless node1 == node2 then\n begin\n path = self.path(node1, node2)\n rescue IndexError, NoPathError\n path = []\n end\n path.each { |x| hash[x] = true }\n end\n end\n end\n self.subtree(hash.keys)\n end", "def identifiers_hash\n @identifiers_hash ||= {:default => [:uuid]}\n end", "def marshall_nodes(node_id_list)\n node_id_list.map{|nid| Node.find_by_persistent_id(nid)}\n end", "def AddNodes(nodes)\n\tnodes.each do |n|\n\t\tn.id=@lastId\n\t\tDefineGroup(n)\n\t\t@lastId=@lastId+1\n\tend\n\treturn nodes\n end", "def find_nodes_and_map(xml)\n self.maps.each do |map|\n doc = LibXML::XML::Parser.string(xml).parse\n nodes = doc.find(map[:base_path])\n return nodes, map if !nodes.empty?\n end\n return [], nil\n puts \"No map found in #{self.class} for xml #{xml[0..100]}...\" if $DEBUG\n end", "def get_aliases(hostname)\n headers = { Authorization: \"Bearer #{@token}\", 'Content-Type': 'application/json' }\n response = HTTParty.get(\"https://#{NETDB_SERVER}/nodes/#{hostname.fully_qualify}\",\n :headers => headers)\n if response.code != 200\n raise \"no node found for #{hostname}\"\n end\n\n # Parse through the response to find any aliases, and make sure that this is\n # the main hostname.\n node_json = JSON.parse(response.body)\n 
aliases = []\n node_json['names'].each do |n|\n if n['name'] != hostname\n raise \"#{hostname} is an alias for #{n['name']}. Please rerun with hostname #{n['name']}.\"\n elsif n.key?('aliases')\n aliases = (aliases + n['aliases'])\n end\n end\n\n aliases\nend", "def getHostnameToPort()\n @hostnameToPort = Hash.new\n\n # for every node we recognize, create an empty spot\n @allWeights.collection.each { |weight| \n if @ipToHostname[weight.srcIp]\n @hostnameToPort[weight.srcHostname] = \"\"\n end\n }\n\n nodeFile = File.open(\"#{@pathToNode}\", \"r\")\n\n nodeFile.each_line do |line|\n arr = line.split('=')\n\n if @hostnameToPort[\"#{arr[0]}\"]\n @hostnameToPort[\"#{arr[0]}\"] = \"#{arr[1].delete(\"\\n\").to_i}\"\n end\n end\n end", "def dfs_names(start_node_num)\n dfs(start_node_num).map do |num|\n @node_names[num]\n end\n end", "def services_list_by_node(node_profiles)\n services = Defaults::SERVICES_TO_PROFILES.select do |service, service_profiles|\n !(service_profiles & node_profiles).empty?\n end\n\n services.keys.uniq\n end", "def node_names\n map do |node|\n node.name\n end.sort\n end", "def parse_dns(nodeList)\n find_if_exists = 0\n domain = Hash.new{|hsh,key| hsh[key] = []}\n address = Hash.new{|hsh,key| hsh[key] = []}\n cname = Hash.new{|hsh,key| hsh[key] = []}\n\n nodeArr = []\n # To remove null values for '.split' method to work\n nodeList.each do |node|\n if node == ''|| node.empty? || node == \"\\n\"\n next\n end\n nodeArr.push(node.strip.split(','))\n end\n # Creating the key value Hash\n nodeArr.each do |(type,domain,source)|\n if type == \"CNAME\"\n cname[domain.strip.to_sym].push(source.strip)\n elsif type == \"A\"\n address[domain.strip.to_sym].push(source.strip)\n end\n end\n # Adding CNAME hash and ADDRESS hash into domain hash\n domain[:CNAME].push(cname)\n domain[:ADDRESS].push(address)\n return domain\nend", "def node_names\n @nodes.values_at *@nodes.each_index.reject {|i| @index_nodes.include? 
i}\n end", "def nodes\n nodes_by_id.values\n end", "def nodes(reload = false)\n load if reload\n @info['nodes'].map do |node|\n Drain::Node.new(\n stats['nodes'].find { |n| n[0] == node[0] },\n node,\n client,\n @asg\n )\n end\n end", "def bfs_names(start_node_num)\n bfs(start_node_num).map do |num|\n @node_names[num]\n end\n end", "def build_relationship_token_map\n rel_map = HashExtra[Hash.new{|h,k| h[k] = Set.new}]\n my_name = name_sym.to_s # no leading module names\n relationships.each do |(rel_name,)|\n toks = (rel_name.split('_') - [my_name])\n toks.each{|tok| rel_map[tok].add(rel_name)}\n end\n rel_map\n end", "def acquire_machines(action_handler, nodes_json, parallelizer)\n parallelizer.parallelize(nodes_json) do |node_json|\n machine = acquire_machine(add_prefix(node_json, action_handler), node_json)\n yield node_json, machine if block_given?\n machine\n end.to_a\n end", "def lookup_label_stat_names(cass_client, uuids, labels, start_timestamp, end_timestamp, options={})\n data = {}\n options = { :count => 10_000 }.merge(options)\n\n time_buckets = usec_aligned_chunks(start_timestamp, end_timestamp, :hour)\n\n labels.each do |lname, lvalue|\n data[lname] ||= {}\n\n # We use a reversed comparator - swap start and finish\n prefix_start, prefix_end = prefixes_from_values([lname, lvalue])\n options[:finish] = prefix_start\n options[:start] = prefix_end\n\n query_rows = time_buckets.flat_map { |ts| uuids.map { |u| \"statname-#{u}-#{ts}\"} }\n\n cass_client.multi_get('lookup_by_label', query_rows, options).each do |row_key, col_hash|\n uuid = row_key[9..44] # 36 characters, following \"statname-\"\n\n col_hash.each do |col_key, _|\n data_lname, data_lvalue, type_id, stat_name = col_key.split(\"\\0\")\n data[lname][data_lvalue] ||= {}\n label_output = data[lname][data_lvalue]\n\n type_str = Hastur::Message.type_id_to_symbol(type_id.to_i).to_s\n\n label_output[type_str] ||= {}\n label_output[type_str][stat_name] ||= []\n label_output[type_str][stat_name] |= [uuid] # Single-bar for union\n end\n end\n end\n\n data\n end", "def match_sales_levels\n nodes = {}\n nodes.merge!(match_sl3)\n nodes.merge!(match_sl4)\n nodes.merge!(match_sl5)\n nodes.merge!(match_sl6)\n nodes\n end", "def initialize_slots_cache\n startup_nodes_reachable = false\n dns_cache = {}\n @startup_nodes.each{|n|\n begin\n @nodes = []\n r = get_redis_link(n[:host],n[:port])\n r.cluster(\"slots\").each {|r|\n (r[0]..r[1]).each{|slot|\n ip,port = r[2]\n host = dns_cache.fetch(ip) {\n |missing_ip|\n host = Resolv.getname(missing_ip)\n dns_cache[ip] = host\n host\n }\n name = \"#{host}:#{port}\"\n node = {\n :host => host, :port => port,\n :name => name, :ip => ip\n }\n @nodes << node\n @connections.update_slot!(slot, name)\n }\n }\n populate_startup_nodes\n @refresh_table_asap = false\n rescue\n # Try with the next node on error.\n next\n end\n # Exit the loop as long as the first node replies\n startup_nodes_reachable = true\n break\n }\n if !startup_nodes_reachable\n raise Exceptions::StartupNodesUnreachable\n end\n end", "def node_hash_for(node, statements, hashes)\n statement_signatures = []\n grounded = true\n statements.each do | statement |\n if statement.to_quad.include?(node)\n statement_signatures << hash_string_for(statement, hashes, node)\n statement.to_quad.compact.each do | resource |\n grounded = false unless grounded?(resource, hashes) || resource == node\n end\n end\n end\n # Note that we sort the signatures--without a canonical ordering, \n # we might get different hashes for equivalent nodes.\n 
[grounded,Digest::SHA1.hexdigest(statement_signatures.sort.to_s)]\n end" ]
[ "0.6362123", "0.6238826", "0.5902057", "0.57655853", "0.57206625", "0.5666619", "0.54756373", "0.54353553", "0.5363174", "0.531582", "0.531582", "0.53138226", "0.5165208", "0.5155551", "0.5141833", "0.51401424", "0.5122923", "0.51089525", "0.5098923", "0.50561416", "0.5052161", "0.5032351", "0.50306374", "0.5020585", "0.50167537", "0.50075704", "0.49736074", "0.49674782", "0.49619755", "0.49595279", "0.4937337", "0.4937337", "0.49284843", "0.49145678", "0.4909312", "0.49041176", "0.48789045", "0.48647222", "0.4861634", "0.48611942", "0.48566675", "0.48545903", "0.4853849", "0.4846896", "0.48404476", "0.48312673", "0.4819838", "0.48133245", "0.48060724", "0.47999164", "0.47855908", "0.47826833", "0.47790417", "0.47790417", "0.477708", "0.47706452", "0.47701532", "0.4766315", "0.4736614", "0.47142047", "0.47069842", "0.47051775", "0.46749073", "0.46745864", "0.46740663", "0.46723354", "0.4667566", "0.4664818", "0.4658055", "0.4653731", "0.46476847", "0.46472022", "0.46463427", "0.46402684", "0.46347898", "0.4625353", "0.4623163", "0.46158305", "0.46155438", "0.4615029", "0.46147218", "0.46105665", "0.46007222", "0.4590148", "0.45838904", "0.45691332", "0.4568117", "0.45571232", "0.45570734", "0.4547585", "0.45441374", "0.45371547", "0.45316035", "0.45265183", "0.45089388", "0.45004705", "0.44998375", "0.4492396", "0.44912004", "0.44889322" ]
0.7914847
0
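The three trailing fields of each record tie the mined negatives back to the positive document: negative_scores lists the retrieval score of every negative above, document_score (0.7914847 here) is the score of the positive document for its query, and document_rank records where that positive lands among them. A minimal sketch of one plausible reading, assuming (an assumption for illustration, not something stated in the dump) that the rank simply counts negatives scoring at least as high as the positive:

  # Hypothetical helper; assumes document_rank = number of negatives whose
  # score is >= document_score. The values below come from the record above.
  def document_rank(document_score, negative_scores)
    negative_scores.count { |s| s.to_f >= document_score.to_f }
  end

  document_rank("0.7914847", ["0.6362123", "0.6238826", "0.5902057"])  # => 0

Under that reading, the 0 above just says the positive document outscores every mined negative for its query.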
Wait for the lookup threads in +lookups+ to finish and return a Hash of node updates.
def wait_for_responses( lookups, nodes ) update = {} until lookups.empty? lookups.keys.each do |thr| next if thr.alive? identifier = lookups.delete( thr ) begin records = thr.value if !records update[ identifier ] = { error: "Lookup failed (timeout)." } elsif records.empty? update[ identifier ] = { error: "Lookup failed (no records returned)." } else node_data = nodes[ identifier ] update[ identifier ] = self.compare_values( records, node_data ) end rescue SystemCallError => err msg = "%p: %s" % [ err.class, err.message ] self.log.error "%s while looking up %s" % [ msg, identifier ] update[ identifier ] = { error: msg } end end end return update end
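The wait_for_responses method above is a plain polling loop over a Hash that maps lookup Threads to node identifiers: it spins until the Hash is empty, skips threads that are still alive, harvests each finished thread with Thread#value, and files either the comparison result or an error Hash under that node's identifier, catching SystemCallError raised inside the resolver thread. A minimal standalone sketch of the same pattern, with a made-up fake_lookup helper and an inlined result step standing in for the checker's own lookup creation and compare_values call (both stand-ins are assumptions for illustration, not the class's real API):

  require 'resolv'

  # Stand-in for the checker's thread creation: one Thread per node, resolving
  # its name to A records; the real code builds this Hash elsewhere.
  def fake_lookup(name)
    Thread.new { Resolv::DNS.new.getresources(name, Resolv::DNS::Resource::IN::A) }
  end

  nodes   = { 'web1' => { 'name' => 'example.com' }, 'web2' => { 'name' => 'example.org' } }
  lookups = nodes.each_with_object({}) { |(id, node), h| h[fake_lookup(node['name'])] = id }

  update = {}
  until lookups.empty?
    lookups.keys.each do |thr|
      next if thr.alive?                 # still resolving; poll it on the next pass
      identifier = lookups.delete(thr)
      begin
        records = thr.value              # joins the thread; re-raises anything it raised
        update[identifier] =
          if records.nil? || records.empty?
            { error: 'Lookup failed (no records returned).' }
          else
            { addresses: records.map { |r| r.address.to_s } }
          end
      rescue SystemCallError => err
        update[identifier] = { error: "#{err.class}: #{err.message}" }
      end
    end
  end

Like the original, the sketch busy-waits on the outer loop; dropping a short sleep into it would be the obvious refinement if the lookups are slow.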
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run( nodes )\n\t\t\tself.log.debug \"Got %d nodes to check with %p\" % [ nodes.length, self ]\n\t\t\tlookups = self.create_lookups( nodes )\n\t\t\treturn self.wait_for_responses( lookups, nodes )\n\t\tend", "def create_lookups( nodes )\n\t\t\treturn nodes.each_with_object( {} ) do |(identifier, node), hash|\n\t\t\t\tself.log.debug \"Creating lookup for node: %p\" % [ node ]\n\t\t\t\tname = node['name'] or next\n\t\t\t\trecord_type = node['record_type'] || 'A'\n\t\t\t\trecord_class = Resolv::DNS::Resource::IN.const_get( record_type ) or\n\t\t\t\t\traise \"Unsupported record type %p!\" % [ record_type ]\n\n\t\t\t\tself.log.debug \"Looking up %s record for %s (%s)\" % [ record_type, name, identifier ]\n\t\t\t\tthr = Thread.new do\n\t\t\t\t\tself.resolver.getresources( name, record_class )\n\t\t\t\tend\n\t\t\t\thash[ thr ] = identifier\n\t\t\tend\n\t\tend", "def join\n wait_for_completion\n update_nodes\n end", "def collect_node_nei_hashes\n @log.info(\"#{__method__.to_s} started[#{self.class.to_s}]\")\n\n node_nei_hash = @redis_connector.fetch_relations\n end", "def run\n @stopped = false\n until @stopped do\n if (Time.now.to_i - @last_lookup.to_i) > @lookupd_poll_interval\n # Do lookupd\n end\n @selector.select(@timer.next_interval) { |m| m.value.call }\n end\n end", "def results\n @mutex.synchronize do\n resolvables = @resolvables\n @resolvables = []\n return resolvables\n end\n end", "def search( unique_words )\n Thread.new{\n wordHash = []\n tempResults = {}\n list = {}\n y = unique_words.length - 1\n for i in 0..y\n Thread.new(i){ |i2|\n wordHash[i2] = Hash_Func( unique_words[i2] )\n while @searchAckWait != nil && ( @searchAckWait[ wordHash[i2] ] == 1 || @searchAckWait[ wordHash[i2] ].kind_of?(Array) )\n end\n @searchAckWait[ wordHash[i2] ] = 1\n searchMesg = { :type => \"SEARCH\", :word => unique_words[i2], :node_id => wordHash[i2], :sender_id => @guid }.to_json\n nh, m, n = nextHop( wordHash[i2] )\n @s.send searchMesg, 0, nh.ip, nh.port\n t = Time.now.sec\n t2 = t + 90\n while t < t2 # Waits 30 seconds before checking route\n if @searchAckWait[ wordHash[i2] ].kind_of?(Array)\n tempResults[ wordHash[i2] ] = @searchAckWait[ wordHash[i2] ]\n break\n end\n t = Time.now.sec\n if t < t2 - 30\n t = t + 60\n end\n end\n if @searchAckWait[ wordHash[i2] ].kind_of?(Array)\n puts \"correct search result\"\n else\n puts \"The Search has failed time to check the route\"\n #puts @searchAckWait, wordHash[i2]\n routeChecker( wordHash[i2] )\n end\n @searchAckWait[ wordHash[i2] ] = 0\n }\n end\n t3 = Time.now.sec # returns results after 3 seconds\n t4 = t3 + 3\n while t3 < t4\n t3 = Time.now.sec\n if t3 < t4 - 3\n t3 = t3 + 60\n end\n end\n # Search algorithm return the minimum rank for each URL that is present for each word\n list = tempResults[ wordHash[0] ]\n removeList = []\n for j in 1..wordHash.length-1\n nList = tempResults[ wordHash[j] ]\n list.each { |h|\n removeFlag = true\n nList.any? 
{ |nH|\n if nH[:url] == h[:url]\n removeFlag = false\n if nH[:rank] < h[:rank]\n h[:rank] = nH[:rank]\n end\n end\n }\n if removeFlag\n removeList << h\n end\n }\n for k in removeList\n list.delete(k)\n end\n end\n r = SearchResult.new() # Holds results\n r.words = unique_words\n r.resutls = list\n return r\n }\n end", "def update_node_info_cache()\n @all_stats = []\n @nodes.each { |node|\n ip = node.private_ip\n acc = AppControllerClient.new(ip, @@secret)\n\n begin\n @all_stats << acc.get_stats()\n rescue FailedNodeException\n Djinn.log_warn(\"Failed to get status update from node at #{ip}, so \" +\n \"not adding it to our cached info.\")\n end\n }\n end", "def run( nodes )\n\t\t\tself.log.debug \"Got nodes to check with %p: %p\" % [ self, nodes ]\n\n\t\t\trecords = nodes.each_with_object( {} ) do |(identifier, node), hash|\n\t\t\t\tself.log.debug \"Looking up whois info for %p (%p)\" % [ identifier, node ]\n\t\t\t\thash[ identifier ] = self.client.lookup( node['name'] )\n\t\t\tend\n\n\t\t\treturn records.each_with_object( {} ) do |(identifier, record), hash|\n\t\t\t\tparser = record.parser\n\t\t\t\thash[ identifier ] = self.parse_record( parser, identifier )\n\t\t\tend\n\n\t\tend", "def poll\n unless memcached_nodes.empty?\n memcached_nodes.each do | hostname_port |\n stats_text = issue_stats hostname_port \n if stats_text.present?\n @last_stats[hostname_port] = parse_and_report_stats hostname_port, stats_text\n else\n @last_stats[hostname_port] = {}\n end \n end\n\n aggregate_stats\n logger.debug \"Done with aggs\" \n end\n end", "def get_njobs(nodes)\n\n # Reset job count on each input node\n nodes.each do |wn|\n wn[:njobs] = -1\n end\n\n begin\n pbsnodes_xml = REXML::Document.new( %x[ #{$cmd_pbsnodes} 2> /dev/null ] )\n rescue\n return\n end\n\n return if pbsnodes_xml.elements.empty?\n\n pbsnodes_xml.elements.each('//Data/Node') do |node_xml|\n\n name = node_xml.elements['name'].text\n is_offline = node_xml.elements['state'].text.include?('offline')\n is_down = node_xml.elements['state'].text.include?('down')\n\n jobs_xml = node_xml.elements['jobs']\n if jobs_xml\n njobs = jobs_xml.text.split(' ').length\n else\n njobs = 0\n end\n\n # Find matching input nodes: FQDN must be set, node must be up and offline\n nodes.each do |wn|\n next unless wn[:fqdn] and wn[:fqdn] == name and is_offline and !is_down\n wn[:njobs] = njobs\n end\n\n end\n\nend", "def getTorrents\n timer = Timer.new\n infoHashes = getInfoHashes\n\n #puts \"Got #{infoHashes.size} hashes after #{timer.stop} ms\"\n\n callData = []\n callCountPerTorrent = nil\n infoHashes.each do |infoHash|\n newCallData = [\n ['d.get_name', infoHash],\n ['d.get_down_rate', infoHash],\n ['d.get_up_rate', infoHash],\n ['d.get_size_files', infoHash],\n ['d.get_size_bytes', infoHash],\n ['d.get_bytes_done', infoHash],\n ['d.get_tied_to_file', infoHash],\n ]\n callCountPerTorrent = newCallData.size\n callData.concat newCallData\n end\n\n #puts \"Created multicall arguments in #{timer.stop} ms\"\n\n rpcData = @rpc.multicall(*callData)\n\n #puts \"The multicall itself took #{timer.stop} ms\"\n\n offset = 0\n output = []\n infoHashes.each do |infoHash|\n values = rpcData[offset..(offset + callCountPerTorrent - 1)]\n gotHealthyData = true\n values.each do |value|\n if value.class == XMLRPC::FaultException\n gotHealthyData = false\n break\n end\n end\n if gotHealthyData\n data = [infoHash] + values\n torrent = TorrentData.new(*data)\n output << torrent\n end\n offset += callCountPerTorrent\n end\n\n #puts \"Finished processing the RPC data after 
#{timer.stop} ms\"\n\n return output\n end", "def send_lookup(lookup)\n lookup.ensure_enough_info\n request = build_request(lookup)\n\n response = @sender.send(request)\n\n raise response.error if response.error\n\n candidates = convert_candidates(@serializer.deserialize(response.payload))\n lookup.result = candidates\n end", "def node_search\n while (current_node = queue.shift).present? && final.nil?\n @processed += 1\n yield(current_node)\n log \"\\rProcessing #{start.name}.... %d / %d / %d / %d @ %ds - depth: %d\", @unmarked, @requeued, @processed, @steps, (Time.now - @started), current_node.depth\n end\n\n format_results\n save_results if final.present?\n self.final_path\n end", "def get_registered_nodes\n update_nodes\n @registered_nodes\n end", "def seek!\n results = {}\n self.nodes.each { |node| results[node] = seek_from_node(node) }\n \n return results\n end", "def find_nodes_to_verify(references)\n nodes = {}\n\n references.each do |uri, _digest_value|\n uri = uri.sub(/^#/, '')\n node = find_node_by_uri(uri)\n\n nodes[uri] = calculate_digest(node)\n end\n\n nodes\n end", "def get_lookup(name)\n @lookups = {} unless defined?(@lookups)\n @lookups[name] = spawn_lookup(name) unless @lookups.include?(name)\n @lookups[name]\n end", "def start_compute_hits_thread\n Thread.new do\n interval = @driver.config['update_routing_interval'].to_i\n while true\n sleep(interval)\n routing = estimate_hits\n\n # Send updated values to the supernodes\n ping_msg = Protocol::Ping.new(routing.authority,routing.hub,routing.authority_prime,routing.hub_prime,routing.supernode?)\n ping_msg.ctime = DateTime.now\n sns = @supernode_table.supernodes\n group = ThreadGroup.new\n sns.each do |sn|\n t = Thread.new(sn) do |sn|\n ping(sn.socket,ping_msg) unless sn.socket.nil?\n end\n group.add(t)\n end\n group.list.each { |t| t.join }\n end\n end\n end", "def refresh_all\n\t\tputs \"Refresh all the entries in the local host repository in one shot.\"\n\t\tchanges=Hash.new\n\t\thosts=@known_hosts.keys\n\t\t@known_hosts=Hash.new\n\t\tchanges=bulk_add(hosts)\n\t\t@known_hosts.merge!(changes)\n\t\t#@known_hosts.keys.map do |key|\n\t\t#\tunless is_ip?(key)\n\t\t#\t\thost=refresh(key)\n\t\t#\t\tchanges.push(host) unless host.nil?\n\t\t#\tend\n\t\t#end\n\t\tputs \"\\n#{changes.size} Entries Refreshed:\" if changes.size>0\n\t\t#changes.map { |x| puts x }\n\t\tputs \"Done refreshing the local hosts.\"\n\t\treturn changes\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\"\n\tend", "def find_working_trackers\n # We could pass an array in, but the purpose is to do 5 scrapes and see\n # how often the host responds. 
Using different ones looks less like abuse.\n hashes = [\n '867bdcaec9b522809aebc1e7085ef8f0a1e7f290',\n '1354AC45BFB3E644A04D69CC519E83283BD3AC6A',\n '66FC47BF95D1AA5ECA358F12C70AF3BA5C7E8F9A',\n '39eac8c9fcb529d518184d45cdaa558771089835',\n '3C7534B034FE8FD46B5AF3A52AC3AA1B89DDEF03'\n ]\n\n results = {}\n\n hashes.each do |hash|\n puts \"Fetching hash #{hash}...\"\n scrape_all(hash).each do |res|\n tracker = res[:tracker]\n status = res[:status]\n if results.has_key?(tracker)\n results[tracker] << status\n else\n results[tracker] = [status]\n end\n end\n end\n \n puts \"Finished scanning #{hashes.size} hashes across #{results.size} trackers...\"\n results.each do |tracker, res|\n puts \"#{res}: #{tracker}\"\n success = res.select{|x| x == :success}.count * 20\n if success > 0\n puts \"GOOD: #{tracker} (#{success}%)\"\n else\n puts \"BAD: #{tracker} (0%)\"\n end\n end\n nil\n end", "def search_results_hashes\n wait_until_bus_section_load\n search_results_table.rows_text.map{ |row| Hash[*search_results_table.headers_text.zip(row).flatten] }\n end", "def merge_lookup(lookup_variants)\n lookup(lookup_variants, Lookup::Invocation.current)\n end", "def fetch_supernodes\n supernodes = []\n iter = @driver.supernode_cache_iterator\n # Get supernodes from cache. Make sure its size is 10.\n size = @driver.config['fetched_supernodes_number'].to_i || 10\n while supernodes.length < size \n sns = iter.next\n break if sns.empty?\n Routing.log { |logger| logger.info(self.class) {\"Get #{sns.length} supernodes from cache\"}}\n\n # Ping(TCP) the supernodes\n group = ThreadGroup.new\n lock = Mutex.new\n sns.each do |sn|\n t = Thread.new(sn) { |sn|\n ping = Net::Ping::TCP.new(sn.address.public_ip)\n if ping.ping?\n sn.latency = ping.duration\n lock.synchronize {supernodes << sn}\n end\n }\n group.add(t)\n end\n group.list.each { |t| t.join }\n end\n\n # Get supernodes from bootstrap nodes\n if supernodes.empty?\n Routing.log {|logger| logger.info(self.class) {\"No supernode cache available. Get from bootstrap nodes.\"}}\n # FIXME add bootstrap process\n end\n supernodes.sort! 
{|s1,s2| s1.latency <=> s2.latency }\n supernodes[0,size]\n end", "def lookups; end", "def total_hash_search(hash_to_find=@hash_to_find, stop_on_success=@sos, verbose=true)\n matches={}\n while(true)\n case @hash_type\n when 'MD4'\n result = leakdb_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('api.leakdb.abusix.com', result)\n break if stop_on_success\n end\n when 'MD5'\n result = leakdb_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('api.leakdb.abusix.com', result)\n break if stop_on_success\n end\n\n result = darkbyte_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('md5.darkbyte.ru', result)\n break if stop_on_success\n end\n\n result = gromweb_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('md5.gromweb.com', result)\n break if stop_on_success\n end\n\n result = md5comcn_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('md5.com.cn', result)\n break if stop_on_success\n end\n\n result = md5onlinenet_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('md5online.net', result)\n break if stop_on_success\n end\n\n result = md5onlineorg_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('md5online.org', result)\n break if stop_on_success\n end\n\n result = myaddr_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('md5.my-addr.com', result)\n break if stop_on_success\n end\n\n result = noisette_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('md5.noisette.ch', result)\n break if stop_on_success\n end\n\n result = netmd5crack_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('netmd5crack.com', result)\n break if stop_on_success\n end\n\n result = sans_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('isc.sans.edu', result)\n break if stop_on_success\n end\n\n result = stringfunction_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('stringfunction.com', result)\n break if stop_on_success\n end\n when 'LM'\n result = it64_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('rainbowtables.it64.com', result)\n break if stop_on_success\n end\n\n result = leakdb_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('api.leakdb.abusix.com', result)\n break if stop_on_success\n end\n when 'NTLM'\n result = leakdb_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('api.leakdb.abusix.com', result)\n break if stop_on_success\n end\n when 'LM:NTLM'\n result = leakdb_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('api.leakdb.abusix.com', result)\n break if stop_on_success\n end\n when 'MYSQL'\n result = leakdb_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('api.leakdb.abusix.com', result)\n break if stop_on_success\n end\n when 'SHA1'\n result = leakdb_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('api.leakdb.abusix.com', result)\n break if stop_on_success\n end\n\n result = sans_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('isc.sans.edu', result)\n break if stop_on_success\n end\n\n result = stringfunction_hash_search(hash_to_find, verbose)\n if not result.nil?\n matches.store('stringfunction.com', result)\n break if stop_on_success\n end\n end\n break # tried all sites by now...\n end\n return matches\n end", "def aws_wait_for_peers\n loop do\n aws_find_peers\n\n 
Chef::Log.info(\"etcd_service[#{ new_resource.name }] Found \"\\\n \"#{ new_resource.peers.length + 1 }/#{ new_resource.quorum } AWS \"\\\n \"peers, #{ active_peers.length } active\")\n\n break if new_resource.peers.length >= (new_resource.quorum - 1)\n break if active_peers.length > 0\n sleep 5\n end\n end", "def refresh(nodes_to_refresh = seeds)\n refreshed_nodes = []\n seen = {}\n # Set up a recursive lambda function for refreshing a node and it's peers.\n refresh_node = ->(node) do\n unless seen[node]\n seen[node] = true\n # Add the node to the global list of known nodes.\n seeds.push(node) unless seeds.include?(node)\n begin\n node.refresh\n # This node is good, so add it to the list of nodes to return.\n refreshed_nodes.push(node) unless refreshed_nodes.include?(node)\n # Now refresh any newly discovered peer nodes - this will also\n # remove nodes that are not included in the peer list.\n refresh_peers(node, &refresh_node)\n rescue Errors::ConnectionFailure\n # We couldn't connect to the node.\n end\n end\n end\n\n nodes_to_refresh.each(&refresh_node)\n refreshed_nodes\n end", "def findStaleTargets(neededByHash, dependencyHash, leafTargets)\n\t\tstaleSet = Set.new()\n\t\tvisitedSet = Set.new()\n\n\t\tpendingTargets = Array.new()\n\t\tpendingTargets.concat(leafTargets.to_a())\n\n\t\t#search over the dependency graph starting at the leaves\n\t\tuntil (pendingTargets.empty?())\n\t\t\ttarget = pendingTargets.pop()\n\t\t\tvisitedSet.add(target)\n\t\t\tunless (staleSet.include?(target))\n\n\t\t\t\t#test if target is older than it's source\n\t\t\t\tsrc = targetToSource target #target in srcDir\n\t\t\t\tbuild_target = toBuildDirFile(target)\n\t\t\t\tstaleFromSource = isStale?(build_target, [src])\n\n\t\t\t\t#debug \"\\n\\nTarget: \\'#{(build_target)}\\'\"\n\t\t\t\tif (not staleFromSource)\n\t\t\t\t\t#test if target is older than targets it depends on\n\t\t\t\t\tdeps = dependencyHash[target]\n\t\t\t\t\tdep_destinations = deps.map {|dep| toBuildDirFile(dep)}\n\t\t\t\t\tstaleFromDep = isStale?(build_target, dep_destinations)\n\t\t\t\t\t#if(staleFromDep)\n\t\t\t\t\t\t#debug \"Target mtime: #{File.mtime(build_target)}\"\n\t\t\t\t\t\t#debug \"One of these caused stale:\"\n\t\t\t\t\t\t#dep_destinations.each do |build_dep|\n\t\t\t\t\t\t#\tdebug \"#{build_dep}:: #{File.mtime(build_dep)}\\n\"\n\t\t\t\t\t\t#end\n\t\t\t\t\t#end\n\t\t\t\tend\n\t\t\t\tif(staleFromSource || staleFromDep)\n\t\t\t\t\t#debug \"Staleness: src: \\'#{staleFromSource.to_s}\\'\"\n\t\t\t\t\t#debug \"Staleness: dep: \\'#{staleFromDep.to_s}\\'\"\n\t\t\t\t\t#its stale. add it and all it's dependents to staleSet\n\t\t\t\t\t#debug \"Staleness: adding: \\'#{target}\\'\"\n\t\t\t\t\tstaleSet.add(target)\n\t\t\t\t\tpendingStales = Array.new()\n\t\t\t\t\tpendingStales.concat(neededByHash[target].to_a())\n\t\t\t\t\tuntil(pendingStales.empty?())\n\t\t\t\t\t\tstale_target = pendingStales.pop()\n\t\t\t\t\t\tstaleSet.add(stale_target)\n\t\t\t\t\t\tdebug \"Staleness: adding child: \\'#{stale_target}\\'\"\n\t\t\t\t\t\tpendingStales.concat(neededByHash[stale_target].to_a)\n\t\t\t\t\tend\n\t\t\t\telse\n\t\t\t\t\t#it's not stale. 
add it's dependents to the pendingTargets\n\t\t\t\t\t#queue to be tested\n\t\t\t\t\tneededByHash[target].each do |child|\n\t\t\t\t\t\tif(not visitedSet.include?(child))\n\t\t\t\t\t\t\tpendingTargets << child\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\treturn staleSet\n\tend", "def perform\n @timeout_start = Time.now\n puts \"Searching for #{@target}\"\n prime = SearchEntry.new(nil, @target, whole_trie, nil)\n debug \"priming: #{prime}\"\n debug ''\n ticked = 0\n frontier.push prime, prime.score\n\n @results = Set.new\n\n begin\n search_entry = frontier.pop\n @visited.push search_entry, search_entry.score\n\n status = \"#{@visited.size}/#{@frontier.size} - #{search_entry}\"\n debug status\n\n seconds_since_start = (Time.now - @timeout_start).to_i\n if seconds_since_start > ticked\n puts status\n ticked += 1\n end\n\n\n # If we've reached the end of a word, continue with a pointer to the\n # top of the whole trie and enqueue this in the frontier\n if search_entry.subtrie[:terminal]\n new_entry = SearchEntry.new(\n '',\n search_entry.target,\n whole_trie,\n search_entry,\n )\n debug \"+ found terminal entry: #{new_entry}\"\n frontier.push(new_entry, new_entry.score)\n end\n\n search_entry.subtrie.each do |key, subtrie|\n next if key == :path\n next if key == :depth\n next if key == :terminal\n\n new_entry = SearchEntry.new(\n \"#{search_entry.match}#{key}\",\n @target,\n subtrie,\n search_entry.previous_entry,\n )\n debug \"- iterating: #{search_entry.match.inspect}+#{key.inspect} #{new_entry}\"\n frontier.push(new_entry, new_entry.score)\n end\n end until frontier.empty? || timeout? #&& collected?)\n\n require 'pry'\n binding.pry\n nil\n end", "def resolve_ip_sites\n\t\tputs \"Resolve sites that contain an IP address. Update the site cache table once a hostname is found in the local host table.\" if @verbose\n\t\tupdates=Array.new\n\t\tsites=get_ip_sites\n\t\thost_tracker=Wmap::HostTracker.instance\n\t\thost_tracker.data_dir=@data_dir\n\t\thost_tracker.hosts_file = host_tracker.data_dir + \"/\" + \"hosts\"\n\t\thost_tracker.load_known_hosts_from_file\n\t\tsites.map do |site|\n\t\t\tputs \"Work on resolve the IP site: #{site}\" if @verbose\n\t\t\tip=url_2_host(site)\n\t\t\thostname=host_tracker.local_ip_2_host(ip)\n\t\t\tif hostname.nil?\n\t\t\t\tputs \"Can't resolve #{ip} from the local host store. 
Skip #{site}\" if @verbose\n\t\t\telse\n\t\t\t\tputs \"Host-name found for IP #{ip}: #{hostname}\" if @verbose\n\t\t\t\tupdates.push(site)\n\t\t\t\trefresh(site)\n\t\t\tend\n\t\tend\n\t\tupdates.sort!\n\t\tputs \"The following sites are now refreshed: #{updates}\" if @verbose\n\t\thost_tracker=nil\n\t\treturn updates\n\trescue Exception => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def hash\n [host_list, total_matching, total_returned].hash\n end", "def next_renewal_hashes\n wait_until_bus_section_load\n tables[1].hashes\n end", "def find_applying_nodes(hosts, statuses = [])\n Log.debug(\"checking applying status of #{hosts.inspect}\")\n @client.filter[\"identity\"].clear\n hosts.each do |host|\n @client.identity_filter(host)\n end\n\n results = @client.status\n\n hosts.each do |host|\n result = results.select { |r| r[:sender] == host }.first\n status = statuses.select { |s| s[:name] == host }.first\n\n unless status\n status = make_status(host)\n statuses << status\n end\n\n if result\n # check the value of applying as defined in the agent ddl\n if result[:data][:applying] == true\n # we're applying\n if result[:data][:initiated_at]\n # it's a new agent, we can record when it started\n Log.debug(\"#{host} run was started at #{result[:data][:initiated_at]}\")\n status[:initiated_at] = result[:data][:initiated_at]\n else\n Log.debug(\"#{host} run started\")\n end\n else\n # Here we check the \"asked to run but not yet started\" state.\n if result[:data][:lastrun].to_i >= status[:initiated_at]\n Log.debug(\"#{host} run completed\")\n # The node has finished applying, remove from the running set\n statuses.reject! { |s| s[:name] == host }\n next\n else\n # We haven't started yet that we can see, increment the check counter\n status[:checks] += 1\n Log.debug(\"#{host} starting, checks #{status[:checks]}\")\n end\n end\n else\n # We didn't get a result from this host, log and record a check happened\n log(\"Host #{host} did not respond to the status action.\")\n status[:no_response] += 1\n end\n\n if status[:no_response] >= 5\n # If we missed many responses to status, assume it's a dead node\n log(\"Host #{host} failed to respond multiple times. Skipping.\")\n statuses.reject! { |s| s[:name] == host }\n end\n\n if status[:checks] >= 5\n # If we hit more than 5 checks, assume it couldn't start\n log(\"Host #{host} did not move into an applying state. Skipping.\")\n statuses.reject! { |s| s[:name] == host }\n end\n end\n\n return statuses\n end", "def resolve_node_hw_id_collision\n # Get all nodes\n nodes = get_data.fetch_all_objects(:node)\n # This will hold all hw_id's (not unique)'\n all_hw_id = []\n # Take each hw_id and add to our all_hw_id array\n nodes.each { |node| all_hw_id += node.hw_id }\n # Loop through each hw_id\n all_hw_id.each do\n |hwid|\n # This will hold nodes that match\n matching_nodes = []\n # loops through each node\n nodes.each do\n |node|\n # If the hwid is in the node.hw_id array then we add to the matching ndoes array\n matching_nodes << node if (node.hw_id & [hwid]).count > 0\n end\n # If we have more than one node we have a conflict\n # We sort by timestamp ascending\n matching_nodes.sort! 
{ |a, b| a.timestamp <=> b.timestamp }\n # We remove the first one, any that remain will be cleaned of the hwid\n matching_nodes.shift\n # We remove the hw_id from each and persist\n matching_nodes.each do\n |node|\n node.hw_id.delete(hwid)\n node.update_self\n end\n end\n nil\n end", "def merge_results_from_worker(results)\n @mutex.synchronize do\n results.each do |r|\n new_props = @job_inst.collector_combine_block(r, @n, @val)\n @n = new_props[:n]\n @val = new_props[:val]\n end\n end\n end", "def estimate_hits\n nodes = @driver.neighbors\n authority_prime = 0.0\n hub_prime = 0.0\n square_sum_authority_prime = 0.0\n square_sum_hub_prime = 0.0\n # Compute the sum\n nodes.each do |p|\n authority_prime += p.hub\n hub_prime += p.authority\n square_sum_authority_prime += p.authority_prime**2\n square_sum_hub_prime += p.hub_prime**2\n end\n square_sum_authority_prime += authority_prime**2\n square_sum_hub_prime += hub_prime**2\n # Normalize\n authority = authority_prime**2/square_sum_authority_prime\n hub = hub_prime**2/square_sum_hub_prime\n\n # Update routing\n new_routing = @driver.update_routing do |routing|\n routing.authority = authority\n routing.hub = hub\n routing.authority_prime = authority_prime\n routing.hub_prime = hub_prime\n routing.last_update = DateTime.now\n end\n\n new_routing\n end", "def find_returned_hashes(node, returning:)\n if node.is_a?(Array)\n *possible_returns, last_expression = *node\n return possible_returns.map { |c| find_returned_hashes(c, returning: false) }.flatten +\n # Check the last expression of a method body\n find_returned_hashes(last_expression, returning: returning)\n end\n\n case node.type\n when :hash\n if returning\n [node]\n else\n # This is some random hash literal\n []\n end\n when :begin\n # Check the last expression of a method body\n find_returned_hashes(node.children, returning: true)\n when :resbody\n _condition, _assign, body = *node\n find_returned_hashes(body, returning: returning)\n when :kwbegin\n find_returned_hashes(node.children, returning: returning)\n when :rescue\n try_body, rescue_body, _ensure_body = *node\n find_returned_hashes(try_body, returning: returning) + find_returned_hashes(rescue_body, returning: returning)\n when :block\n # Check methods with blocks for possible returns\n method_call, _args, *body = *node\n if method_call.type == :send\n find_returned_hashes(body, returning: returning)\n end\n when :if\n # Check each branch of a conditional\n _condition, *branches = *node\n branches.compact.map { |b| find_returned_hashes(b, returning: returning) }.flatten\n when :return\n find_returned_hashes(node.children.first, returning: true)\n else\n []\n end\n rescue\n p \"--- UnderscorizeMutationHashTransform crashed on node: ---\"\n p node\n raise\n end", "def reachable_nodes\n recursive_set(@start) { |n| n.out }\n end", "def get_node_ready(nodes)\n ready_nodes = nodes.select { |node| check_node_status(node) == \"ready\" }\n idle_nodes = []\n ready_nodes.each { |node| idle_nodes << node if !(DRbObject.new(nil, \"druby://#{node.ip}:9000\").executando_job) }\n idle_nodes.min{|a,b| DRbObject.new(nil, \"druby://#{a.ip}:9000\").cpu <=> DRbObject.new(nil, \"druby://#{b.ip}:9000\").cpu }\n end", "def waiting\n @mootex.synchronize { @waiting.keys }\n end", "def current_node_snapshots\n nodes = {}\n snapshots = Hash.new { |h, k| h[k] = NodeSnapshot.new(k) }\n fetch_node_manager_states.each do |node_manager, states|\n available, unavailable = states.values_at(:available, :unavailable)\n available.each do |node_string, latency|\n node = 
nodes[node_string] ||= node_from(node_string)\n snapshots[node].viewable_by(node_manager, latency)\n end\n unavailable.each do |node_string|\n node = nodes[node_string] ||= node_from(node_string)\n snapshots[node].unviewable_by(node_manager)\n end\n end\n\n snapshots\n end", "def watch_targets\n wallet.db.get_keys(self).map { |key| Bitcoin.hash160(key) }\n end", "def chat_retrieve(unique_tags)\n Thread.new {\n tagHash = []\n tempResults = {}\n list = {}\n y = unique_tags.length - 1\n for i in 0..y\n Thread.new(i) { |i2|\n tagHash[i2] = Hash_Func(unique_tags[i2])\n while @chat_retrieveAckWait != nil && (@chat_retrieveAckWait[tagHash[i2]] == 1 || @chat_retrieveAckWait[tagHash[i2]].kind_of?(Array))\n end\n @chat_retrieveAckWait[tagHash[i2]] = 1\n chat_retrieveMsg = {:type => \"CHAT_RETRIEVE\", :tag => unique_tags[i2], :node_id => tagHash[i2], :sender_id => @guid}.to_json\n nh, m, n = nextHop(tagHash[i2])\n @socket.send chat_retrieveMsg, 0, nh.ip, nh.port\n t = Time.now.sec\n t2 = t + 90\n while t < t2 # Waits 30 seconds before checking route\n if @chat_retrieveAckWait[tagHash[i2]].kind_of?(Array)\n tempResults[tagHash[i2]] = @chat_retrieveAckWait[tagHash[i2]]\n break\n end\n t = Time.now.sec\n if t < t2 - 30\n t = t + 60\n end\n end\n if @chat_retrieveAckWait[tagHash[i2]].kind_of?(Array)\n puts \"Get correct chat result\"\n else\n puts \"The chat_retrieve failed to check the route within set time\"\n\n routeChecker(tagHash[i2])\n end\n @chat_retrieveAckWait[tagHash[i2]] = 0\n }\n end\n t3 = Time.now.sec # returns results after 3 seconds\n t4 = t3 + 3\n while t3 < t4\n t3 = Time.now.sec\n if t3 < t4 - 3\n t3 = t3 + 60\n end\n end\n\n list = tempResults[tagHash[0]]\n removeList = []\n for j in 1..tagHash.length-1\n nList = tempResults[tagHash[j]]\n list.each { |h|\n removeFlag = true\n nList.any? 
{ |nH|\n if nH[:text] == h[:text]\n removeFlag = false\n\n end\n }\n if removeFlag\n removeList << h\n end\n }\n for k in removeList\n list.delete(k)\n end\n end\n r = ChatResult.new() # Holds results\n r.tags = unique_tags\n r.resutls = list\n return r\n }\n end", "def find_digest_values\n references = {}\n reference_nodes = doc.css('xmlns|Reference', xmlns: DSIG)\n\n reference_nodes.each do |node|\n uri = node.attr('URI')\n digest_value = node.at('xmlns|DigestValue', xmlns: DSIG).content\n\n references[uri] = digest_value\n end\n\n references\n end", "def build_reverse_tag_lookup_cache\n @tags_for_node_cache = ArrayHash.new\n tags.inject(@tags_for_node_cache) do |hash, (tag, tag_node)|\n hash[tag_node] << tag\n hash\n end\n end", "def sync\n caches = []\n from_scratch = false\n mapped = Service.nmap\n\n return if mapped.empty?\n\n # iterate through boxes, removing any who don't respond\n # so we do not waste time sending out updates to down boxes\n mapped.delete_if do |box|\n resp = Service.net_get(\"http://#{box}:3000\")\n if resp\n c = JSON.parse(resp)\n if c.empty?\n from_scratch = true\n else\n caches << [box,c]\n end\n false\n else\n true\n end\n end\n\n if from_scratch\n newest = [`hostname`.strip,Service.discovery]\n else\n newest = caches.sort{|a,b| a.last['timestamp'] <=> b.last['timestamp']}.last\n mapped.delete_if{|x| newest.first == x}\n end\n\n puts 'Sending to: ' + mapped.inspect\n\n # perform the POST, the URI is always required\n mapped.peach do |box|\n # do a post to services/set_cache\n post_uri = URI \"http://#{box}:3000/services/set_cache\"\n post = Net::HTTP::Post.new post_uri.path\n post.set_form_data 'newest' => newest.last.to_json\n if box == `hostname`.strip\n Rails.cache.write(Service.cache_key,newest.last)\n else\n http = Net::HTTP::Persistent.new box\n http.request post_uri, post\n end\n end\n render :text => \"Caches should be synced, with #{newest.first} as the winner!\"\n end", "def sync\n @cache.flush(true)\n @nodes.sync\n end", "def common_nodes(remote, opts={:heads => nil, :force => nil, :base => nil})\n # variable prep!\n node_map = changelog.node_map\n search = []\n unknown = []\n fetch = {}\n seen = {}\n seen_branch = {}\n opts[:base] ||= {}\n opts[:heads] ||= remote.heads\n \n # if we've got nothing...\n if changelog.tip == NULL_ID\n opts[:base][NULL_ID] = true # 1 is stored in the Python\n \n return [NULL_ID], [NULL_ID], opts[:heads].dup unless opts[:heads] == [NULL_ID]\n return [NULL_ID], [], [] # if we didn't trip ^, we're returning this\n end\n \n # assume we're closer to the tip than the root\n # and start by examining heads\n UI::status 'searching for changes'\n \n opts[:heads].each do |head|\n if !node_map.include?(head)\n unknown << head\n else\n opts[:base][head] = true # 1 is stored in the Python\n end\n end\n \n opts[:heads] = unknown # the ol' switcheroo\n return opts[:base].keys, [], [] if unknown.empty? # BAIL\n \n # make a hash with keys of unknown\n requests = Hash.with_keys unknown\n count = 0\n \n # Search through the remote branches\n # a branch here is a linear part of history, with 4 (four)\n # parts:\n #\n # head, root, first parent, second parent\n # (a branch always has two parents (or none) by definition)\n #\n # Here's where we start using the Hashes instead of Arrays\n # trick. 
Keep an eye out for opts[:base] and opts[:heads]!\n unknown = remote.branches(*unknown)\n until unknown.empty?\n r = []\n \n while node = unknown.shift\n next if seen.include?(node[0])\n UI::debug \"examining #{short node[0]}:#{short node[1]}\"\n \n if node[0] == NULL_ID\n # Do nothing...\n elsif seen_branch.include? node\n UI::debug 'branch already found'\n next\n elsif node_map.include? node[1]\n UI::debug \"found incomplete branch #{short node[0]}:#{short node[1]}\"\n search << node[0..1]\n seen_branch[node] = true # 1 in the python\n else\n unless seen.include?(node[1]) || fetch.include?(node[1])\n if node_map.include?(node[2]) and node_map.include?(node[3])\n UI::debug \"found new changset #{short node[1]}\"\n fetch[node[1]] = true # 1 in the python\n end # end if\n \n node[2..3].each do |p|\n opts[:base][p] = true if node_map.include? p\n end\n end # end unless\n \n node[2..3].each do |p|\n unless requests.include?(p) || node_map.include?(p)\n r << p\n requests[p] = true # 1 in the python\n end # end unless\n end # end each\n end # end if\n \n seen[node[0]] = true # 1 in the python\n end # end while\n \n unless r.empty?\n count += 1\n \n UI::debug \"request #{count}: #{r.map{|i| short i }}\"\n \n (0 .. (r.size-1)).step(10) do |p|\n remote.branches(r[p..(p+9)]).each do |b|\n UI::debug \"received #{short b[0]}:#{short b[1]}\"\n unknown << b\n end\n end\n end # end unless\n end # end until\n \n # sorry for the ambiguous variable names\n # the python doesn't name them either, which\n # means I have no clue what these are\n find_proc = proc do |item1, item2|\n fetch[item1] = true\n opts[:base][item2] = true\n end\n \n # do a binary search on the branches we found\n search, new_count = *binary_search(:find => search,\n :repo => remote,\n :node_map => node_map,\n :on_find => find_proc)\n count += new_count # keep keeping track of the total\n \n # sanity check, because this method is sooooo fucking long\n fetch.keys.each do |f|\n if node_map.include? f\n raise RepoError.new(\"already have changeset #{short f[0..3]}\")\n end\n end\n \n if opts[:base].keys == [NULL_ID]\n if opts[:force]\n UI::warn 'repository is unrelated'\n else\n raise RepoError.new('repository is unrelated')\n end\n end\n \n UI::debug \"found new changesets starting at #{fetch.keys.map{|f| short f }.join ' '}\"\n UI::debug \"#{count} total queries\"\n \n # on with the show!\n [opts[:base].keys, fetch.keys, opts[:heads]]\n end", "def refresh_all\n\t\tputs \"Refresh all the entries within the local site store ... \"\n\t\tchanges=Hash.new\n\t\tchanges=bulk_refresh(@known_sites.keys)\n\t\t@known_sites.merge!(changes)\n\t\tputs \"Done refresh all entries.\"\n\t\treturn changes\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def search_missing_node_of(other_state)\n results = []\n\n other_state.job_names.each do |other_job_name|\n other_state.job_nodes(other_job_name).size.times do |i|\n other_node = other_state.job_nodes(other_job_name)[i]\n found_node = job_nodes(other_job_name)[i] || job_extra_node(other_job_name, i)\n # found in other state or this state doens't has it either\n if (other_node && other_node.existing?) 
|| found_node.nil?\n results.push(other_node)\n else\n results.push(found_node)\n end\n end\n end\n\n other_state.resource_pool_names.each do |other_resource_pool_name|\n my_exisiting_nodes = (\n idle_nodes(other_resource_pool_name) +\n idle_extra_nodes(other_resource_pool_name)\n ).select(&:existing?)\n\n other_exisiting_nodes = other_state\n .idle_nodes(other_resource_pool_name)\n .select(&:existing?)\n\n my_exclusive_nodes = my_exisiting_nodes.select do |my_node|\n other_exisiting_nodes.find do |other_node|\n my_node.id == other_node.id\n end.nil?\n end\n\n other_state.idle_nodes(other_resource_pool_name).size.times do |i|\n other_node = other_state.idle_nodes(other_resource_pool_name)[i]\n if other_node && other_node.existing?\n results.push(other_node)\n else\n my_node = my_exclusive_nodes.pop\n if my_node\n results.push(my_node)\n else\n results.push(other_node)\n end\n end\n end\n end\n\n results\n end", "def initialize_slots_cache\n startup_nodes_reachable = false\n dns_cache = {}\n @startup_nodes.each{|n|\n begin\n @nodes = []\n r = get_redis_link(n[:host],n[:port])\n r.cluster(\"slots\").each {|r|\n (r[0]..r[1]).each{|slot|\n ip,port = r[2]\n host = dns_cache.fetch(ip) {\n |missing_ip|\n host = Resolv.getname(missing_ip)\n dns_cache[ip] = host\n host\n }\n name = \"#{host}:#{port}\"\n node = {\n :host => host, :port => port,\n :name => name, :ip => ip\n }\n @nodes << node\n @connections.update_slot!(slot, name)\n }\n }\n populate_startup_nodes\n @refresh_table_asap = false\n rescue\n # Try with the next node on error.\n next\n end\n # Exit the loop as long as the first node replies\n startup_nodes_reachable = true\n break\n }\n if !startup_nodes_reachable\n raise Exceptions::StartupNodesUnreachable\n end\n end", "def get_and_update_node_state!(node, attribute_names)\n ret = {}\n external_ref = node[:external_ref]\n external_ref_changed = false\n attribute_names.each do |attribute_name|\n case attribute_name\n when :host_addresses_ipv4\n external_ref_changed = true if NodeState.update_host_addresses_ipv4!(ret, external_ref, node) \n when :fqdn\n external_ref_changed = true if NodeState.update_fqdn!(ret, external_ref, node) \n else\n Log.error(\"Not treating update of BOSH node attribute '#{attribute_name}'\")\n end\n end\n node.update(external_ref: external_ref) if external_ref_changed\n ret \n end", "def setup_states\n @jobs.each do |job|\n @job_states[job[:id]] ||= {:run_at => Time.now.utc, :runs_left => job[:times_to_run]}\n @job_states[job[:id]][:found] = true\n end\n end", "def discover_nodes\n @lock.synchronize do\n return unless running?\n @slaves, @unavailable = [], []\n if @master = find_existing_master\n logger.info(\"Using master #{@master} from existing znode config.\")\n elsif @master = guess_master(@nodes)\n logger.info(\"Guessed master #{@master} from known redis nodes.\")\n end\n @slaves = @nodes - [@master]\n logger.info(\"Managing master (#{@master}) and slaves #{stringify_nodes(@slaves)}\")\n end\n rescue *NODE_DISCOVERY_ERRORS => ex\n msg = <<-MSG.gsub(/\\s+/, ' ')\n Failed to discover master node: #{ex.inspect}\n In order to ensure a safe startup, redis_failover requires that all redis\n nodes be accessible, and only a single node indicating that it's the master.\n In order to fix this, you can perform a manual failover via redis_failover,\n or manually fix the individual redis servers. 
This discovery process will\n retry in #{TIMEOUT}s.\n MSG\n logger.warn(msg)\n sleep(TIMEOUT)\n retry\n end", "def grab_thread_results(threads)\n # TODO: this barrier might take arbitrarily long. Mostly needed\n # for merging pruposes, but we might consider instrumenting this\n # and doing something smarter if it's a bottleneck\n start_time = Time.now.to_i\n threads.each do |thread|\n remaining_time = (@max_thread_wait_time_minutes*60) - (Time.now.to_i-start_time)\n if remaining_time < 1 then remaining_time = 1 end\n @logger.debug{\"Thread #{thread}: Remaining time: #{remaining_time}\"}\n begin \n # thread.get will throw an exception if the underlying thread\n # threw an exception during execution\n if $executor\n java_import java.util.concurrent.TimeUnit\n thread.get(remaining_time*1000, TimeUnit::MILLISECONDS) \n else \n thread.join\n end\n rescue Exception => e\n if $executor\n # Two levels of nesting to get at the real exception!\n # jruby, sometimes you do weird things...\n begin\n e = e.cause if e.cause\n e = e.cause if e.cause\n rescue Exception # catch errors this generates...\n # What errors is it generating??? That might be good to\n # know ;-)\n end\n raise e\n else\n raise e\n end\n end\n if $executor then thread.cancel(true) end # should do nothing if done, but needs to happen to free up threads otherwise\n end\n end", "def dispatch_hashing!\n Thread::new do\n begin\n self.logger.debug { \"Starting hashset dispatcher.\" }\n \n data = true\n position = @options.offset * @options.blocksize\n \n if @options.blockcount\n target = position + @options.blockcount * @options.blocksize\n else\n target = nil\n end\n \n self.logger.info { \"Starting indexing for transfer.\" }\n \n while data and (target.nil? or position < target)\n self.file.acquire do |file|\n self.logger.debug { \"Reading block from position #{position}.\" }\n file.seek(position)\n data = file.read(@options.blocksize)\n end\n \n position += @options.blocksize\n @hash_queue << Digest::SHA1.hexdigest(data) if data\n end\n \n # indicates finish\n @hash_queue << :end\n \n self.logger.info { \"Indexing for transfer finished.\" }\n \n rescue Exception => e\n self.logger.fatal { \"#{e.class.name}: #{e.message}\\n #{e.backtrace.join(\"\\n\")}\" }\n end\n end\n end", "def fetch_jobs!(cluster, jobusrid)\n if jobusrid==nil\n @logger.warn \"monitor{#@id} - At least one job or user id must be specified, aborting\"\n return\n end\n if @root == nil \n @logger.warn \"monitor{#@id} - It seems API connection was not made \"\n return\n end \n puts \"monitor{#@id} - Looking for #{jobusrid} on #{cluster}\" if $dbug \n @logger.info \"Looking for #{jobusrid} on #{cluster}\"\n (j_ids = [] << jobusrid).flatten! #Convert to array\n size= j_ids.length\n #puts \"Size : #{size} & 0 : #{j_ids[0]}\"\n s_sites = ((cluster.to_s!=\"all\") && (cluster!=nil)) ? 1 : root.sites.length\n i=0\n root.sites.each do |site|\n begin \n next unless ((site['uid']==cluster.to_s) || (cluster.to_s == \"all\") || (cluster == nil))\n i+=1\n puts \"monitor{#@id} - \" + i.to_s+\"/\"+s_sites.to_s+\" \" + site['description'] + \" connecting...\" \n @logger.info \"monitor{#@id} - \" + i.to_s+\"/\"+s_sites.to_s+\" \" + site['description'] + \" connecting...\" \n site.jobs.each do |job|\n j_ids.each do |t_id|\n if (t_id.kind_of? 
Integer)\n if job['uid'].to_i==t_id\n @logger.info \"monitor{#@id} - #{t_id} found on #{site['description']}\" \n puts \"#{t_id} found on #{site['description']}\" if $dbug\n @jobs << job\n updateHash(site['uid'], job['uid'])\n if size == 1 # Job id is unique\n return\n end\n end\n elsif (t_id.kind_of? String)\n if job['user'].to_s == t_id\n @logger.info \"monitor{#@id} - '#{t_id}' found on '#{site['description']}' jobid #{job['uid']}\" \n puts \"monitor{#@id} - '#{t_id}' found on '#{site['description']}' jobid #{job['uid']}\" if $dbug\n @jobs << job\n updateHash(site['uid'], job['uid'])\n end\n else\n @logger.warn \"Unknown input for #{t_id}, skipping\" \n puts \"monitor{#@id} - Unknown input for #{t_id}, skipping\"\n end\n end\n end\n rescue\n @logger.warn \"monitor{#@id} - could not connect to '#{site['description']}'\"\n puts \"monitor{#@id} - could not connect to '#{site['description']}'\"\n end\n end\n if jobs.length >0\n @logger.info \"monitor{#@id} - scan completed\"\n return\n else\n @logger.warn \"#{jobusrid} not found on #{cluster}\" \n puts \"monitor{#@id} - #{jobusrid} not found on #{cluster}\"\n return\n end\n end", "def check_up(targets)\n threads = []\n targets.each do |target_ip|\n threads.append Thread.new { ping_host target_ip.to_s }\n end\n threads.each(&:join)\n @ips_up\n end", "def await_completion\n @latch.wait(@options[:timeout]) || status.timeout!\n return unless status.result == :answer\n logger.debug \"Main calls were completed, waiting for any added calls: #{@waiters.inspect}\"\n @waiters.each(&:wait)\n logger.debug \"All calls were completed, unblocking.\"\n end", "def fetch_nodes(nodes, dns_cache)\n ret = []\n nodes.each_with_index do |item, index|\n ip, port = item\n host = dns_cache.fetch(ip) {\n |missing_ip|\n host = Resolv.getname(missing_ip)\n dns_cache[ip] = host\n host\n }\n name = \"#{host}:#{port}\"\n role = index == 0 ? 'master' : 'slave'\n node = {\n :host => host, :port => port,\n :name => name, :ip => ip,\n :role => role\n }\n ret << node\n end\n ret\n end", "def fetch_hooks_to_be_processed\n acquire_lock\n fetch_locked_hooks\n end", "def get_results(in_progress)\nhostname2pid = {}\nin_progress.each { |pid, hostname|\n hostname2pid[hostname] = pid\n}\nissue_command_on_hosts(hostname2pid, 30){ |vp, pid| vp.get_results(pid) }\nend", "def resolve_conflicts\n neighbors = nodes\n new_chain = []\n\n # We are only looking for chains longer than ours\n max_length = chain.length\n\n neighbors.each do |node|\n response = Net::HTTP.get(URI.parse(\"#{node}/chain\"))\n\n if response\n length = JSON.parse(response)[\"length\"]\n chain = JSON.parse(response)[\"chain\"]\n\n if length > max_length && valid_chain(chain)\n max_length = length\n new_chain = chain\n end\n end\n end\n\n if new_chain.any?\n @chain = new_chain\n true\n else\n false\n end\n end", "def updates\n @report = {}\n Server.find_each do |server|\n # Go through each package. 
In some cases (gems) there may be multiple\n # versions of a package on the machine.\n packages = {}\n server.servers_to_packages.find_each do |package_map|\n next unless package_map.status == 'pending'\n package = Package.find(package_map.package_id)\n\n new = {}\n new['provider'] = package.provider\n new['version'] = package.version\n packages[package.name] = [] unless packages.key?(package.name)\n packages[package.name] << new\n end\n @report[server.hostname] = packages unless packages.empty?\n end\n end", "def digests\n @digests ||= SidekiqUniqueJobs::Digests.new\n end", "def find_stale(rounds)\n puts \"Check 1...\"\n last = fetch_all\n (1...rounds).each do |i|\n sleep 5\n puts \"Check #{i + 1}...\"\n current = fetch_all\n last = keep_unchanged_data(last, current)\n end\n\n stale = (last[:total_items].keys & last[:items].keys).sort!\n sorted = sort_data(last)\n\n puts \"\"\n puts \"stale queue prediction\"\n puts \"======================\"\n printf(\"%11s %11s %s\\n\", \"total_items\", \"items\", \"queue\")\n printf(\"%11s %11s %s\\n\", \"-----------\", \"-----------\", \"--------------------\")\n stale.each do |queue_name|\n items = sorted[:items][stale]\n total_items = sorted[:total_items][stale]\n printf(\"%11d %11d %s\\n\", total_items, items, queue_name)\n end\nend", "def run( nodes )\n\t\t\tresults = {}\n\t\t\thydra = Typhoeus::Hydra.new( self.runner_settings )\n\n\t\t\tnodes.each do |identifier, node|\n\t\t\t\tself.log.debug \"Making request for node %s\" % [ identifier ]\n\t\t\t\trequest = self.request_for_node( node )\n\t\t\t\trequest.on_complete do |response|\n\t\t\t\t\tself.log.debug \"Handling response for %s\" % [ identifier ]\n\t\t\t\t\tresults[ identifier ] =\n\t\t\t\t\t\tself.make_response_results( response, node )\n\t\t\t\tend\n\t\t\t\thydra.queue( request )\n\t\t\tend\n\n\t\t\thydra.run\n\n\t\t\treturn results\n\t\tend", "def fetch_results!\n raise NoTargets if targets.empty?\n\n targets.uniq!\n\n puts 'searching the AUR...'\n results = Rpc.new(:multiinfo, *targets).call\n\n # we need the results in the order of our targets (so dependencies\n # are installed first). unfortunately, the rpc returns results\n # alphabetically. 
assumption is the reordering done here is\n # cheaper than making per-target rpc calls.\n targets.each do |target|\n if result = results.detect {|r| r.name == target}\n @results << result\n else\n raise NoResults.new(target)\n end\n end\n end", "def initialize_slots_cache\n startup_nodes_reachable = false\n dns_cache = {}\n @startup_nodes.each{|n|\n begin\n nodes = []\n r = get_redis_link(n[:host],n[:port])\n r.cluster(\"slots\").each {|r|\n slot_nodes = fetch_nodes(r[2..-1], dns_cache)\n nodes += slot_nodes\n node_names = slot_nodes.map { |x| x[:name]}.compact\n (r[0]..r[1]).each{|slot|\n @connections.update_slot!(slot, node_names)\n }\n @connections.init_node_pool(slot_nodes)\n }\n populate_startup_nodes(nodes)\n @refresh_table_asap = false\n rescue Errno::ECONNREFUSED, Redis::TimeoutError, Redis::CannotConnectError, Errno::EACCES\n # Try with the next node on error.\n next\n rescue\n raise\n end\n # Exit the loop as long as the first node replies\n startup_nodes_reachable = true\n break\n }\n if !startup_nodes_reachable\n raise Exceptions::StartupNodesUnreachable\n end\n end", "def updateRoutingTable()\n @routingTable = Hash.new\n\n # get a map hostname => (hostname, cost)\n weightsHash = @validWeights.hnameMap\n\n # implementation of Djikstra's algorithm -- follows Wikipedia psuedocode\n # pretty closely\n unvisited = Array.new\n\n weightsHash.keys.each { |node|\n @routingTable[node] = [nil, Float::MAX]\n unvisited.push(node)\n }\n\n @routingTable[@hostname] = [@hostname, 0]\n\n while !unvisited.empty?\n # find node in [unvisited] with minimum distance\n node = unvisited[0]\n unvisited.each { |nNode|\n if (@routingTable[nNode][1] < @routingTable[node][1]) then\n node = nNode\n end\n }\n\n # delete it from the set\n unvisited.delete(node)\n\n # for each neighbor that this node is connected to, that is still in the unvisited set\n weightsHash[node].keys.each { |neighbor|\n if unvisited.include?(neighbor) then\n altRoute = @routingTable[node][1] + weightsHash[node][neighbor]\n \n if (altRoute < @routingTable[neighbor][1])\n @routingTable[neighbor][0] = node\n @routingTable[neighbor][1] = altRoute\n end\n end\n }\n end\n end", "def create_pools\n @old_store = store.dup\n pools.map do |key, value|\n # convert the requests to vm names\n pools[key]['requests'] = value['requests'].find_all do |req|\n puts \"Checking request: #{req}\"\n r = req_obj(req)\n if r.completed?\n puts \"The request #{req} has completed, getting hostname\"\n hostnames = resolve_vm_name(r)\n # remove request from pool file by not returning anything\n # if hostname does not exist but request completed don't update pool\n if ! 
hostnames\n puts \"Provisioning seemed to have failed for #{req}\"\n puts \"Removing request #{req} from pool #{key}\"\n false\n else\n pools[key]['pool_instances'] = value['pool_instances'] + hostnames\n false\n end\n else \n # has not completed\n # keep the request, since it is not finished\n puts \"The request #{req} is still running\"\n req\n end\n end\n\n # return the alive instances and save to the pool\n pools[key]['pool_instances'] = pools[key]['pool_instances'].find_all {|h| is_alive?(h) }\n\n # delete any old instances from used pool\n pools[key]['used_instances'] = pools[key]['used_instances'].find_all {|h| is_alive?(h) }\n\n # create the pool, and save the request in the requests\n # do not create if the number of systems and requests are more than the requested amount\n current_total = value['pool_instances'].count + pools[key]['requests'].count\n unless current_total >= value['size']\n reqs = create_pool(value)\n pools[key]['requests'] = reqs\n end\n end\n # prevents updates from occuring when they are not required\n store.save if store_changed?(@old_store, store)\nend", "def fetch_locations\n log(' - Fetching location data (takes ~25s)...', false)\n results = {}\n ('a'..'z').to_a.each do |letter|\n response = get_response(\"http://www.railwaycodes.org.uk/stations/station#{letter}.shtm\")\n Nokogiri::HTML(response).xpath('//table/tr').each do |row|\n cols = row.xpath('./td').map(&:text)\n crs = row.xpath('./td').first.xpath('./a').first['name'].upcase rescue nil\n unless [crs, cols[6], cols[7]].include?(nil) || [crs, cols[6], cols[7]].include?('')\n results[crs] = { latitude: cols[7].to_f, longitude: cols[6].to_f }\n end\n end\n sleep(1) # be nice to railwaycodes.org.uk\n end\n log('DONE')\n results\nend", "def keys\n\t\t\treturn @lookup\n\t\tend", "def reload_pool_nodes\n @mutex.synchronize do\n reload_pool_nodes_unsync\n end\n end", "def refresh (num=@max_parallel,use_cache=true)\n\t\t puts \"Add entries to the local cache table from site tracker: \" if @verbose\n\t\t\tresults = Hash.new\n\t\t\ttags = @tag_store.keys\n\t\t\tif tags.size > 0\n\t\t\t\tParallel.map(tags, :in_processes => num) { |target|\n\t\t\t\t\tcheck_adware(target,use_cache)\n\t\t\t\t}.each do |process|\n\t\t\t\t\tif !process\n\t\t\t\t\t\tnext\n\t\t\t\t\telse\n\t\t\t\t\t\tresults.merge!(process)\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\t@tag_store.merge!(results)\n\t\t\t\tputs \"Done loading adware entries.\"\n tags = nil\n\t\t\t\treturn results\n\t\t\telse\n\t\t\t\tputs \"Error: no entry is loaded. Please check your list and try again.\"\n\t\t\tend\n tags = nil\n\t\t\treturn results\n\t\trescue => ee\n\t\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n \tend", "def flush\n @mutex.synchronize { @flush = true }\n @request_results.each do |result|\n begin\n result.wait!\n rescue StandardError\n # Ignored\n end\n end\n @mutex.synchronize do\n @doc_refs = Set.new\n @flush = false\n end\n end", "def watch_registry\n watch_path = \"#{@root}/**\"\n logger.info \"Start monitoring #{watch_path}\"\n # This thread must use its own dedicated doozer connection\n doozer = RubyDoozer::Client.new(@doozer_config)\n @current_revision ||= doozer.current_revision\n\n # Watch for any new changes\n logger.debug \"Monitoring thread started. 
Waiting for Registry Changes\"\n doozer.watch(watch_path, @current_revision + 1) do |node|\n logger.trace \"Registry Change Notification\", node\n\n # Update the current_revision with every change notification\n @current_revision = node.rev\n\n # Remove the Root key\n key = relative_key(node.path)\n\n case node.flags\n when 4\n changed(key, @deserializer.deserialize(node.value), node.rev)\n when 8\n deleted(key, node.rev)\n else\n logger.error \"Unknown flags returned by doozer:#{node.flags}\"\n end\n end\n logger.info \"Stopping monitoring thread normally\"\n\n rescue ScriptError, NameError, StandardError, Exception => exc\n logger.error \"Exception in monitoring thread\", exc\n ensure\n doozer.close if doozer\n logger.info \"Stopped monitoring for changes in the doozer registry\"\n end", "def define(word_lookup)\n index=hash(word_lookup)\n list=@buckets[index]\n counter=0\n while counter<list.counter\n current_node=list.find_node(counter)\n if current_node.word == word_lookup\n puts \"Searched #{counter+1} nodes.\"\n puts \"Found definition for #{word_lookup}: #{current_node.definition}\" \n return\n end\n counter +=1\n end\n puts \"Searched #{counter+1} nodes.\"\n puts \"#{word_lookup} not found\"\n end", "def poll_nodes\n # clear any list of nodes we already know about and start fresh\n @nodes.clear\n transmit Packet::Poll.new\n end", "def hash_nodes(statements, nodes, grounded_hashes)\n hashes = grounded_hashes.dup\n ungrounded_hashes = {}\n hash_needed = true\n\n # We may have to go over the list multiple times. If a node is marked as\n # grounded, other nodes can then use it to decide their own state of\n # grounded.\n while hash_needed\n starting_grounded_nodes = hashes.size\n nodes.each do | node |\n unless hashes.member? node\n grounded, hash = node_hash_for(node, statements, hashes)\n if grounded\n hashes[node] = hash\n end\n ungrounded_hashes[node] = hash\n end\n end\n\n # after going over the list, any nodes with a unique hash can be marked\n # as grounded, even if we have not tied them back to a root yet.\n uniques = {}\n ungrounded_hashes.each do |node, hash|\n uniques[hash] = uniques.has_key?(hash) ? false : node\n end\n uniques.each do |hash, node|\n hashes[node] = hash if node\n end\n hash_needed = starting_grounded_nodes != hashes.size\n end\n [hashes, ungrounded_hashes]\n end", "def send(lookup)\n lookup.ensure_enough_info\n request = build_request(lookup)\n\n response = @sender.send(request)\n\n raise response.error if response.error\n\n candidates = convert_candidates(@serializer.deserialize(response.payload))\n lookup.result = candidates\n end", "def process_workers\n cleanup_worker_threads\n\n completion_blocks = []\n while !worker_completion_blocks.empty?\n block = worker_completion_blocks.pop\n completion_blocks << PollBlockDefinition.new(\"worker completion handler #{block}\", block, Hash.new)\n end\n call_poll_blocks(completion_blocks)\n end", "def health_check\n ret = {}\n unready = []\n NodeObject.all.each do |node|\n unready << node.name unless node.ready?\n end\n ret[:nodes_not_ready] = unready unless unready.empty?\n failed = Proposal.all.select { |p| p.active? && p.failed? }\n ret[:failed_proposals] = failed.map(&:display_name) unless failed.empty?\n ret\n end", "def refresh_ip_sites\n\t\tputs \"Refresh all entries that contain an IP address instead of a FQDN ... 
\"\n\t\tsites=get_ip_sites\n\t\tlive_sites=sites.delete_if { |x| @known_sites[x]['code'] == 10000 or @known_sites[x]['code'] == 20000 }\n\t\tchanges=Hash.new\n\t\tchanges=bulk_refresh(live_sites)\n\t\t@known_sites.merge!(changes)\n\t\tputs \"Done refresh IP sites.\"\n\t\treturn changes\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def collect_and_compare_queries!(tcpdump_time=30, *compare_nodes)\n # Sample traffic and convert to slowlog for master\n master_dump_filename = master.tcpdump!(tcpdump_time)\n local = Host.local # node where we're running Jetpants from\n local.ssh_cmd \"mkdir -p #{Jetpants.export_location}\"\n master.fast_copy_chain(Jetpants.export_location, local, files: master_dump_filename, overwrite: true)\n master.ssh_cmd \"rm #{Jetpants.export_location}/#{master_dump_filename}\"\n master_slowlog_path = local.dumpfile_to_slowlog(\"#{Jetpants.export_location}/#{master_dump_filename}\")\n \n # If we also have an active slave running, grab sampled slowlog from there too\n active_slowlog_path = nil\n if active_slaves.size > 0\n active_slave = active_slaves.first\n active_dump_filename = active_slave.tcpdump!(tcpdump_time)\n active_slave.fast_copy_chain(Jetpants.export_location, local, files: active_dump_filename, overwrite: true)\n active_slave.ssh_cmd \"rm #{Jetpants.export_location}/#{active_dump_filename}\"\n active_slowlog_path = local.dumpfile_to_slowlog(\"#{Jetpants.export_location}/#{active_dump_filename}\")\n end\n \n # Gather our comparison nodes\n if compare_nodes.size == 0\n higher_ver_standby = standby_slaves.select {|s| s.version_cmp(master) > 0}.first\n same_ver_standby = standby_slaves.select {|s| s.version_cmp(master) == 0}.first\n if higher_ver_standby && same_ver_standby\n compare_nodes = [same_ver_standby, higher_ver_standby]\n else\n compare_nodes = standby_slaves[0, 2]\n end\n end\n \n # Disable monitoring on our comparison nodes, and then stop replication\n # at the same position. 
We only proceed with this if we're comparing\n # exactly two nodes; this may be improved in a future release.\n if compare_nodes.size == 2\n compare_nodes.each {|n| n.disable_monitoring}\n compare_nodes.first.pause_replication_with(compare_nodes.last)\n end\n \n # Run pt-upgrade using the master dumpfile\n puts\n output \"COMPARISON VIA QUERY LOG FROM MASTER\"\n compare_queries(master_slowlog_path, true, *compare_nodes)\n \n if active_slowlog_path\n puts\n output \"COMPARISON VIA QUERY LOG FROM ACTIVE SLAVE\"\n compare_queries(active_slowlog_path, true, *compare_nodes)\n end\n \n # If we previously paused replication and disabled monitoring, un-do this\n if compare_nodes.size == 2\n compare_nodes.concurrent_each do |n| \n n.resume_replication\n n.catch_up_to_master\n n.enable_monitoring\n end\n end\n end", "def search_results_hashes(match)\n case match\n when 'pending-delete not available to purge'\n search_results_pending_delete_table.rows_text.map{ |row| Hash[* search_results_pending_delete_table.headers_text.zip(row).flatten] } unless search_results_pending_delete_table.text.include?(\"No results found\")\n when 'pending-delete available to purge'\n search_results_available_to_purge_table.rows_text.map{ |row| Hash[*search_results_available_to_purge_table.headers_text.zip(row).flatten] } unless search_results_available_to_purge_table.text.include?(\"No results found\")\n when 'who have been purged'\n search_results_purged_table.rows_text.map{ |row| Hash[*search_results_purged_table.headers_text.zip(row).flatten] } unless search_results_purged_table.text.include?(\"No results found\")\n end\n end", "def refresh\n ::Chef_Delivery::ClientHelper.enter_client_mode_as_delivery\n @job = @rest.get_rest(@job_uri)\n ::Chef_Delivery::ClientHelper.leave_client_mode_as_delivery\n\n @id ||= job['id']\n @status = job['status']\n @created_at = DateTime.parse(job['created_at'])\n @updated_at = DateTime.parse(job['updated_at'])\n @results = job['nodes']\n end", "def check_for_existing_chef_objects\n Parallel.map(@bs.nodes, in_threads: @bs.batch_size.to_i) do |node|\n fqdn = node.split('.')\n node_name = fqdn[0] + '.' + fqdn[1]\n cleanup_chef_objects(node_name) if chef_objects_dirty?(node_name)\n end\n end", "def watch_appropriate_nodes\n remaining_paths.last( threshold + 1 ).reverse_each do |path|\n next if watched_paths.include? path\n watched_paths << path\n finish_node(path) unless zk.exists?(path, :watch => true)\n end\n end", "def nodes\n # Find the nodes that were down but are ready to be refreshed, or those\n # with stale connection information.\n needs_refresh, available = seeds.partition do |node|\n refreshable?(node)\n end\n\n # Refresh those nodes.\n available.concat(refresh(needs_refresh))\n\n # Now return all the nodes that are available and participating in the\n # replica set.\n available.reject{ |node| node.down? 
}\n end", "def lookup\n @lock.with_read_lock do\n recalculate unless @options[:lookup]\n @lookup ||= @options[:lookup].map { |code| self.class.new(code) }.uniq\n end\n end", "def update_polling_pool(key, thread); end", "def poll\n\t\tactive_node = Node.find_by_mac_address(params[:nodeid]) || create_node\n\n\t\tif params[:jobid]\n\t\t\t# check status of current job\n\t\t\tjob = Job.find(params[:jobid])\n\n\t\t\tif job.status == \"active\"\n\t\t\t\trender :json => '{ \"status\": \"active\"}'\n\t\t\telse\n\t\t\t\trender\t:json => '{ \"status\": \"halt\"}'\n\t\t\tend\n\t\telse\n\t\t\t# check if there are any ongoing jobs\n\t\t\tjobs = Job.active\n\n\t\t\t# If there are any, choose one randomly\n\t\t\tif jobs.count > 0\n\t\t\t\tjob_idx = rand(jobs.count - 1)\n\n\t\t\t\tjob = jobs[job_idx]\n\n\t\t\t\t# If it's a dictionary job let's find the work\n\t\t\t\tif job[:attack_type] == \"dictionary\"\n\t\t\t\t\t# Not elegant, but can't directly assign job[:work] = ...\n\t\t\t\t\tjob.work = dictionary_content_for_job(job.id, active_node.id)\n\n\t\t\t\t\tif job.work == \"\"\n\t\t\t\t\t\tjob.status = \"completed\"\n\t\t\t\t\t\tjob.save\n\t\t\t\t\t\treturn :json => '{ \"status\": \"halt\"}'\n\t\t\t\t\tend\n\t\t\t\telsif job[:attack_type] == \"bruteforce\"\n\t\t\t\t\tjob.next_index = job.next_index.nil? ? 0 : job.next_index\n\n\t\t\t\t\tbruteforce_status = BruteforceStatus.create({\n\t\t\t\t\t\t:node_id => active_node.id,\n\t\t\t\t\t\t:job_id => job.id,\n\t\t\t\t\t\t:index => job.next_index\n\t\t\t\t\t})\n\t\t\t\t\t## Calculate next index. We're just going to increase by 50 for now\n\t\t\t\t\tjob.next_index = job.next_index + 50\n\t\t\t\t\tjob.save\n\n\t\t\t\t\t# TODO: Mark job complete if next_index > length\n\t\t\t\t\tkeyspace_size = Bruteforce::totalSize(job.charset)\n\t\t\t\t\tif job.next_index > keyspace_size\n\t\t\t\t\t\tjob.status = \"completed\"\n\t\t\t\t\t\tjob.save\n\t\t\t\t\tend\n\n\t\t\t\tend\n\n\t\t\t\trender :json => job, methods: [:work, :response_flag_meta]\n\t\t\telse\n\t\t\t\trender :json => '{ \"job\": \"none\"}'\n\t\t\tend\n\t\tend\n\n\t\t# mark the client as active\n\t\tactive_node.mark_active\n\tend", "def find_hash(possible_words, known_anagram, known_md5s, start, n = 3)\n cpus = Parallel.processor_count\n puts \"Total number of iteration: #{possible_words.length**n}\"\n puts \"You got #{cpus} cores\"\n\n hash_table = get_hash_table(known_anagram)\n known_hash = get_hash(known_anagram, hash_table)\n\n Parallel.map(possible_words, in_processes: cpus) do |w1|\n possible_words.each do |w2|\n possible_words.each do |w3|\n # Print every ten million iteration\n phrase = \"#{w1} #{w2} #{w3}\"\n\n # Allow only equal size phrases\n next unless phrase.length == known_anagram.length\n\n # Allow only equal hash phrases\n hash = get_hash(phrase, hash_table)\n next unless hash == known_hash\n\n # All only equal md5 phrases\n md5 = Digest::MD5.hexdigest phrase\n next unless known_md5s.include?(md5)\n\n puts \"#{phrase} #{md5} (#{Time.now - start}s)\"\n end\n end\n end\nend", "def source_index_hash\n result = {}\n sources.each do |source|\n\tresult[source] = fetch_source(source)\n end\n @fetcher_class.finish\n result\n end", "def discover_chef_nodes!\n chef_nodes.each do |chef_node|\n if chef_node[\"cluster_name\"] && chef_node[\"facet_name\"] && chef_node[\"facet_index\"]\n cluster_name = chef_node[\"cluster_name\"]\n facet_name = chef_node[\"facet_name\"]\n facet_index = chef_node[\"facet_index\"]\n elsif chef_node.name\n ( cluster_name, facet_name, facet_index ) = chef_node.name.split(/-/)\n 
else\n next\n end\n svr = Ironfan::Server.get(cluster_name, facet_name, facet_index)\n svr.chef_node = chef_node\n @aws_instance_hash[ chef_node.ec2.instance_id ] = svr if chef_node && chef_node[:ec2] && chef_node.ec2.instance_id\n end\n end", "def discover_peers(port, key_private)\r\n handshake_peer(port, FIRST_PORT, key_private) # Handshake Genesis Peer\r\n i = 0\r\n peers = Array.new # Array To Store Ports Of Discovered Peers\r\n $peers.length.times do |i| # Store Your Discovered Peers\r\n peers << $peers[i].port.to_s\r\n end\r\n while (i < peers.length) # Store Others Discovered Peers\r\n their_peers = Faraday.get(\"#{URL}:#{peers[i]}/peer_peers\").body.to_s.chomp\r\n their_peers = their_peers.split(\",\")\r\n their_peers.length.times do |j|\r\n search_result = 0\r\n peers.length.times do |k|\r\n search_result = -1 if (their_peers[j] == peers[k])\r\n end\r\n peers << their_peers[j] if (search_result != -1)\r\n end\r\n i += 1\r\n end\r\n # Handshake Newly Discovered Peers\r\n i = 0\r\n peers.length.times do |i|\r\n handshake_peer(port, peers[i], key_private) if (search_peers_by_port(peers[i]) == -1)\r\n end\r\nend", "def wait_for_ready\n @logger.debug(\"Waiting for workers to be ready\")\n @workers.each(&:verify)\n @logger.debug(\"Workers are ready\")\n end", "def check_resolvers\n done = @resolver_manager.done\n done.each do |entry, resolver|\n if resolver.successful?\n @link_table.remove(entry)\n\n resolver.result.each do |resolvable|\n entry = LinkTable::Entry.new(resolvable.link)\n entry.status = resolvable.status\n entry.name = resolvable.name\n entry.hoster = resolvable.hoster\n entry.size = resolvable.size\n @link_table.add(entry)\n end\n else\n entry.status.error!(resolver.message)\n @link_table.update(entry)\n end\n end\n end" ]
[ "0.6164099", "0.6056663", "0.5340351", "0.5247167", "0.5187989", "0.517058", "0.51317114", "0.5130107", "0.51201093", "0.5017466", "0.49810123", "0.49194014", "0.4897538", "0.48940453", "0.488707", "0.48503375", "0.48428032", "0.48377717", "0.47676444", "0.47662422", "0.4687213", "0.4675157", "0.46714684", "0.4662069", "0.4658758", "0.4657885", "0.46550766", "0.4651106", "0.46388686", "0.46318823", "0.4626773", "0.46091503", "0.46087676", "0.45966733", "0.45868662", "0.45743933", "0.45739162", "0.45707932", "0.4539622", "0.45367792", "0.4526607", "0.45191577", "0.45118728", "0.45087305", "0.4508288", "0.45037615", "0.44980615", "0.44940397", "0.4492317", "0.44814423", "0.44702125", "0.4465698", "0.44611582", "0.44601995", "0.4447363", "0.44423917", "0.44418553", "0.4438654", "0.4433767", "0.44301406", "0.442737", "0.44259152", "0.4425621", "0.44142652", "0.44142282", "0.4412644", "0.44120273", "0.44105983", "0.4409485", "0.44068977", "0.44060123", "0.44034597", "0.43949306", "0.4391823", "0.43782595", "0.43775395", "0.4372482", "0.4370779", "0.4352825", "0.43527955", "0.43403673", "0.43373492", "0.43370286", "0.43308556", "0.43303794", "0.43291083", "0.43267286", "0.4322006", "0.43206397", "0.4320379", "0.43167022", "0.4314128", "0.43018502", "0.429551", "0.4285791", "0.42844895", "0.42835885", "0.42827192", "0.42814323", "0.427401" ]
0.7522926
0
Compare the given resolver +records+ with the +node_data+, and create an update hash describing the results.
def compare_values( records, node_data ) type = node_data['record_type'] case type when 'A' return self.compare_a_records( records, node_data['values'] ) when 'NS' return self.compare_ns_records( records, node_data['values'] ) when 'MX' return self.compare_mx_records( records, node_data['values'] ) else return { dns: "#{type} not comparable yet." } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compare_ns_records( records, hosts )\n\t\t\trecord_hosts = Set.new( records.map(&:name) )\n\t\t\thosts = Set.new( hosts.map {|name| Resolv::DNS::Name.create(name + '.')} )\n\n\t\t\tself.log.debug \"Comparing %p to %p\" % [ record_hosts, hosts ]\n\n\t\t\tstatus = nil\n\t\t\tif ( record_hosts ^ hosts ).empty?\n\t\t\t\tstatus = { ns_record: record_hosts.map(&:to_s) }\n\t\t\telsif !( subset = record_hosts - hosts ).empty?\n\t\t\t\tstatus = { error: \"missing NS records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\telsif !( subset = hosts - record_hosts ).empty?\n\t\t\t\tstatus = { error: \"extra NS records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\tend\n\n\t\t\treturn status\n\t\tend", "def compute_reverse_records(site_uid, site_records)\n reverse_records = {}\n\n site_records.each { |zone, records|\n # Sort records\n site_records[zone] = sort_records(records)\n\n records.each{ |record|\n # Get reverse records\n reverse_file_name, reverse_record = get_reverse_record(record, site_uid)\n if reverse_file_name != nil\n reverse_records[reverse_file_name] ||= []\n reverse_records[reverse_file_name].each {|r|\n if r.label == reverse_record.label\n puts \"Warning: reverse entry with address #{reverse_record.label} already exists in #{reverse_file_name}, #{reverse_record.name} is duplicate\"\n end\n }\n reverse_records[reverse_file_name] << reverse_record\n end\n }\n }\n\n reverse_records\nend", "def compare_a_records( records, addresses )\n\t\t\trecord_addresses = Set.new( records.map(&:address) )\n\t\t\taddresses = Set.new( addresses.map {|addr| Resolv::IPv4.create(addr)} )\n\n\t\t\tstatus = nil\n\t\t\tif addresses.subset?( record_addresses )\n\t\t\t\tstatus = { a_record: {addresses: record_addresses.map(&:to_s)} }\n\t\t\telse\n\t\t\t\tmissing = addresses - record_addresses\n\t\t\t\tstatus = { error: \"missing A records: %s\" % [ missing.map(&:to_s).join(', ') ] }\n\t\t\tend\n\n\t\t\treturn status\n\t\tend", "def create_lookups( nodes )\n\t\t\treturn nodes.each_with_object( {} ) do |(identifier, node), hash|\n\t\t\t\tself.log.debug \"Creating lookup for node: %p\" % [ node ]\n\t\t\t\tname = node['name'] or next\n\t\t\t\trecord_type = node['record_type'] || 'A'\n\t\t\t\trecord_class = Resolv::DNS::Resource::IN.const_get( record_type ) or\n\t\t\t\t\traise \"Unsupported record type %p!\" % [ record_type ]\n\n\t\t\t\tself.log.debug \"Looking up %s record for %s (%s)\" % [ record_type, name, identifier ]\n\t\t\t\tthr = Thread.new do\n\t\t\t\t\tself.resolver.getresources( name, record_class )\n\t\t\t\tend\n\t\t\t\thash[ thr ] = identifier\n\t\t\tend\n\t\tend", "def compare(node, node2)\n\n hxlist, hxlist2 = hashedxml(node), hashedxml(node2) \n \n # elements which may have been modified are also \n # added to the added_indexes list \n added_or_changed_indexes = added(hxlist, hxlist2)\n added_indexes, updated_indexes = @fuzzy_match ? \\\n fuzzy_match(added_or_changed_indexes, node, node2) : \\\n [added_or_changed_indexes, []]\n added_indexes.each do |i|\n \n attributes = node2.elements[i+1].attributes\n attributes[:created] ||= Time.now.to_s\n \n node2.elements[i+1].traverse do |e|\n\n e.attributes[:created] ||= Time.now.to_s\n\n end\n end\n\n deleted_indexes = deleted(hxlist, hxlist2)\n \n unchanged_indexes = unchanged(hxlist, hxlist2)\n\n unchanged_indexes.each do |i, i2| \n\n compare(node.elements[i+1], node2.elements[i2+1]) if node\\\n .elements[i+1].has_elements?\n attributes2 = node2.elements[i2+1].attributes\n \n if attributes2[:created].nil? 
then\n attributes = node.elements[i+1].attributes\n attributes2[:created] = attributes[:created] if attributes[:created]\n end\n end\n\n end", "def wait_for_responses( lookups, nodes )\n\t\t\tupdate = {}\n\n\t\t\tuntil lookups.empty?\n\n\t\t\t\tlookups.keys.each do |thr|\n\t\t\t\t\tnext if thr.alive?\n\n\t\t\t\t\tidentifier = lookups.delete( thr )\n\t\t\t\t\tbegin\n\t\t\t\t\t\trecords = thr.value\n\n\t\t\t\t\t\tif !records\n\t\t\t\t\t\t\tupdate[ identifier ] = { error: \"Lookup failed (timeout).\" }\n\t\t\t\t\t\telsif records.empty?\n\t\t\t\t\t\t\tupdate[ identifier ] = { error: \"Lookup failed (no records returned).\" }\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tnode_data = nodes[ identifier ]\n\t\t\t\t\t\t\tupdate[ identifier ] = self.compare_values( records, node_data )\n\t\t\t\t\t\tend\n\t\t\t\t\trescue SystemCallError => err\n\t\t\t\t\t\tmsg = \"%p: %s\" % [ err.class, err.message ]\n\t\t\t\t\t\tself.log.error \"%s while looking up %s\" % [ msg, identifier ]\n\t\t\t\t\t\tupdate[ identifier ] = { error: msg }\n\t\t\t\t\tend\n\t\t\t\tend\n\n\t\t\tend\n\n\t\t\treturn update\n\t\tend", "def add_records(records)\n atoms = {}\n\n records.each do |record|\n next unless @if_proc.call(record)\n\n condensed_record = condense_record(record)\n atoms = add_occurences(condensed_record, record.id, atoms)\n end\n\n @storage.add(atoms)\n end", "def compare_mx_records( records, hosts )\n\t\t\trecord_hosts = Set.new( records.map(&:exchange) )\n\t\t\thosts = Set.new( hosts.map {|name| Resolv::DNS::Name.create(name + '.')} )\n\n\t\t\tself.log.debug \"Comparing %p to %p\" % [ record_hosts, hosts ]\n\n\t\t\tstatus = nil\n\t\t\tif ( record_hosts ^ hosts ).empty?\n\t\t\t\trecord_strings = records.\n\t\t\t\t\tmap {|rec| \"%s[%d]\" % [rec.exchange, rec.preference || 0] }\n\t\t\t\tstatus = {\n\t\t\t\t\tmx_record: record_strings.join( ', ' )\n\t\t\t\t}\n\t\t\telsif !( subset = record_hosts - hosts ).empty?\n\t\t\t\tstatus = { error: \"missing MX records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\telsif !( subset = hosts - record_hosts ).empty?\n\t\t\t\tstatus = { error: \"extra MX records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\tend\n\n\t\t\treturn status\n\t\tend", "def fetch_records_changes(directories_to_search, options)\n directories_records.inject({}) do |h, r|\n # directory records skips paths outside their range, so passing the\n # whole `directories` array is not a problem.\n record_changes = r.fetch_changes(directories_to_search, options.merge(:relative_paths => use_relative_paths))\n\n if h.empty?\n h.merge!(record_changes)\n else\n h.each { |k, v| h[k] += record_changes[k] }\n end\n\n h\n end\n end", "def dns_update(zone, records)\n update = Dnsruby::Update.new(zone)\n records.each do |r|\n if r.type.upcase == 'ADD'\n s = \"#{Domain} 3600 #{Type} #{RDATA}\"\n rr = Dnsruby::RR.create(s)\n update.add(rr)\n else\n update.delete(r['Domain'], r['Type'], r['RDATA'])\n end\n end\n update\n end", "def run( nodes )\n\t\t\tself.log.debug \"Got nodes to check with %p: %p\" % [ self, nodes ]\n\n\t\t\trecords = nodes.each_with_object( {} ) do |(identifier, node), hash|\n\t\t\t\tself.log.debug \"Looking up whois info for %p (%p)\" % [ identifier, node ]\n\t\t\t\thash[ identifier ] = self.client.lookup( node['name'] )\n\t\t\tend\n\n\t\t\treturn records.each_with_object( {} ) do |(identifier, record), hash|\n\t\t\t\tparser = record.parser\n\t\t\t\thash[ identifier ] = self.parse_record( parser, identifier )\n\t\t\tend\n\n\t\tend", "def add_records(records)\n atoms = ActiveSupport::OrderedHash.new\n records_count = 
0\n\n records.each do |record|\n next unless allow_indexing?(record)\n records_count += 1\n\n condensed_record = condense_record(record)\n atoms = add_occurences(condensed_record, record.id, atoms)\n end\n\n @storage.add(atoms, records_count)\n end", "def resolve_node_hw_id_collision\n # Get all nodes\n nodes = get_data.fetch_all_objects(:node)\n # This will hold all hw_id's (not unique)'\n all_hw_id = []\n # Take each hw_id and add to our all_hw_id array\n nodes.each { |node| all_hw_id += node.hw_id }\n # Loop through each hw_id\n all_hw_id.each do\n |hwid|\n # This will hold nodes that match\n matching_nodes = []\n # loops through each node\n nodes.each do\n |node|\n # If the hwid is in the node.hw_id array then we add to the matching ndoes array\n matching_nodes << node if (node.hw_id & [hwid]).count > 0\n end\n # If we have more than one node we have a conflict\n # We sort by timestamp ascending\n matching_nodes.sort! { |a, b| a.timestamp <=> b.timestamp }\n # We remove the first one, any that remain will be cleaned of the hwid\n matching_nodes.shift\n # We remove the hw_id from each and persist\n matching_nodes.each do\n |node|\n node.hw_id.delete(hwid)\n node.update_self\n end\n end\n nil\n end", "def resolveVerify\n duplicateGuard = {}\n @records.each_key { |recKey|\n rec = @records[recKey]\n rec.refs.each { |ref|\n ref.resolve self\n preExisting = duplicateGuard[ref.key]\n raise \"Duplicate reference spec: #{r}(#{r.sourceRef}), pre-existing: #{preExisting}(#{preExisting.sourceRef})\" if preExisting\n duplicateGuard[ref.key] = ref\n reKey = ref.toEntity.name\n @reRefs[reKey] = [] unless @reRefs[reKey]\n @reRefs[reKey] << ref\n }\n }\n raise RuntimeError, \"No version defined on #{self}\" unless ver\n self\n end", "def compare_data(table)\n data1 = query(@db1, \"SELECT * FROM #{table}\", \"hash\")\n data2 = query(@db2, \"SELECT * FROM #{table}\", \"hash\")\n \n changes = false\n data1.each do |row|\n if ! data2.include?(row)\n to_insert(@db2_output, table, row)\n changes = true\n end\n end \n @db2_output << \"\\n\" if changes\n \n changes = false\n data2.each do |row|\n if ! 
data1.include?(row)\n to_insert(@db1_output, table, row)\n changes = true\n end\n end\n @db1_output << \"\\n\" if changes\n end", "def replicate_data_before_registration\n sorted_hash_keys = @@dynamo_nodes.sort_by { |_k,v| v.first.second.to_i}.map {|_k,v| v.first.second}\n sorted_hash_keys << @@my_key\n sorted_hash_keys = sorted_hash_keys.sort\n\n hash = Hash[sorted_hash_keys.map.with_index.to_a]\n\n nodes_to_be_replicated = []\n nodes_to_be_replicated << sorted_hash_keys[(hash[@@my_key] + 1 ) % sorted_hash_keys.size]\n nodes_to_be_replicated << sorted_hash_keys[(hash[@@my_key] + 2 ) % sorted_hash_keys.size]\n\n @@dynamo_nodes.each do |ip, data|\n if data.first.second.in?(nodes_to_be_replicated)\n data = JSON.parse(HTTPService.get_request('http://' + ip.to_s + '/node/get_data').body)['response']\n data.each do |key, value|\n store_value_simply(key, value)\n end\n end\n end\n end", "def result_maker(records)\n sorted_records = records.sort { |x,y| x.voltas[x.voltas.keys.last][:hora] <=> y.voltas[y.voltas.keys.last][:hora] }\n\n results = {}\n position_counter = 0\n\n sorted_records.each do |racer|\n position_counter += 1\n results.store(\"#{position_counter}\", racer)\n end\n\n return results\n end", "def diff_zone_file(zone, records)\n #Compare dumped strings directly instead of RR objects\n zone_records = zone.records.map{ |rec|\n rec.dump\n }\n recs = records.map{ |rec|\n rec.dump\n }\n removed_records = zone_records - recs\n added_records = recs - zone_records\n if $options[:verbose]\n if removed_records.any?\n puts \"Removed records in zone file: #{zone.file_path}\"\n removed_records.each{ |rec|\n puts rec\n }\n end\n if added_records.any?\n puts \"Added records in zone file: #{zone.file_path}\"\n added_records.each{ |rec|\n puts rec\n }\n end\n end\n return added_records.any? 
|| removed_records.any?\nend", "def id_name_map2(records)\n records.inject({}) do |map, record|\n map.update(record['hostid'] => record['name'])\n end\n end", "def node_hash_for(node, statements, hashes)\n statement_signatures = []\n grounded = true\n statements.each do | statement |\n if statement.to_quad.include?(node)\n statement_signatures << hash_string_for(statement, hashes, node)\n statement.to_quad.compact.each do | resource |\n grounded = false unless grounded?(resource, hashes) || resource == node\n end\n end\n end\n # Note that we sort the signatures--without a canonical ordering, \n # we might get different hashes for equivalent nodes.\n [grounded,Digest::SHA1.hexdigest(statement_signatures.sort.to_s)]\n end", "def hash\n [host_list, total_matching, total_returned].hash\n end", "def add_records(records)\n records.each do |r|\n condensed_record = condense_record(r)\n load_atoms(condensed_record)\n add_occurences(condensed_record,r.id)\n @records_size += 1\n end\n end", "def compare(f1, f2)\n data = []\n data[0] = JSON.parse File.read f1\n data[1] = JSON.parse File.read f2\n data.each_with_index do |d, i|\n if d.count == 0\n puts \"No data in ##{i+1} file\"\n exit 1\n end\n data[i] = d.map { |row| row['_source'] }\n end\n all = {}\n all_keys = []\n data.each_with_index do |d, i|\n all_keys[i] = {}\n d.each_with_index do |row, r|\n ks = row.keys.sort\n ks.each do |k| \n all_keys[i][k] = 0 unless all_keys[i].key?(k)\n all_keys[i][k] += 1\n all[k] = 0 unless all.key?(k)\n all[k] += 1\n end\n end\n end\n ks = all.keys.sort.join(',')\n ks1 = all_keys[0].keys.sort\n ks2 = all_keys[1].keys.sort\n if ks1 != ks2\n puts \"WARNING: different key sets:\\n#{ks1}\\nnot equal to:\\n#{ks2}\"\n end\n vals1 = all_keys[0].values.uniq\n vals2 = all_keys[1].values.uniq\n puts \"Unique key presence counts in 1st file: #{vals1}\"\n puts \"Unique key presence counts in 2nd file: #{vals2}\"\n skip_keys = ENV['SKIP_KEYS']\n if skip_keys.nil? || skip_keys == ''\n skip_keys = 'metadata__updated_on,metadata__timestamp,metadata__enriched_on'\n elsif skip_keys == \"-\"\n skip_keys = ''\n end\n skip_keys = skip_keys.split(',').map(&:strip)\n skip = {}\n skip_keys.each do |k|\n skip[k] = true\n end\n keys = ENV['KEYS']\n if keys.nil? 
|| keys == ''\n puts \"You should specify keys to check via KEYS='key1,key2,...,keyN', available keys:\\n#{ks}\"\n puts \"You can also specify special value ALLKEYS\"\n puts \"You can specify non-standard keys to skip, default are: metadata__updated_on,metadata__timestamp,metadata__enriched_on\"\n puts \"To specify them use SKIP_KEYS='key1,key2,...,keyN', use 'SKIP_KEYS=- to disable skipping anything\"\n puts \"Will use default key: grimoire_creation_date\"\n keys = 'grimoire_creation_date'\n end\n if keys == 'ALLKEYS'\n keys = ks.split(',')\n else\n keys = keys.split(',').map(&:strip)\n end\n diff = 0\n keys.each do |k|\n next if skip.key?(k)\n values = []\n data.each_with_index do |d, i|\n values[i] = {}\n d.each_with_index do |row, r|\n v = (row[k] || '(nil)').to_s\n values[i][v] = true\n end\n end\n miss1 = {}\n miss2 = {}\n values[1].keys.each do |k|\n unless values[0].key?(k)\n miss1[k] = true\n end\n end\n values[0].keys.each do |k|\n unless values[1].key?(k)\n miss2[k] = true\n end\n end\n if miss1.count > 0 || miss2.count > 0\n puts \"Key: #{k}\"\n diff += 1\n end\n if miss1.count > 0\n puts \"Values from 2nd file missing in 1st file: #{miss1.keys.sort.join(',')}\"\n end\n if miss2.count > 0\n puts \"Values from 1st file missing in 2nd file: #{miss2.keys.sort.join(',')}\"\n end\n end\n puts \"Differences on #{diff} keys specified to check\"\nend", "def correct_node_hash(node_id)\n SpStore::Crypto.hash_for_tree_node node_id, node_hash(left_child(node_id)),\n node_hash(right_child(node_id))\n end", "def compare\n\t\t\t\n\t\t\tDir.foreach(@folder1) do |item|\n\t\t\t\tbegin\n \t\t\t\tnext if item == '.' or item == '..'\n \t\t\t\tfullfilename = File.expand_path(@folder1, item)\n \t\t\t\tthe_hash = Digest::MD5.hexdigest(File.read(File.join(File.expand_path(@folder1), item)))\n \t\t\t\titem = item.downcase\n \t\t\t\tfiledata = FileHashResults.new(item, the_hash, nil)\n \t\t\t\t@filehash[item] = filedata\n \t\t\trescue\n \t\t\t #puts \"Skipped:#{item.inspect}\"\n \t\t\tend\n\t\t\tend\n\n\t\t\tDir.foreach(@folder2) do |item|\n\t\t\t begin\n \t\t\t\tnext if item == '.' 
or item == '..'\n \t\t\t\tthe_hash = Digest::MD5.hexdigest(File.read(File.join(@folder2, item)))\n \t\t\t\titem = item.downcase\n if(@filehash[item]==nil)\n \t\t\t\t\tfiledata = FileHashResults.new(item, nil, the_hash)\n \t\t\t\t\t@filehash[item] = filedata\n \t\t\t\t\tnext\n \t\t\t\tend\n \t\t\t\t@filehash[item.downcase].file_hash2 = the_hash\n \t\t\trescue\n #puts \"Skipped:#{item.inspect}\"\n end\t\n\t\t\tend\n\t\tend", "def wanted_records(records, domains)\n _records = records.select { |m| domains.include?(m[1]) }\n _records = _records.each_cons(2).select { |a, b| a.last == b.last }\n _records = _records.group_by(&:last).keys.map do |v|\n { record_id: v.first, ip_address: v.last }\n end\n _records\n end", "def update_all records\n array = []\n records.each do |id, r|\n params = {}\n r.each do |k, v|\n params[k] = {value: v}\n end\n array.push({\n id: id,\n record: params\n })\n end\n puts \"update #{array.count} records...\"\n while array.present?\n a100 = array.shift(100)\n @api.records.update(@app_id, a100)\n end\n {}\n end", "def collect_and_compare_queries!(tcpdump_time=30, *compare_nodes)\n # Sample traffic and convert to slowlog for master\n master_dump_filename = master.tcpdump!(tcpdump_time)\n local = Host.local # node where we're running Jetpants from\n local.ssh_cmd \"mkdir -p #{Jetpants.export_location}\"\n master.fast_copy_chain(Jetpants.export_location, local, files: master_dump_filename, overwrite: true)\n master.ssh_cmd \"rm #{Jetpants.export_location}/#{master_dump_filename}\"\n master_slowlog_path = local.dumpfile_to_slowlog(\"#{Jetpants.export_location}/#{master_dump_filename}\")\n \n # If we also have an active slave running, grab sampled slowlog from there too\n active_slowlog_path = nil\n if active_slaves.size > 0\n active_slave = active_slaves.first\n active_dump_filename = active_slave.tcpdump!(tcpdump_time)\n active_slave.fast_copy_chain(Jetpants.export_location, local, files: active_dump_filename, overwrite: true)\n active_slave.ssh_cmd \"rm #{Jetpants.export_location}/#{active_dump_filename}\"\n active_slowlog_path = local.dumpfile_to_slowlog(\"#{Jetpants.export_location}/#{active_dump_filename}\")\n end\n \n # Gather our comparison nodes\n if compare_nodes.size == 0\n higher_ver_standby = standby_slaves.select {|s| s.version_cmp(master) > 0}.first\n same_ver_standby = standby_slaves.select {|s| s.version_cmp(master) == 0}.first\n if higher_ver_standby && same_ver_standby\n compare_nodes = [same_ver_standby, higher_ver_standby]\n else\n compare_nodes = standby_slaves[0, 2]\n end\n end\n \n # Disable monitoring on our comparison nodes, and then stop replication\n # at the same position. 
We only proceed with this if we're comparing\n # exactly two nodes; this may be improved in a future release.\n if compare_nodes.size == 2\n compare_nodes.each {|n| n.disable_monitoring}\n compare_nodes.first.pause_replication_with(compare_nodes.last)\n end\n \n # Run pt-upgrade using the master dumpfile\n puts\n output \"COMPARISON VIA QUERY LOG FROM MASTER\"\n compare_queries(master_slowlog_path, true, *compare_nodes)\n \n if active_slowlog_path\n puts\n output \"COMPARISON VIA QUERY LOG FROM ACTIVE SLAVE\"\n compare_queries(active_slowlog_path, true, *compare_nodes)\n end\n \n # If we previously paused replication and disabled monitoring, un-do this\n if compare_nodes.size == 2\n compare_nodes.concurrent_each do |n| \n n.resume_replication\n n.catch_up_to_master\n n.enable_monitoring\n end\n end\n end", "def record_hash(record, fieldset, params = {})\n if cached\n record_hash = Rails.cache.fetch(record.cache_key, expires_in: cache_length, race_condition_ttl: race_condition_ttl) do\n temp_hash = id_hash(id_from_record(record), record_type, true)\n temp_hash = temp_hash.merge(attributes_hash(record, fieldset, params)) if attributes_to_serialize.present?\n temp_hash[:relationships] = {}\n temp_hash[:relationships] = relationships_hash(record, cachable_relationships_to_serialize, fieldset, params) if cachable_relationships_to_serialize.present?\n temp_hash[:links] = links_hash(record, params) if data_links.present?\n temp_hash\n end\n record_hash[:relationships] = record_hash[:relationships].merge(relationships_hash(record, uncachable_relationships_to_serialize, fieldset, params)) if uncachable_relationships_to_serialize.present?\n record_hash[:meta] = meta_hash(record, params) if meta_to_serialize.present?\n record_hash\n else\n record_hash = id_hash(id_from_record(record), record_type, true)\n record_hash = record_hash.merge(attributes_hash(record, fieldset, params)) if attributes_to_serialize.present?\n record_hash[:relationships] = relationships_hash(record, nil, fieldset, params) if relationships_to_serialize.present?\n record_hash[:links] = links_hash(record, params) if data_links.present?\n record_hash[:meta] = meta_hash(record, params) if meta_to_serialize.present?\n record_hash\n end\n end", "def node_hash(node_id)\n \n end", "def run( nodes )\n\t\t\tself.log.debug \"Got %d nodes to check with %p\" % [ nodes.length, self ]\n\t\t\tlookups = self.create_lookups( nodes )\n\t\t\treturn self.wait_for_responses( lookups, nodes )\n\t\tend", "def atomic_updates(*args)\n r = super(*args)\n if @records\n (r['$set'] ||= {})['records'] = serialize_records\n end\n r\n end", "def update_records\n # Get all line items include redone body and it's serial number.\n line_items = AxOrderLineItem.find(:all, :conditions => ['item_id = ? 
and sales_item_reservation_number <> ?', Product::REDONE_ERP_PRODUCT_ITEM, ''], :include => :ax_order)\n \n # Get all serial numbers from ax order line items.\n serial_numbers = line_items.map(&:serial_number).sort_by { |i| i.to_i }\n \n # Calc all new serial numbers.\n new_serial_numbers = serial_numbers - self.find(:all).map(&:serial_number)\n \n # Add new serial numbers to database.\n new_serial_numbers.each do |serial_number|\n line_item = line_items.find {|i| i.serial_number == serial_number}\n self.create(\n :ax_account_number => line_item.ax_account_number,\n :ax_account_id => line_item.ax_account_id,\n :ax_order_number => line_item.ax_order_number,\n :ax_order_id => line_item.ax_order_id,\n :email_address => line_item.email_address,\n :first_name => line_item.first_name,\n :last_name => line_item.last_name,\n :serial_number => line_item.serial_number,\n :purch_order_form_num => line_item.purch_order_form_num\n )\n end\n \n # Update exist but not sent records data up to date.\n self.find(:all, :conditions => ['sent_mail = ?', false]).each do |item|\n if line_item = line_items.find {|i| i.serial_number == item.serial_number}\n item.update_attributes(\n :ax_account_number => line_item.ax_account_number,\n :ax_account_id => line_item.ax_account_id,\n :ax_order_number => line_item.ax_order_number,\n :ax_order_id => line_item.ax_order_id,\n :email_address => line_item.email_address,\n :first_name => line_item.first_name,\n :last_name => line_item.last_name,\n :serial_number => line_item.serial_number,\n :purch_order_form_num => line_item.purch_order_form_num\n ) unless compare_equal?(item, line_item)\n end\n end\n end", "def compare(base_data, external_data)\n external_data[:product].each do |ex_prd|\n base_prd_id = base_data[:product_matched][ex_prd[:id]]\n changed = nil\n\n if base_prd_id\n # Product exists\n base_prd = base_data[:product].find {|p| p[:id] == base_prd_id }\n changed = field_compare(base_prd, ex_prd, base_data[:category_matched])\n @result[:update] << changed if changed\n else\n # Product is new\n changed = ex_prd.dup\n @result[:new] << changed\n end\n\n next unless changed\n # Product Special Rules\n has_dummy_variant = ex_prd[:variants].any? {|v| v[:option_values].empty? }\n changed.delete(:variants) if has_dummy_variant\n changed[:ex_options] = ex_prd[:variants].map {|i| [i[:sku], i[:option_values].first[:label]] }.to_h if changed[:variants]\n changed[:categories] = external_data[:category].find {|c| c[:id] == changed[:categories] }&.dig(:name)\n changed[:inventory_tracking] = if ex_prd[:variants].all? {|i| i[:inventory_level].nil? 
}\n 'none'\n elsif ex_prd[:variants].size == 1\n 'product'\n else\n 'variant'\n end\n changed[:inventory_level] = 0\n changed[:inventory_level] = ex_prd[:variants].first[:inventory_level] if changed[:inventory_tracking] == 'product'\n end\n @result[:update].each {|i| clean_fields(i) }\n @result\n end", "def collect_node_nei_hashes\n @log.info(\"#{__method__.to_s} started[#{self.class.to_s}]\")\n\n node_nei_hash = @redis_connector.fetch_relations\n end", "def resolve(dns_records, lookup_chain, domain)\n record_A = dns_records[:A]\n record_CNAME = dns_records[:CNAME]\n\n #if domain in recordA\n #add destination to lookup_chain\n if record_A[:source].include?(domain)\n lookup_chain.push(record_A[:ip][record_A[:source].index(domain)])\n\n #if domain in recordCNAME\n #add destination to lookup_chain\n #update domain with destination\n #call the funtion again with new domain(Recursion)\n elsif record_CNAME[:source].include?(domain)\n lookup_chain.push(record_CNAME[:alias][record_CNAME[:source].index(domain)])\n domain = record_CNAME[:alias][record_CNAME[:source].index(domain)]\n resolve(dns_records, lookup_chain, domain)\n else\n return lookup_chain\n end\nend", "def match_records(records)\n records.select do |record|\n conditions.matches?(record)\n end\n end", "def ret_new_objs_info(field_set_to_copy, create_override_attrs)\n ret = []\n ancestor_rel_ds = array_dataset(parent_rels, :target)\n\n # all parent_rels will have same cols so taking a sample\n remove_cols = [:ancestor_id, :display_name, :type, :ref, :canonical_template_node_id] + parent_rels.first.keys\n node_template_fs = field_set_to_copy.with_removed_cols(*remove_cols).with_added_cols(id: :node_template_id)\n node_template_wc = nil\n node_template_ds = Model.get_objects_just_dataset(model_handle, node_template_wc, Model::FieldSet.opt(node_template_fs))\n\n target_id = parent_rels.first[:datacenter_datacenter_id]\n sp_hash = {\n cols: [:id, :display_name, :type, :iaas_type],\n filter: [:eq, :id, target_id]\n }\n target = Model.get_obj(model_handle.createMH(:target), sp_hash)\n\n # mapping from node stub to node template and overriding appropriate node template columns\n unless matches.empty?\n ndx_node_matches = NodeMatch.ndx_node_matches(matches)\n mappings = ndx_node_matches.values.map{ |m| m.mapping}\n mapping_ds = array_dataset(mappings, :mapping)\n\n select_ds = ancestor_rel_ds.join_table(:inner, node_template_ds).join_table(:inner, mapping_ds, [:node_template_id])\n ret = Model.create_from_select(model_handle, field_set_to_copy, select_ds, create_override_attrs, create_opts)\n\n # update any external refs if any are set in ndx_node_matches\n update_external_refs!(ret, ndx_node_matches)\n ret.each do |r|\n if node_match = ndx_node_matches[r[:display_name]]\n r[:node_template_id] = node_match.mapping[:node_template_id]\n r.merge!(Aux.hash_subset(node_match.node, [:donot_clone, :target_refs_to_link, :target_refs_exist]))\n end\n end\n end\n ret\n end", "def possible_duplicates\n @duplicates = {}\n if last_name.present?\n last_name_duplicates = Person.where(last_name: last_name).where.not(id: id)\n last_name_duplicates.each do |duplicate|\n duplicate_hash = {}\n duplicate_hash['person'] = duplicate\n duplicate_hash['match_count'] = 1\n duplicate_hash['last_name_match'] = true\n duplicate_hash['matches_on'] = ['Last Name']\n @duplicates[duplicate.id] = duplicate_hash\n end\n end\n if email_address.present?\n email_address_duplicates = Person.where(email_address: email_address).where.not(id: id)\n email_address_duplicates.each do 
|duplicate|\n if @duplicates.key? duplicate.id\n @duplicates[duplicate.id]['match_count'] += 1\n @duplicates[duplicate.id]['matches_on'].push('Email Address')\n else\n @duplicates[duplicate.id] = {}\n @duplicates[duplicate.id]['person'] = duplicate\n @duplicates[duplicate.id]['match_count'] = 1\n @duplicates[duplicate.id]['matches_on'] = ['Email Address']\n end\n @duplicates[duplicate.id]['email_address_match'] = true\n end\n end\n if phone_number.present?\n phone_number_duplicates = Person.where(phone_number: phone_number).where.not(id: id)\n phone_number_duplicates.each do |duplicate|\n if @duplicates.key? duplicate.id\n @duplicates[duplicate.id]['match_count'] += 1\n @duplicates[duplicate.id]['matches_on'].push('Phone Number')\n else\n @duplicates[duplicate.id] = {}\n @duplicates[duplicate.id]['person'] = duplicate\n @duplicates[duplicate.id]['match_count'] = 1\n @duplicates[duplicate.id]['matches_on'] = ['Phone Number']\n end\n @duplicates[duplicate.id]['phone_number_match'] = true\n end\n end\n if address_1.present?\n address_1_duplicates = Person.where(address_1: address_1).where.not(id: id)\n address_1_duplicates.each do |duplicate|\n if @duplicates.key? duplicate.id\n @duplicates[duplicate.id]['match_count'] += 1\n @duplicates[duplicate.id]['matches_on'].push('Address_1')\n else\n @duplicates[duplicate.id] = {}\n @duplicates[duplicate.id]['person'] = duplicate\n @duplicates[duplicate.id]['match_count'] = 1\n @duplicates[duplicate.id]['matches_on'] = ['Address_1']\n end\n @duplicates[duplicate.id]['address_1_match'] = true\n end\n end\n @duplicates\n end", "def update_site_data_catalog_variables(variables)\n variables.each do |var|\n #entry = Voeis::SiteDataCatalog.first_or_create(:site_id => self.id, :variable_id => var.id)\n #sql = \"SELECT data_value_id FROM voeis_data_value_variables WHERE variable_id = #{var.id} INTERSECT SELECT data_value_id FROM voeis_data_value_sites WHERE site_id = #{self.id}\"\n sql = \"SELECT id FROM voeis_data_values WHERE variable_id = #{var.id} AND site_id = #{self.id}\"\n results = repository.adapter.select(sql)\n if results.length > 0\n entry = Voeis::SiteDataCatalog.first_or_create(:site_id => self.id, :variable_id => var.id)\n entry.record_number = results.length\n sql = \"SELECT * FROM voeis_data_values WHERE id IN #{results.to_s.gsub('[','(').gsub(']',')')} ORDER BY local_date_time\"\n dresults = repository.adapter.select(sql)\n entry.starting_timestamp = dresults.first[:local_date_time]#(var.data_values & self.data_values).first(:order=>[:local_date_time]).local_date_time\n entry.ending_timestamp = dresults.last[:local_date_time] #(var.data_values & self.data_values).last(:order=>[:local_date_time]).local_date_time\n entry.valid?\n puts entry.errors.inspect()\n entry.save!\n end\n end #end each\n end", "def match_node_to_res(node, i)\n n = Marshal.load(Marshal.dump(@base_node))\n puts \"new node: '#{n.inspect}'\"\n n[:name] = node['hostname']\n n[:hostname] = node['hostname']\n n[:urn] = \"#{BASE_URN}#{node['hostname']}\"\n n[:interfaces_attributes].each do |interface|\n if interface[:role] == \"control\"\n interface[:name] = node['hostname'] + \":if0\"\n interface[:ips_attributes].first[:address] = node['control_ip']\n interface[:mac] = node['control_mac']\n elsif interface[:role] == \"experimental\"\n interface[:name] = node['hostname'] + \":if1\"\n interface[:mac] = calc_mac(node['control_mac'])\n end\n end\n n[:cmc_attributes][:name] = node['hostname'] + \":cm\"\n n[:cmc_attributes][:mac] = calc_cmc_mac(n[:cmc_attributes][:mac], i)\n 
n[:cmc_attributes][:ip_attributes][:address] = calc_cmc_ip(node['control_ip'])\n\n if i >= 2 && i <= 9\n domain = OUTDOOR_DOMAIN\n additional_info = Marshal.load(Marshal.dump(@orbit_info))\n elsif i == 1 || i == 10 || (i >= 14 && i <= 35)\n domain = OUTDOOR_DOMAIN\n additional_info = Marshal.load(Marshal.dump(@grid_info))\n elsif i >= 36 && i <= 40\n domain = OFFICE_DOMAIN\n additional_info = Marshal.load(Marshal.dump(@diskless_info))\n elsif i >= 41 && i <= 49\n domain = OFFICE_DOMAIN\n additional_info = Marshal.load(Marshal.dump(@icarus_info))\n elsif i >= 50 && i <= 85\n domain = INDOOR_DOMAIN\n additional_info = Marshal.load(Marshal.dump(@icarus_info)) \n else\n return n\n end\n n[:domain] = domain\n n[:urn] = n[:urn].sub('omf:nitos', domain)\n n.merge!(additional_info)\n n\nend", "def computeResourceUpdate(app_id, run_id, rid)\n resources = nutella.f.persist.get_run_mongo_object_store(app_id, run_id, 'resources')\n\n resource = resources[rid]\n\n if resource != nil\n if resource['proximity'] != nil\n if resource['proximity']['rid'] != nil\n baseStation = resources[resource['proximity']['rid']]\n\n if baseStation != nil && baseStation['continuous'] != nil\n resource['proximity']['continuous'] = baseStation['continuous']\n\n # Update basic station\n computeResourceUpdate(app_id, run_id, resource['proximity']['rid'])\n end\n\n if baseStation != nil && baseStation['discrete'] != nil\n resource['proximity']['discrete'] = baseStation['discrete']\n end\n end\n end\n\n resources[rid] = resource\n\n=begin\n if resource['continuous'] != nil\n counter = 0 # Number of proximity beacons tracked from this station\n for r in $resources.keys()\n resource2 = $resources[r]\n if resource2['proximity'] != nil && resource2['proximity']['rid'] == resource['rid']\n counter += 1\n resource2['proximity']['continuous'] = resource['continuous']\n $resources[r] = resource2\n publishResourceUpdate(resource2)\n end\n end\n puts counter\n resource['number_resources'] = counter\n end\n=end\n\n # Translate discrete coordinate\n if resource['discrete'] != nil\n resource['discrete'] = translateDiscreteCoordinates(app_id, run_id, resource['discrete'])\n end\n\n # Send update\n publishResourceUpdate(app_id, run_id, resource)\n\n end\nend", "def collect_records(matches, new_collection)\n matches.each do |match|\n next if new_collection.matching_records('record_id', match.record_id).length > 1\n new_collection.push(match)\n end\n end", "def find_nodes_to_verify(references)\n nodes = {}\n\n references.each do |uri, _digest_value|\n uri = uri.sub(/^#/, '')\n node = find_node_by_uri(uri)\n\n nodes[uri] = calculate_digest(node)\n end\n\n nodes\n end", "def set(recs, data)\r\n # If updates are not in the form of a Proc, convert updates, which\r\n # could be an array, a hash, or a Struct into a common format (i.e.\r\n # hash).\r\n update_rec = convert_input_data(data) unless data.is_a?(Proc)\r\n\r\n updated_recs = []\r\n\r\n # For each one of the recs that matched the update query, apply the\r\n # updates to it and write it back to the database table.\r\n recs.each do |rec|\r\n temp_rec = rec.dup\r\n\r\n if data.is_a?(Proc)\r\n begin\r\n data.call(temp_rec)\r\n rescue NoMethodError\r\n raise 'Invalid field name in code block: %s' % $!\r\n end\r\n else\r\n @field_names.each { |fn| temp_rec[fn] = update_rec.fetch(fn,\r\n temp_rec.send(fn)) }\r\n end\r\n\r\n # Is the user trying to change something they shouldn't?\r\n raise 'Cannot update recno field!' 
unless \\\r\n rec.recno == temp_rec.recno\r\n raise 'Cannot update internal fpos field!' unless \\\r\n rec.fpos == temp_rec.fpos\r\n raise 'Cannot update internal line_length field!' unless \\\r\n rec.line_length == temp_rec.line_length\r\n\r\n # Are the data types of the updates correct?\r\n validate_input(temp_rec)\r\n\r\n check_required_fields(temp_rec)\r\n\r\n check_against_input_for_specials(temp_rec)\r\n\r\n # Apply updates to the record and add it to an array holding\r\n # updated records. We need the fpos and line_length because\r\n # the engine will use them to determine where to write the\r\n # update and whether the updated record will fit in the old\r\n # record's spot.\r\n updated_recs << { :rec => @field_names.zip(@field_types\r\n ).collect { |fn, ft| convert_to_encoded_string(ft,\r\n temp_rec.send(fn)) }, :fpos => rec.fpos,\r\n :line_length => rec.line_length }\r\n \r\n\r\n # Update any associated blob/memo fields.\r\n temp_rec.each { |r| r.write_to_file if r.is_a?(KBMemo) } if \\\r\n @field_types.include?(:Memo)\r\n temp_rec.each { |r| r.write_to_file if r.is_a?(KBBlob) } if \\\r\n @field_types.include?(:Blob)\r\n end\r\n\r\n # Take all of the update records and write them back out to the\r\n # table's file.\r\n @db.engine.update_records(self, updated_recs)\r\n\r\n # Return the number of records updated.\r\n return recs.size\r\n end", "def transform_hosts(hosts)\n require 'time'\n\n node_data = []\n\n hosts.each do |host|\n if host[:report_timestamp].nil?\n # This can happen in weird cases. Mark as an expired node, so\n # the expired logic doesn't try to do math on a nil timestamp.\n last_checkin = nil\n formatted_checkin = 'N/A'\n host[:expired] = nil\n else\n last_checkin = Time.now - Time.parse(host[:report_timestamp])\n formatted_checkin = sprintf(\"%#{@options.round_to}f\",(last_checkin * @options.divisor).abs)\n end\n node_data << {\n :last_checkin => last_checkin,\n :expired => host[:expired].nil? ? false : host[:expired],\n :certname => host[:certname],\n :environment => host[:report_environment].nil? ? 'N/A' : host[:report_environment],\n :status => host[:latest_report_status].nil? ? 'N/A' : host[:latest_report_status],\n :formatted_checkin => formatted_checkin\n }\n end\n\n unless @options.environments.empty?\n node_data.delete_if {|node| not @options.environments.include? node[:environment] }\n end\n unless @options.statuses.empty?\n node_data.delete_if {|node| not @options.statuses.include? node[:status] }\n end\n\n node_data\n end", "def serialize_data(records)\n records.map { |r|\n data_adapter.record_to_hash(r, final_columns(:with_meta => true)).tap { |h|\n\n config[:extra_fields].each do |f|\n name = f[:name].underscore.to_sym\n h[name] = send(\"#{name}#{f[:type] == 'boolean' ? '?' 
: ''}\", r)\n end\n\n inline_children = get_inline_children(r)\n h[:data] = serialize_data(inline_children) unless inline_children.nil?\n h\n }\n }\n end", "def deep_update(*other_hashes, &blk); end", "def compute_checksums data\n\n digest = OpenSSL::Digest::Digest.new('sha256')\n tree_digest = OpenSSL::Digest::Digest.new('sha256')\n tree_parts = []\n\n until data.eof?\n\n chunk = data.read(1024 * 1024) # read 1MB\n tree_parts << tree_digest.update(chunk).digest\n tree_digest.reset\n\n digest.update(chunk)\n\n end\n\n data.rewind\n\n [digest.to_s, compute_tree_hash(tree_parts)]\n\n end", "def deepCompare(name, hash1, hash2, failFast, errors=0)\n\t\t\tif hash1.is_a?(Hash)\n\t\t\t\tif hash2.nil?\n\t\t\t\t\tlogger(1,\"DeepCompare: #{name}, No Match '#{hash1}' vs 'nil'\")\n\t\t\t\t\[email protected](\"DeepCompare: #{name}, No Match '#{hash1}' vs 'nil'\")\n\t\t\t\t\tfailFast ? (return false) : (return errors+1)\n\t\t\t\tend\n\t\t\t\thash1.each do |key,value|\n\t\t\t\t\tif ( hash2.include?(key) )\n\t\t\t\t\t\tresult = deepCompare(\"#{name}:#{key}\", value, hash2[key], failFast, errors)\n\t\t\t\t\t\t( failFast and (!result) ) ? (return false) : errors = result\n\t\t\t\t\telse\n\t\t\t\t\t\tlogger(1,\"DeepCompare: #{name}, Missing Key '#{key}'\")\n\t\t\t\t\t\[email protected](\"DeepCompare: #{name}, Missing Key '#{key}'\")\n\t\t\t\t\t\tfailFast ? (return false) : (return errors+1)\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\tfailFast ? (return true) : (return errors)\n\t\t\telsif hash1.is_a?(Array)\n\t\t\t\t# Check array lengths match\n\t\t\t\tif hash1.length != hash2.length\n\t\t\t\t\tlogger(1, \"DeepCompare: #{name}, Size differs: #{hash1.length} vs #{hash2.length}\")\n\t\t\t\t\[email protected](\"DeepCompare: #{name}, Size differs: #{hash1.length} vs #{hash2.length}\")\n\t\t\t\t\tfailFast ? (return false) : (return errors+1)\n\t\t\t\tend\n\t\t\t\t# sort arrays before comparing\n\t\t\t\tsort1 = hash1.sort_by { |h| h.to_s }\n\t\t\t\tsort2 = hash2.sort_by { |h| h.to_s }\n\t\t\t\tsort1.each do |item1|\n\t\t\t\t\titem2 = sort2.shift\n\t\t\t\t\tresult = deepCompare(\"#{name}:#{item1}\", item1, item2, failFast, errors)\n\t\t\t\t\t( failFast and (!result) ) ? ( return false ) : errors = result\n\t\t\t\tend\t\n\t\t\t\tfailFast ? ( return true ) : ( return errors )\n\t\t\telse\n\t\t\t\tif ( hash1 != hash2 )\n\t\t\t\t\tlogger(1,\"DeepCompare: #{name}, No Match '#{hash1}' vs '#{hash2}'\")\n\t\t\t\t\[email protected](\"DeepCompare: #{name}, No Match: #{hash1} vs #{hash2}\")\n\t\t\t\t\tfailFast ? (return false) : (return errors+1)\n\t\t\t\tend\n\t\t\tend\n\t\t\tfailFast ? 
(return true) : (return errors)\n\t\tend", "def update_if_necessary(new_contact_record)\n matched = []\n updated_details = []\n self.first_name = new_contact_record.first_name\n self.last_name = new_contact_record.last_name\n \n new_contact_record.details.each do |ncd|\n details.each do |ocd|\n matched << [ocd,ncd] and break if ocd.matches?(ncd)\n end\n end\n # Update the matched records if necessary\n matched.each do |ocd,ncd|\n if not ocd.update_if_necessary(ncd).empty?\n updated_details << ocd\n end\n end\n # Also add the new contact details that we are adding\n # We need to dup to make sure we don't overwrite the original\n \n (new_contact_record.details - matched.column(1)).each do |ncd| \n self.details << ncd.dup\n updated_details << self.details.last\n end \n updated_details\n end", "def dns_check\n gen_host_records # These are the hosts we have\n load_all_subnets # These are the DNS entries\n \n # We want a standard layout, with the hypervisor API entries being \n @host_record.each do |hr| # Array of host record Hash's\n hn = hr[:hostname]\n shn = hn.split('.',2)[0] # Remove the domain\n forward_hr = @forward_host_record[hn] # Find Host Record\n if forward_hr.nil?\n # We have no IPAM entry for this hostname\n if (rhr = @reverse_host_records[hr[:ip]])\n puts \"Only Reverse IPAM entry for #{shn}: #{rhr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n else\n puts \"No IPAM entry for hostrecord: #{hr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n end\n else\n # We have an IPAM record for this hostname\n if forward_hr[:ip] != hr[:ip]\n puts \"IP mismatch #{shn} #{hr[:ip]} != #{forward_hr[:ip]} for IPAM: #{forward_hr}\"\n elsif forward_hr[:hostname] != hn\n # Reference must be via ALIASES or CNAMES\n if forward_hr[:aliases].include?(shn)\n puts \"Hostname #{shn} is an ALIAS. IPAM: #{forward_hr}\"\n elsif forward_hr[:cnames].include?(hn)\n puts \"Hostname #{shn} is a CNAME. 
IPAM: #{forward_hr}\"\n end\n end\n end\n end\n \n # We want to find IPAM entries, not matching existing @host_record entries\n @reverse_host_records.each do |ip, ahr| # Hash to array of host records from IPAM, indexed by IP\n ahr.each do |hr| # One IP can have multiple host records, with associated ALIAS and CNAME records\n local_hr = @host_record_index[hr[:hostname]]\n if local_hr.nil?\n puts \"No local entry #{hr[:hostname]} for #{hr}\"\n end\n end\n end\nend", "def for_top_level_records(records)\n result = {}\n\n records.each do |record|\n result[record] = repository_published?(record[:repository_id]) && record.publish == 1 && record.suppressed == 0\n end\n\n result\n end", "def node_match?(other_node, self_node)\n keys_with_expected_diffs = ['facts_timestamp', 'catalog_timestamp']\n same_num_elements?(other_node, self_node) && same_contents?(other_node, self_node, keys_with_expected_diffs)\n end", "def get_and_update_node_state!(node, attribute_names)\n ret = {}\n external_ref = node[:external_ref]\n external_ref_changed = false\n attribute_names.each do |attribute_name|\n case attribute_name\n when :host_addresses_ipv4\n external_ref_changed = true if NodeState.update_host_addresses_ipv4!(ret, external_ref, node) \n when :fqdn\n external_ref_changed = true if NodeState.update_fqdn!(ret, external_ref, node) \n else\n Log.error(\"Not treating update of BOSH node attribute '#{attribute_name}'\")\n end\n end\n node.update(external_ref: external_ref) if external_ref_changed\n ret \n end", "def node_info(feed_ids, last_updated)\n node_info_path = '/var/puppetlabs/data/node_info/validated'\n node_info = {}\n\n lookup_ids = feed_ids.map { |f| \"#{node_info_path}/#{f}~*\" }\n node_info_files = Dir.glob(lookup_ids)\n return node_info if node_info_files.empty?\n\n if last_updated\n agent_last_updated = Time.parse(last_updated)\n source_last_updated = node_info_files.map { |f| [File.mtime(f)] }.sort.last[0]\n return node_info if agent_last_updated > source_last_updated\n end\n\n node_filename_pattern = %r{^(?<filename>[^~]+)~(?<feed_type>[^.]+).(?<file_type>\\w+)}\n node_info_files.each do |node_filename|\n file_data = node_filename_pattern.match(node_filename)\n unless file_data\n Puppet.warning \"node_info: #{node_filename} invalid filename pattern\"\n next\n end\n begin\n if file_data[:file_type] == 'json'\n file_data_hash = JSON.parse(File.read(node_filename))\n elsif file_data[:file_type] == 'yaml'\n file_data_hash = YAML.safe_load(File.read(node_filename))\n end\n rescue Exception => e # rubocop:disable Lint/RescueException\n Puppet.warning \"node_info: unable to parse data #{e}\"\n file_data_hash = {}\n end\n node_info[file_data[:feed_type]] = file_data_hash\n end\n node_info\n end", "def map_nodes_for_events(events_diff, handlers)\n nodes_events = {}\n events_diff.each do |object, events|\n events.each do |event, nodes|\n handlers.keys.each do |ip|\n if nodes.include?(ip)\n nodes_events[ip] ||= {}\n nodes_events[ip][object] ||= []\n nodes_events[ip][object].push(event)\n break\n end\n end\n end\n end\n nodes_events\n end", "def matching_data_files params_only=false\n \n results = {}\n\n if Seek::Config.solr_enabled && is_jws_supported?\n search_terms = parameters_and_values.keys\n unless params_only\n search_terms = search_terms | species | searchable_tags | organism_terms\n end\n #make the array uniq! 
case-insensistive whilst mainting the original case\n dc = []\n search_terms = search_terms.inject([]) do |r,v|\n unless dc.include?(v.downcase)\n r << v\n dc << v.downcase\n end\n r\n end\n\n fields = [:fs_search_fields, :spreadsheet_contents_for_search,:spreadsheet_annotation_search_fields, :searchable_tags]\n\n search_terms.each do |key|\n DataFile.search do |query|\n query.keywords key, :fields=>fields\n end.hits.each do |hit|\n unless hit.score.nil?\n results[hit.primary_key]||=DataFileMatchResult.new([],0,hit.primary_key)\n results[hit.primary_key].search_terms << key\n results[hit.primary_key].score += (0.75 + hit.score)\n end\n end\n end\n end\n\n results.values.sort_by{|a| -a.score}\n end", "def compute_changed_and_risk_files(params)\n commit_hash, file_arr = commit_info(params)\n changed_file_freq = file_arr.flatten!.each_with_object(Hash.new(0)) {|file, freq_acc| freq_acc[file] += 1}\n changed_g2_files = []\n changed_file_freq.select {|file, freq| changed_g2_files << file if freq > 2}\n risk_files = changed_g2_files.dup\n rf = risk_files.each_with_object({}) do |file, acc|\n author_set = Set.new\n commit_hash.each do |file_arr, author|\n acc[file] = (author_set << author ) if file_arr.include? (file)\n end\n end\n rf.delete_if {|_file, author_arr| author_arr.length < 2}\n {\n \"changed_files\" => changed_g2_files,\n \"risk_files\" => rf\n }\n end", "def cert_needs_updating?(node)\n if !file_exists?([:node_x509_cert, node.name], [:node_x509_key, node.name])\n return true\n else\n cert = load_certificate_file([:node_x509_cert, node.name])\n if cert.not_after < Time.now.advance(:months => 2)\n log :updating, \"cert for node '#{node.name}' because it will expire soon\"\n return true\n end\n if cert.subject.common_name != node.domain.full\n log :updating, \"cert for node '#{node.name}' because domain.full has changed (was #{cert.subject.common_name}, now #{node.domain.full})\"\n return true\n end\n cert.openssl_body.extensions.each do |ext|\n if ext.oid == \"subjectAltName\"\n ips = []\n dns_names = []\n ext.value.split(\",\").each do |value|\n value.strip!\n ips << $1 if value =~ /^IP Address:(.*)$/\n dns_names << $1 if value =~ /^DNS:(.*)$/\n end\n dns_names.sort!\n if ips.first != node.ip_address\n log :updating, \"cert for node '#{node.name}' because ip_address has changed (from #{ips.first} to #{node.ip_address})\"\n return true\n elsif dns_names != dns_names_for_node(node)\n log :updating, \"cert for node '#{node.name}' because domain name aliases have changed\\n from: #{dns_names.inspect}\\n to: #{dns_names_for_node(node).inspect})\"\n return true\n end\n end\n end\n end\n return false\n end", "def reduce(nodes)\n for node in nodes\n if node.class==ActiveSupport::HashWithIndifferentAccess && node['data']\n data_array = Array.new\n node['data'].map{|element| data_array << element[1]}\n node['data'] = data_array\n end\n\n if node.class==Array || node.class == ActiveSupport::HashWithIndifferentAccess\n reduce (node)\n end\n end\n end", "def hash_el_when_match(node, target_ref, extra_fields = {})\n NodeMatch.hash__when_match(self, node, target_ref, extra_fields)\n end", "def save_records\n return error_response unless is_save_records_request?\n if params[:changes]\n params[:changes].each_value do |changed|\n fields = changed.keys\n record = @record_class.find(changed[fields.delete('recid')])\n if record.present?\n fields.each do |field|\n table, column = get_table_and_column field\n obj = if table == @table_name\n record\n elsif record.respond_to?(table.to_sym)\n 
record.send(\"#{table}\")\n end\n obj.send(\"#{column}=\", changed[field]) if obj and obj.respond_to?(column.to_sym)\n end\n return error_response unless record.save\n end\n end\n end\n succes_response\n end", "def resolve(records, lookup_chain, domain)\n # check if key exists\n count = 0\n\n for keyHash in records[:ADDRESS]\n if keyHash.has_key? domain.to_sym\n count=1\n end\n end\n\n for keyHash in records[:CNAME]\n if keyHash.has_key? domain.to_sym\n count=1\n end\n end\n\n if count == 0\n puts \"Error: record not found for #{domain}\"\n exit\n end\n\n # flag to exit when it enters ADDRESS key\n flag = 0\n\n # Access the Keys in ADDRESS key from records\n for keyHash in records[:ADDRESS]\n if keyHash.has_key? domain.to_sym\n lookup_chain.push(keyHash[domain.to_sym][0])\n flag = 1\n break\n end\n end\n\n\n if flag == 0\n\n for keyHash in records[:CNAME]\n if keyHash.has_key? domain.to_sym\n lookup_chain.push(keyHash[domain.to_sym][0])\n resolve(records, lookup_chain, keyHash[domain.to_sym][0])\n else\n flag = 1\n #Recursion till it enters :ADDRESS key\n resolve(records, lookup_chain, keyHash[domain.to_sym][0])\n break\n end\n end\n if lookup_chain.length == 1\n puts \"Error: record not found for #{lookup_chain}\"\n else\n return lookup_chain\n end\n else\n if lookup_chain.length == 1\n puts \"Error: record not found for #{lookup_chain}\"\n else\n return lookup_chain\n end\n end\nend", "def compute_changes\n @old_spec_set.each do |gem_specs|\n unless ( old_version = gem_specs.version ) == ( new_version = @new_spec_set[ gem_specs.name ].first.version )\n @changes[ gem_specs.name ] = { versions: { old: old_version.to_s, new: new_version.to_s } }\n end\n end\n end", "def diff_digests(old_products, new_products)\n # create maps of product sets\n old_map, new_map = [old_products, new_products].map do |products|\n return [] unless products\n Hash[*products.flat_map { |d| [d.product, d] }]\n end\n\n build_diff(new_map, old_map)\n end", "def iterateThroughRecordsAndWriteQueries(masterRecords, testPatients)\n\n\t\t#will store the results\n\t\ttestToMasterHash= Hash.new\n\n\t\tfor testRecord in testPatients\n\t\t\t#iterate through master records and find candidates \n\t\t\tprint \".\"\n\t\t\tcandidates= Utils::iterateThroughMasterRecordsAndFindCandidates(masterRecords, testRecord.patient)\n\t\t\t#resolve cadidates to unique MRNs\n\t\t\tUtils::getUniqueMedicalRecordNumber(candidates,testRecord,testToMasterHash)\n\t\tend\n\n\t\t#print results\n\t\tputs \"\"\n\t\ttestToMasterHash.each_pair do |key, value|\n\t\t\tputs \"#{key}\\t\\t\\t#{value}\"\n\t\tend\n\tend", "def update_found(node)\n old_vals = node.workorder.rfcCi.ciBaseAttributes\n new_vals = node.workorder.rfcCi.ciAttributes\n new_vals.keys.each do |k|\n if old_vals.has_key?(k) && \n old_vals[k] != new_vals[k]\n Chef::Log.info(\"changed: old #{k}:#{old_vals[k]} != new #{k}:#{new_vals[k]}\")\n return true\n end\n end\n return false\n end", "def calculate_hash!\n prefix = PREFIX_NAME_LOOKUP[self.type]\n # add special cases for refs\n self.hash_id = NodeId.sha1(\"#{prefix} #{self.size}\\0#{self.content}\")\n end", "def prepare_data(results_hash)\n\n @entities_store = {}\n @generic_relations_store = {}\n @events_store = {}\n # find all Entities in response\n @entities_store = results_hash.select{|key, hash| hash[\"_typeGroup\"] == \"entities\"}\n # find all GenericRelations in response\n @generic_relations_store = results_hash.select{|key, hash| hash[\"_typeGroup\"] == \"relations\" &&\n hash[\"_type\"] == \"GenericRelations\"}\n # find all Events 
in response\n @events_store = results_hash.select{|key, hash| hash[\"_typeGroup\"] == \"relations\" &&\n hash[\"_type\"] != \"GenericRelations\"}\n\n Ohm.redis = Redic.new(REDIS)\n\n\n #for each Entity find all related Relations and Events and store them to Ohm/Redis\n @entities_store.each_pair do |k, v|\n\n entity_set = EntityModel.find(calais_id: k)\n\n if entity_set.size > 0 #entity already exists in store\n entity = entity_set.first\n k = entity.calais_id\n else #entity doesn't exist in store\n entity = EntityModel.create(:name => v['name'], :type => v['_type'], :calais_id => k)\n entity.save\n end #if\n\n\n #get all referenced relations\n find_in_relations(k).each do |obj|\n\n found_rel = get_relation(obj[0])\n if found_rel\n\n found_rel.subject = convert_to_hash(found_rel.subject)\n found_rel.object = convert_to_hash(found_rel.object)\n\n relation = EntityModel::RelationModel.create(:subject => found_rel.subject,\n :object => found_rel.object,\n :verb => found_rel.verb,\n :detection => found_rel.detection,\n :calais_id => obj[0])\n entity.relations.add(relation)\n end #if\n end #each\n #get all referenced events\n find_in_events(k).each do |obj|\n found_event = get_event(obj[0])\n attribs = {}\n if found_event\n\n found_event.each_pair do |key, val|\n\n key = key.to_s.slice(1, key.length-1)\n attribs[key] = val\n\n end #block\n\n event = EntityModel::EventModel.create(:calais_id => obj[0], :info_hash => attribs)\n entity.events.add(event)\n\n end #if\n\n end #each\n end #each_pair\n end", "def jenkins_node_compare(current_node, new_node)\n new_node = jenkins_node_defaults(new_node)\n default = jenkins_node_defaults({})\n default.keys.each do |key|\n val = new_node[key] || default[key]\n if !val.nil? && current_node[key.to_s] != val\n Chef::Log::debug(\"#{new_node[:name]} node.#{key} changed (#{current_node[key.to_s]} != #{val})\")\n return true\n end\n end\n Chef::Log::debug(\"#{new_node[:name]} node unchanged\")\n false\nend", "def merge_data(old_data)\n old_data.each do |old_meta,old_stats|\n if stats = metric_hash[old_meta]\n metric_hash[old_meta] = stats.combine!(old_stats)\n else\n metric_hash[old_meta] = old_stats\n end\n end\n metric_hash\n end", "def recalculate_hash_at(node)\n return node._hash = node.value if node.value\n recalculate_hash_at(node.left) if node.left\n recalculate_hash_at(node.right) if node.right\n node._hash = self.class.hash_children(*node_subhashes(node))\n end", "def match_sales_levels\n nodes = {}\n nodes.merge!(match_sl3)\n nodes.merge!(match_sl4)\n nodes.merge!(match_sl5)\n nodes.merge!(match_sl6)\n nodes\n end", "def rehash() end", "def compare_nodes(a, b) # FIXME: WTF?\n a = a[1]\n b = b[1]\n type_a = 0\n type_b = 0\n type_a = 1 if a.key?('{DAV:}resourcetype') && a['{DAV:}resourcetype'].value.include?('{DAV:}collection')\n type_b = 1 if b.key?('{DAV:}resourcetype') && b['{DAV:}resourcetype'].value.include?('{DAV:}collection')\n\n # If same type, sort alphabetically by filename:\n return (a['displayPath'] <=> b['displayPath']) if type_a == type_b\n ((type_a < type_b) ? 
1 : -1)\n end", "def search_results_hashes(match)\n case match\n when 'pending-delete not available to purge'\n search_results_pending_delete_table.rows_text.map{ |row| Hash[* search_results_pending_delete_table.headers_text.zip(row).flatten] } unless search_results_pending_delete_table.text.include?(\"No results found\")\n when 'pending-delete available to purge'\n search_results_available_to_purge_table.rows_text.map{ |row| Hash[*search_results_available_to_purge_table.headers_text.zip(row).flatten] } unless search_results_available_to_purge_table.text.include?(\"No results found\")\n when 'who have been purged'\n search_results_purged_table.rows_text.map{ |row| Hash[*search_results_purged_table.headers_text.zip(row).flatten] } unless search_results_purged_table.text.include?(\"No results found\")\n end\n end", "def eval_update_hash(node)\n\t\tconfig = lib('requirements')\n \n update_hash = {status: \"Ok\"}\n update_hash[\"selenium-server.jar\"] = config[\"selenium\"][\"server\"]\n update_hash[\"e3s-proxy.jar\"] = config[\"selenium\"][\"e3s_proxy\"]\n update_hash[\"rabbitmq-client.jar\"] = config[\"rabbitmq-java-client\"]\n update_hash[\"IEDriverServer.exe\"] = config[\"ie_driver_server\"][\"#{node['bit']}bit\"] if has_driver?(node, \"internet explorer\")\n \n update_hash\n\tend", "def make_into_replicas(records)\n begin # Basic safety net, avoiding trouble\n safe_records = []\n records.each do |r|\n case\n when r.kind == 'Group',\n r.kind == 'Smart Group'\n @unify_url_log.warn \"Group with URL: '#{r.name}' (#{r.kind}). Not handled, since 'make_into_replicas' does not handle groups.\"\n # TODO Exclude records that are in Trash\n else # (Normal case)\n safe_records << r\n end\n end\n end\n records = safe_records\n return false if records.size == 0 # TODO Is this a reasonable result?\n\n # Remove items from records that are already replicas\n records = remove_replicas(records) # Note: This also getifies records, making them less prone\n # for bugs when removing stuff\n return true if records.size == 1 # Job done if array has only one item left\n\n master = records.pop\n\n begin # Safety net - will raise an error if the items are not reasonably similar\n # Needs to be the same: name, URL, comment\n # Can be different: Kind, Date, Size etc.\n safe_records = []\n records.each do |r|\n case\n when master.name != r.name, # Stuff that must be the same.\n master.URL != r.URL\n @unify_url_log.warn \"WARNING To dissimular to safely make into replicas\"\n when master.comment != r.comment\n @unify_url_log.warn \"WARNING Comments differ - '#{r.name}' at '#{r.location}' will not replicated since I fear to loose unique comments.\"\n else # Normal case\n safe_records << r\n end\n end\n records = safe_records\n end\n\n # Delete records and replace them with replicas of master\n while records.size > 0\n r = records.pop\n rparents = r.parents.get\n rparents = remove_replicas(r.parents) # (Also .get-ifys)\n rparents.each do |rparent| # Record must be replaced in all its locations\n @devonthink.replicateRecord_to_(master, rparent)\n @created_deleted_log.info \"Created: '#{master.name}' (#{master.kind})\"\n trash(r, rparent)\n # TODO Check that tags also are preserved\n end\n end\n\n end", "def parse_dns(dns_raw)\n dns = []\n dns_records = {}\n record_type_A = []\n record_type_A_IP = []\n record_type_CNAME = []\n record_type_CNAME_alias = []\n\n #adds each line to dns array and splipt them with \",\"\n dns_raw.each do |lines_in_files|\n dns.push([lines_in_files.split(\",\")])\n end\n\n #Checks for recordA,IP 
or recordCNAME and adds them to the respected array\n dns.each do |words_in_files|\n if words_in_files[0][0] == \"A\"\n record_type_A.push(words_in_files[0][1].strip)\n record_type_A_IP.push(words_in_files[0][2].strip)\n elsif words_in_files[0][0] == \"CNAME\"\n record_type_CNAME.push(words_in_files[0][1].strip)\n record_type_CNAME_alias.push(words_in_files[0][2].strip)\n end\n end\n\n #record_A hash stores values of recordA\n record_A = {\n :source => record_type_A,\n :ip => record_type_A_IP,\n }\n\n #recordCNAME hash stores values of recordCNAME\n record_CNAME = {\n :source => record_type_CNAME,\n :alias => record_type_CNAME_alias,\n }\n\n #dns_records gets both Hashes\n dns_records = {\n :A => record_A,\n :CNAME => record_CNAME,\n }\n\n #returns record dns_record with two hashes.\n return dns_records\nend", "def findClosestMatch(records, templateName)\n # Build a placeholder to store the selected notification template\n selected_record = nil\n recipient_language = @recipient_json['language'].to_s\n recipient_region = @recipient_json['region'].to_s\n\n # Return an error if no notification template was found\n if records.length == 0\n @error_message = \"The following Notification Template or snippet was not located: #{templateName}\\n\\n\"\n # If only one template is returned, or the user has no preferences use the first match\n elsif records.length == 1 || (recipient_language.empty? && recipient_region.empty?)\n selected_record = records[0]['values']\n puts \"Only one record returned for #{templateName}, OR no preferences found so selected first\" if @debug_logging_enabled\n else\n # Select a template based on users preferences\n # Define an array of preferences for each record returned\n recordPreferences = records.map do |record|\n {\n 'id' => record['id'],\n 'language' => record['values']['Language'],\n 'region' => record['values']['Region'],\n 'score' => 0,\n }\n end\n # Loop over each record and try to match it\n recordPreferences.each do |record|\n language = record['language'].to_s\n region = record['region'].to_s\n # Test to see if both language and region match if neither are empty\n if recipient_language == language && recipient_region == region && (!recipient_region.empty? && !region.empty?) && (!recipient_language.empty? && !language.empty?)\n record['score'] += 3\n puts \"Matched on Language and Region for Template #{templateName}\" if @debug_logging_enabled\n # Test to see if a language matches if they are not empty\n elsif recipient_language == language && (!recipient_language.empty? && !language.empty?)\n record['score'] += 2\n puts \"Matched on Language only for Template #{templateName}\" if @debug_logging_enabled\n # Test to see if a region matches\n elsif recipient_region == region && (!recipient_region.empty? && !region.empty?)\n record['score'] += 1\n puts \"Matched on Region only for Template #{templateName}\" if @debug_logging_enabled\n end\n puts \"Score is #{record['score']} for Template #{templateName}\" if @debug_logging_enabled\n end\n\n # Determine which record should be choosen as the selected record\n closestMatch = recordPreferences.max_by { |element| element['score'] }\n # Get the ID so we can select this record. If multiple had the same score, choose the first\n closestMatch.kind_of?(Array) ? 
closestMatch = closestMatch[0]['id'] : closestMatch = closestMatch['id']\n # Set the selected record to be returned\n selected_record = records.find { |match| match['id'] == closestMatch }['values']\n end\n # Return the selected record\n return selected_record\n end", "def updateUC record\n for i in 0...record.valuesArr.length do\n for j in i+1...record.valuesArr.length do\n for k in [email protected] do\n if @pairs[k].firstParam == i and @pairs[k].secondParam == j\n for h in 0...@pairs[k].pairsArr.length do\n if @pairs[k].pairsArr[h].firstValue == record.valuesArr[i] and @pairs[k].pairsArr[h].secondValue == record.valuesArr[j]\n @parameters.paramsArr[i].elementsArr[record.valuesArr[i]].minusTimes\n @parameters.paramsArr[j].elementsArr[record.valuesArr[j]].minusTimes\n @pairs[k].pairsArr[h].setIsVisited \n end\n end\n end \n end\n end\n end\n end", "def same_record?(existing, incoming)\n\n return false if existing.nil? or incoming.nil?\n\n if !existing[\"version_number\"].nil? and !incoming[\"version_number\"].nil?\n\n if existing[\"version_number\"].strip == incoming[\"version_number\"].strip\n\n return true\n\n end\n\n end\n\n found = [false, false, false, false]\n\n if !existing[\"gender\"].nil? and !incoming[\"gender\"].nil?\n\n if existing[\"gender\"].strip.downcase == incoming[\"gender\"].strip.downcase\n\n found[0] = true\n\n end\n\n end\n\n if !existing[\"names\"][\"given_name_code\"].nil? and !incoming[\"names\"][\"given_name_code\"].nil?\n\n if existing[\"names\"][\"given_name_code\"].strip.downcase == incoming[\"names\"][\"given_name_code\"].strip.downcase\n\n if found[0]\n found[1] = true\n else\n found[0] = true\n end\n\n end\n\n end\n\n if !existing[\"names\"][\"family_name_code\"].nil? and !incoming[\"names\"][\"family_name_code\"].nil?\n\n if existing[\"names\"][\"family_name_code\"].strip.downcase == incoming[\"names\"][\"family_name_code\"].strip.downcase\n\n if found[0]\n found[1] = true\n elsif found[1]\n found[2] = true\n else\n found[0] = true\n end\n\n end\n\n end\n\n if !existing[\"birthdate\"].nil? and !incoming[\"birthdate\"].nil?\n\n if existing[\"birthdate\"].strip.downcase == incoming[\"birthdate\"].strip.downcase\n\n if found[0]\n found[1] = true\n elsif found[1]\n found[2] = true\n else\n found[0] = true\n end\n\n end\n\n end\n\n if !existing[\"patient\"].nil? and !incoming[\"patient\"].nil? and !existing[\"patient\"][\"identifiers\"].nil? and !incoming[\"patient\"][\"identifiers\"].nil?\n\n oldids = existing[\"patient\"][\"identifiers\"] rescue []\n\n newids = incoming[\"patient\"][\"identifiers\"] rescue []\n\n oldids.each do |id|\n\n break if found[3]\n\n newids.each do |nid|\n\n if id[id.keys[0]].strip == nid[nid.keys[0]].strip\n\n found[3] = true\n\n break\n\n end\n\n end\n\n end\n\n end\n\n return true if found == [true, true, true, true]\n\n return false\n\n end", "def for_tree_nodes(tree_nodes, check_root_record = true)\n return {} if tree_nodes.empty?\n assert_same_type!(tree_nodes)\n\n # E.g. ArchivalObject\n node_model = tree_nodes[0].class.node_model\n\n # E.g. Resource\n root_model = tree_nodes[0].class.root_model\n\n # Initialize our result map to true -- assuming \"published\" by default.\n result = Hash[tree_nodes.map {|node| [node, true]}]\n\n if check_root_record\n # If we're the top-level call, we'll check the repository and root\n # record's publication status. 
There's no point doing this at every\n # level of the tree, but do it up front to save some potential work.\n root_record_id_to_child = {}\n tree_nodes.each do |node|\n if repository_published?(node[:repository_id])\n root_record_id_to_child[node.root_record_id] ||= []\n root_record_id_to_child[node.root_record_id] << node\n else\n result[node] = false\n end\n end\n\n return result if root_record_id_to_child.empty?\n\n root_model\n .filter(:id => root_record_id_to_child.keys)\n .filter(Sequel.|({:publish => 0},\n {:suppressed => 1}))\n .select(:id)\n .each do |root_record|\n root_record_id_to_child.fetch(root_record.id).each do |node|\n result[node] = false\n end\n end\n end\n\n parent_id_to_child = {}\n tree_nodes.each do |node|\n if result[node] && node.publish == 1 && node.suppressed == 0\n # OK so far, but check the ancestors.\n if node.parent_id\n parent_id_to_child[node.parent_id] ||= []\n parent_id_to_child[node.parent_id] << node\n end\n else\n # Unpublished/suppressed. Nothing more to check.\n result[node] = false\n end\n end\n\n unless parent_id_to_child.empty?\n parent_ids = parent_id_to_child.keys\n parent_publication_status = for_tree_nodes(node_model.filter(:id => parent_ids)\n .select(:id, :parent_id, :root_record_id, :publish, :suppressed)\n .all,\n false)\n\n parent_publication_status.each do |parent, published|\n # If the parent was unpublished, that overrides our previous result.\n parent_id_to_child.fetch(parent.id).each do |node|\n result[node] &&= published\n end\n end\n end\n\n result\n end", "def merge_hasharray(array1, array2, commonfield)\n xref = {}\n array2.each { |hash| xref[hash[commonfield]] = hash }\n array1.each do |hash|\n next if xref[hash[commonfield]].empty?\n xref[hash[commonfield]].each_pair do |kk, vv|\n next if commonfield == kk\n hash[kk] = vv\n end\n end\nend", "def hashedxml(node)\n \n node.elements.map do |element|\n \n attributes = element.attributes.clone\n \n # Although attribute last_modified isn't used by rexle-diff it is \n # created by Dynarex whenever a record is created or updated. \n # This would of course cause the record to be flagged as changed even \n # when the element value itself hashn't changed.\n #\n %i(created last_modified).each {|x| attributes.delete x}\n x = element.elements.length > 0 ? 
'' : 0\n [element.name, attributes, element.text.to_s.strip, x].hash\n \n end\n end", "def update_hash\n nh = nil\n\n if is_branch != 0\n sha512 = OpenSSL::Digest::SHA512.new\n sha512 << HASH_+PREFIXES[:inner_node]\n hashes.each { |k,h|\n sha512 << v\n }\n nh = sha512.digest\n end\n\n return false if nh == self.hash\n self.hash = nh\n return true\n end", "def update_node_info_cache()\n @all_stats = []\n @nodes.each { |node|\n ip = node.private_ip\n acc = AppControllerClient.new(ip, @@secret)\n\n begin\n @all_stats << acc.get_stats()\n rescue FailedNodeException\n Djinn.log_warn(\"Failed to get status update from node at #{ip}, so \" +\n \"not adding it to our cached info.\")\n end\n }\n end", "def updates_from_data_key_documents(documents)\n documents.map do |doc|\n {\n update_one: {\n filter: { _id: doc[:_id] },\n update: {\n '$set' => {\n masterKey: doc[:masterKey],\n keyMaterial: doc[:keyMaterial]\n },\n '$currentDate' => { updateDate: true },\n },\n }\n }\n end\n end", "def compare_outputs(orig_plugin, verify_plugin, option)\n\t\t\t\tresults = [missed = [], mismatched = [], extra = []]\n\t\t\t\trs_plugin = get_node_attributes(verify_plugin, option)\n\t\t\t\tohai_plugin = get_node_attributes(orig_plugin, option)\n\t\t\t\tif ohai_plugin.nil?\n\t\t\t\t\tif rs_plugin.nil?\n\t\t\t\t\t\treturn results # both hashed are nil => identical\n\t\t\t\t\telse\n\t\t\t\t\t\treturn no_option = \"rightscale plugin has this attribute when original doesn't\"\n\t\t\t\t\tend\n\t\t\t\telse\n\t\t\t\t\tif rs_plugin.nil?\n\t\t\t\t\t\treturn no_option = \"rightscale plugin missing the attribute when original has this node\" \n\t\t\t\t\telse\t\n\t\t\t\t\t\tresults = get_differences(ohai_plugin, rs_plugin)\n\t\t\t\t\t\treturn results\n\t\t\t\t\tend\n\t\t\t\tend\n\n\t\t\tend", "def records=(records)\n clear\n concat(records)\n end", "def assert_nodes_are_equal(a, b)\n assert_equal a.id, b.id, \"node IDs\"\n assert_equal a.latitude, b.latitude, \"latitude on node #{a.id}\"\n assert_equal a.longitude, b.longitude, \"longitude on node #{a.id}\"\n assert_equal a.changeset_id, b.changeset_id, \"changeset ID on node #{a.id}\"\n assert_equal a.visible, b.visible, \"visible on node #{a.id}\"\n assert_equal a.version, b.version, \"version on node #{a.id}\"\n assert_equal a.tags, b.tags, \"tags on node #{a.id}\"\n end", "def assert_nodes_are_equal(a, b)\n assert_equal a.id, b.id, \"node IDs\"\n assert_equal a.latitude, b.latitude, \"latitude on node #{a.id}\"\n assert_equal a.longitude, b.longitude, \"longitude on node #{a.id}\"\n assert_equal a.changeset_id, b.changeset_id, \"changeset ID on node #{a.id}\"\n assert_equal a.visible, b.visible, \"visible on node #{a.id}\"\n assert_equal a.version, b.version, \"version on node #{a.id}\"\n assert_equal a.tags, b.tags, \"tags on node #{a.id}\"\n end", "def hash_record! ht, rec, duplicate_type\n case duplicate_type\n when 1 # whole header match\n unless ht.has_key? rec.header\n ht[rec.header] = rec\n end\n when 2 # header ID match\n unless ht.has_key? rec.id\n ht[rec.id] = rec\n end\n when 3 # whole seq match\n unless ht.has_key? rec.seq\n ht[rec.seq] = rec\n end\n when 4 # whole seq + whole header\n key = \"#{rec.header}#{rec.seq}\"\n unless ht.has_key? key\n ht[key] = rec\n end\n when 5 # whole seq + hedaer ID\n key = \"#{rec.id}#{rec.seq}\"\n unless ht.has_key? key\n ht[key] = rec\n end\n end\nend", "def updates\n @report = {}\n Server.find_each do |server|\n # Go through each package. 
In some cases (gems) there may be multiple\n # versions of a package on the machine.\n packages = {}\n server.servers_to_packages.find_each do |package_map|\n next unless package_map.status == 'pending'\n package = Package.find(package_map.package_id)\n\n new = {}\n new['provider'] = package.provider\n new['version'] = package.version\n packages[package.name] = [] unless packages.key?(package.name)\n packages[package.name] << new\n end\n @report[server.hostname] = packages unless packages.empty?\n end\n end", "def compare_results(bldg_types, vintages, climate_zones, file_ext=\"\")\n \n #### Compare results against legacy idf results \n acceptable_error_percentage = 10 # Max 5% error for any end use/fuel type combo\n failures = []\n \n # Load the legacy idf results JSON file into a ruby hash\n temp = File.read(\"#{Dir.pwd}/legacy_idf_results.json\")\n legacy_idf_results = JSON.parse(temp) \n \n # List of all fuel types\n fuel_types = ['Electricity', 'Natural Gas', 'Additional Fuel', 'District Cooling', 'District Heating', 'Water']\n\n # List of all end uses\n end_uses = ['Heating', 'Cooling', 'Interior Lighting', 'Exterior Lighting', 'Interior Equipment', 'Exterior Equipment', 'Fans', 'Pumps', 'Heat Rejection','Humidification', 'Heat Recovery', 'Water Systems', 'Refrigeration', 'Generators']\n\n # Create a hash of hashes to store all the results from each file\n all_results_hash = Hash.new{|h,k| h[k]=Hash.new(&h.default_proc) }\n \n # Create a hash of hashes to store the results from each file\n results_total_hash = Hash.new{|h,k| h[k]=Hash.new(&h.default_proc) }\n\n # Loop through all of the given combinations\n bldg_types.sort.each do |building_type|\n vintages.sort.each do |building_vintage|\n climate_zones.sort.each do |climate_zone|\n puts \"**********#{building_type}-#{building_vintage}-#{climate_zone}******************\"\n # Open the sql file, skipping if not found\n model_name = \"#{building_type}-#{building_vintage}-#{climate_zone}\"\n sql_path_string = \"#{Dir.pwd}/build/#{model_name}/ModelToIdf/ExpandObjects-0/EnergyPlusPreProcess-0/EnergyPlus-0/eplusout.sql\"\n sql_path = OpenStudio::Path.new(sql_path_string)\n sql = nil\n if OpenStudio.exists(sql_path)\n puts \"Found SQL file.\"\n sql = OpenStudio::SqlFile.new(sql_path)\n else\n failures << \"****Error - #{model_name} - Could not find sql file\"\n puts \"**********no sql here #{sql_path}******************\"\n next\n end\n\n # Create a hash of hashes to store the results from each file\n results_hash = Hash.new{|h,k| h[k]=Hash.new(&h.default_proc) }\n\n # Get the osm values for all fuel type/end use pairs\n # and compare to the legacy idf results\n total_legacy_energy_val = 0\n total_osm_energy_val = 0\n total_legacy_water_val = 0\n total_osm_water_val = 0\n fuel_types.each do |fuel_type|\n end_uses.each do |end_use|\n next if end_use == 'Exterior Equipment'\n # Get the legacy results number\n legacy_val = legacy_idf_results.dig(building_type, building_vintage, climate_zone, fuel_type, end_use)\n # Combine the exterior lighting and exterior equipment\n if end_use == 'Exterior Lighting'\n legacy_exterior_equipment = legacy_idf_results.dig(building_type, building_vintage, climate_zone, fuel_type, 'Exterior Equipment')\n unless legacy_exterior_equipment.nil?\n legacy_val += legacy_exterior_equipment\n end\n end\n\n #legacy_val = legacy_idf_results[building_type][building_vintage][climate_zone][fuel_type][end_use]\n if legacy_val.nil?\n failures << \"Error - #{model_name} - #{fuel_type} #{end_use} legacy idf value not found\"\n 
next\n end\n\n # Add the energy to the total\n if fuel_type == 'Water'\n total_legacy_water_val += legacy_val\n else\n total_legacy_energy_val += legacy_val\n end\n\n # Select the correct units based on fuel type\n units = 'GJ'\n if fuel_type == 'Water'\n units = 'm3'\n end\n\n # End use breakdown query\n energy_query = \"SELECT Value FROM TabularDataWithStrings WHERE (ReportName='AnnualBuildingUtilityPerformanceSummary') AND (ReportForString='Entire Facility') AND (TableName='End Uses') AND (ColumnName='#{fuel_type}') AND (RowName = '#{end_use}') AND (Units='#{units}')\"\n\n # Get the end use value\n osm_val = sql.execAndReturnFirstDouble(energy_query)\n if osm_val.is_initialized\n osm_val = osm_val.get\n else\n failures << \"Error - #{model_name} - No sql value found for #{fuel_type}-#{end_use} via #{energy_query}\"\n osm_val = 0\n end\n\n # Combine the exterior lighting and exterior equipment\n if end_use == 'Exterior Lighting'\n # End use breakdown query\n energy_query = \"SELECT Value FROM TabularDataWithStrings WHERE (ReportName='AnnualBuildingUtilityPerformanceSummary') AND (ReportForString='Entire Facility') AND (TableName='End Uses') AND (ColumnName='#{fuel_type}') AND (RowName = 'Exterior Equipment') AND (Units='#{units}')\"\n\n # Get the end use value\n osm_val_2 = sql.execAndReturnFirstDouble(energy_query)\n if osm_val_2.is_initialized\n osm_val_2 = osm_val_2.get\n else\n failures << \"Error - #{model_name} - No sql value found for #{fuel_type}-Exterior Equipment via #{energy_query}\"\n osm_val_2 = 0\n end\n osm_val += osm_val_2\n end\n\n # Add the energy to the total\n if fuel_type == 'Water'\n total_osm_water_val += osm_val\n else\n total_osm_energy_val += osm_val\n end\n\n # Calculate the error and check if less than\n # acceptable_error_percentage\n percent_error = nil\n add_to_all_results = true\n if osm_val > 0 && legacy_val > 0\n # If both\n percent_error = ((osm_val - legacy_val)/legacy_val) * 100\n if percent_error.abs > acceptable_error_percentage\n failures << \"#{building_type}-#{building_vintage}-#{climate_zone}-#{fuel_type}-#{end_use} Error = #{percent_error.round}% (#{osm_val}, #{legacy_val})\"\n end\n elsif osm_val > 0 && legacy_val.abs < 1e-6\n # The osm has a fuel/end use that the legacy idf does not\n percent_error = 1000\n failures << \"#{building_type}-#{building_vintage}-#{climate_zone}-#{fuel_type}-#{end_use} Error = osm has extra fuel/end use that legacy idf does not (#{osm_val})\"\n elsif osm_val.abs < 1e-6 && legacy_val > 0\n # The osm has a fuel/end use that the legacy idf does not\n percent_error = 1000\n failures << \"#{building_type}-#{building_vintage}-#{climate_zone}-#{fuel_type}-#{end_use} Error = osm is missing a fuel/end use that legacy idf has (#{legacy_val})\"\n else\n # Both osm and legacy are == 0 for this fuel/end use, no error\n percent_error = 0\n add_to_all_results = false\n end\n\n results_hash[building_type][building_vintage][climate_zone][fuel_type][end_use]['Legacy Val'] = legacy_val.round(2)\n results_hash[building_type][building_vintage][climate_zone][fuel_type][end_use]['OpenStudio Val'] = osm_val.round(2)\n results_hash[building_type][building_vintage][climate_zone][fuel_type][end_use]['Percent Error'] = percent_error.round(2)\n results_hash[building_type][building_vintage][climate_zone][fuel_type][end_use]['Absolute Error'] = (legacy_val-osm_val).abs.round(2)\n\n if add_to_all_results\n all_results_hash[building_type][building_vintage][climate_zone][fuel_type][end_use]['Legacy Val'] = legacy_val.round(2)\n 
all_results_hash[building_type][building_vintage][climate_zone][fuel_type][end_use]['OpenStudio Val'] = osm_val.round(2)\n all_results_hash[building_type][building_vintage][climate_zone][fuel_type][end_use]['Percent Error'] = percent_error.round(2)\n all_results_hash[building_type][building_vintage][climate_zone][fuel_type][end_use]['Absolute Error'] = (legacy_val-osm_val).abs.round(2)\n end\n\n end # Next end use\n end # Next fuel type\n\n # Calculate the overall energy error\n total_percent_error = nil\n if total_osm_energy_val > 0 && total_legacy_energy_val > 0\n # If both\n total_percent_error = ((total_osm_energy_val - total_legacy_energy_val)/total_legacy_energy_val) * 100\n failures << \"#{building_type}-#{building_vintage}-#{climate_zone} *** Total Energy Error = #{total_percent_error.round}% ***\"\n elsif total_osm_energy_val > 0 && total_legacy_energy_val == 0\n # The osm has a fuel/end use that the legacy idf does not\n total_percent_error = 1000\n failures << \"#{building_type}-#{building_vintage}-#{climate_zone} *** Total Energy Error = osm has extra fuel/end use that legacy idf does not (#{total_osm_energy_val})\"\n elsif total_osm_energy_val == 0 && total_legacy_energy_val > 0\n # The osm has a fuel/end use that the legacy idf does not\n total_percent_error = 1000\n failures << \"#{building_type}-#{building_vintage}-#{climate_zone} *** Total Energy Error = osm is missing a fuel/end use that legacy idf has (#{total_legacy_energy_val})\"\n else\n # Both osm and legacy are == 0 for, no error\n total_percent_error = 0\n failures << \"#{building_type}-#{building_vintage}-#{climate_zone} *** Total Energy Error = both idf and osm don't use any energy.\"\n end\n \n results_total_hash[building_type][building_vintage][climate_zone] = total_percent_error\n\n # Save the results to JSON\n File.open(\"#{Dir.pwd}/build/#{model_name}/comparison#{file_ext}.json\", 'w') do |file|\n file << JSON::pretty_generate(results_hash)\n end\n end\n end\n end\n\n # Get all the fuel type and end user combination\n all_fuel_end_user_hash = Hash.new{|h,k| h[k]=Hash.new(&h.default_proc) }\n all_results_hash.each_pair do |building_type, value1|\n value1.each_pair do |building_vintage, value2|\n value2.each_pair do |climate_zone, value3|\n value3.each_pair do |fuel_type, value4|# fuel type\n value4.each_pair do |end_use, value5| # end use\n all_fuel_end_user_hash[fuel_type][end_use] = true\n end\n end\n end\n end\n end\n\n # Fill in the missing value with 0,0,0\n all_results_hash.each_pair do |building_type, value1|\n value1.each_pair do |building_vintage, value2|\n value2.each_pair do |climate_zone, value3|\n all_fuel_end_user_hash.each_pair do |fuel_type, end_users|\n end_users.each_pair do |end_use, value|\n if value3[fuel_type][end_use].empty?\n value3[fuel_type][end_use]['Legacy Val'] = 0\n value3[fuel_type][end_use]['OpenStudio Val'] = 0\n value3[fuel_type][end_use]['Percent Error'] = 0\n value3[fuel_type][end_use]['Absolute Error'] = 0\n end\n end\n end\n end\n end\n end\n\n fuel_type_names = []\n end_uses_names =[]\n\n all_fuel_end_user_hash.each_pair do |fuel_type, end_users|\n end_users.each_pair do |end_use, value|\n fuel_type_names.push(fuel_type)\n end_uses_names.push(end_use)\n end\n end\n \n #######\n # results_total_hash[building_type][building_vintage][climate_zone]\n csv_file_total = File.open(\"#{Dir.pwd}/build/comparison_total#{file_ext}.csv\", 'w')\n # Write the header\n csv_file_total.write(\"building_type,building_vintage,climate_zone,\")\n line2_str =\",,,\"\n 
#results_hash=Hash[building_type][building_vintage][climate_zone][fuel_type][end_use]['Legacy Val']\n results_total_hash.values[0].values[0].each_pair do |climate_zone, total_error|\n csv_file_total.write(\"#{total_error},\")\n end\n csv_file_total.write(\"\\n\")\n # Save the results to CSV\n results_total_hash.each_pair do |building_type, value1|\n value1.each_pair do |building_vintage, value2|\n value2.each_pair do |climate_zone, value3|\n csv_file_total.write(\"#{building_type},#{building_vintage},#{climate_zone},#{value3}\")\n csv_file_total.write(\"\\n\")\n end\n end\n end\n\n csv_file_total.close \n \n \n\n # Create a CSV to store the results\n csv_file = File.open(\"#{Dir.pwd}/build/comparison#{file_ext}.csv\", 'w')\n csv_file_simple = File.open(\"#{Dir.pwd}/build/comparison_simple#{file_ext}.csv\", 'w')\n\n # Write the header\n csv_file.write(\"building_type,building_vintage,climate_zone,\")\n csv_file_simple.write(\"building type,building vintage,climate zone,fuel type,end use,legacy val,openstudio val,percent error,absolute error\\n\")\n line2_str =\",,,\"\n #results_hash=Hash[building_type][building_vintage][climate_zone][fuel_type][end_use]['Legacy Val']\n all_results_hash.values[0].values[0].values[0].each_pair do |fuel_type, end_users|\n end_users.keys.each do |end_user|\n csv_file.write(\"#{fuel_type}-#{end_user},,,,\")\n line2_str+= \"Legacy Val,OSM Val,Diff (%),Absolute Diff,\"\n end\n end\n csv_file.write(\"\\n\")\n csv_file.write(line2_str + \"\\n\")\n\n # Save the results to CSV\n all_results_hash.each_pair do |building_type, value1|\n value1.each_pair do |building_vintage, value2|\n value2.each_pair do |climate_zone, value3|\n csv_file.write(\"#{building_type},#{building_vintage},#{climate_zone},\")\n for fuel_end_use_index in 0...fuel_type_names.count\n fuel_type = fuel_type_names[fuel_end_use_index]\n end_use = end_uses_names[fuel_end_use_index]\n value5 = value3[fuel_type][end_use]\n csv_file.write(\"#{value5['Legacy Val']},#{value5['OpenStudio Val']},#{value5['Percent Error']},#{value5['Absolute Error']},\")\n # if value5['Percent Error'].abs > 0.1\n unless value5['Legacy Val'].nil?\n csv_file_simple.write(\"#{building_type},#{building_vintage},#{climate_zone},#{fuel_type},#{end_use},#{value5['Legacy Val']},#{value5['OpenStudio Val']},#{value5['Percent Error']},#{value5['Absolute Error']}\\n\")\n end\n end\n csv_file.write(\"\\n\")\n end\n end\n end\n\n csv_file.close\n csv_file_simple.close\n #### Return the list of failures\n return failures\n end", "def deduplicate(records:)\n return [] unless records.present? 
&& records.is_a?(Array)\n\n out = []\n found = []\n records.each do |rec|\n next if found.include?(rec[:sort_name]) || found.include?(rec[:id])\n\n found << rec[:sort_name]\n found << rec[:id] if rec[:id].present?\n out << rec\n end\n out\n end", "def update_photo_urls(records, subdirectories)\r\n\r\n updated_records = []\r\n\r\n records.each do |record_hash|\r\n\r\n urls_array = generate_image_urls(record_hash[\"photos\"], subdirectories)\r\n record_hash[\"photos\"] = urls_array\r\n updated_records.push(record_hash)\r\n\r\n end\r\n\r\n return updated_records \r\n\r\nend", "def compare_product_tests(test1, test2)\n # compare relevant details\n test1.save\n test2.save\n assert_performed_jobs 2\n\n test1.reload\n test2.reload\n compare_results(test1, test2)\n\n # compare records\n test1.patients.each_index do |x|\n patient1 = test1.patients.fetch(x)\n patient2 = test2.patients.fetch(x)\n compare_records(patient1, patient2)\n end\n end", "def prepare(search_term:, records:)\n return [] unless search_term.present? && records.present? && records.is_a?(Array)\n\n array = []\n records.map do |rec|\n item = rec.is_a?(Org) ? OrgSelection::OrgToHashService.to_hash(org: rec) : rec\n array << evaluate(search_term: search_term, record: item)\n end\n sort(array: deduplicate(records: filter(array: array)))\n end" ]
[ "0.5970906", "0.5812563", "0.5770197", "0.5332086", "0.528021", "0.5234157", "0.5077164", "0.50481206", "0.49992508", "0.49362028", "0.49212083", "0.48143524", "0.4779476", "0.47244877", "0.47005835", "0.4669268", "0.46248737", "0.45822784", "0.45342234", "0.45327076", "0.45185488", "0.45082545", "0.44628105", "0.44424525", "0.4432155", "0.44209915", "0.44065017", "0.43912905", "0.43831512", "0.43776473", "0.43497798", "0.4348241", "0.4346812", "0.43228233", "0.42988783", "0.42896658", "0.42896092", "0.42864552", "0.42863387", "0.42768863", "0.42729834", "0.42626062", "0.42546368", "0.4251506", "0.4241854", "0.42412975", "0.42309695", "0.421512", "0.42144352", "0.4210769", "0.4209326", "0.42080742", "0.41990024", "0.41968253", "0.4194548", "0.41902384", "0.4190147", "0.41741365", "0.41632304", "0.41525468", "0.41486534", "0.41480234", "0.41156906", "0.4113832", "0.41125086", "0.4111201", "0.4109451", "0.41081434", "0.41076726", "0.4103164", "0.41022414", "0.4096021", "0.4082406", "0.4076289", "0.40740496", "0.40659732", "0.40607706", "0.4060345", "0.40568593", "0.4052449", "0.405003", "0.4046456", "0.40400845", "0.40388575", "0.40359163", "0.40337345", "0.40336987", "0.40292725", "0.4026754", "0.40238073", "0.40155512", "0.40154874", "0.40116552", "0.4010055", "0.4009251", "0.4008684", "0.40056202", "0.40049648", "0.40036583", "0.39968395" ]
0.63428754
0
Compare the addresses in the specified +records+ with the given +addresses+, and report an error if any are not present.
def compare_a_records( records, addresses ) record_addresses = Set.new( records.map(&:address) ) addresses = Set.new( addresses.map {|addr| Resolv::IPv4.create(addr)} ) status = nil if addresses.subset?( record_addresses ) status = { a_record: {addresses: record_addresses.map(&:to_s)} } else missing = addresses - record_addresses status = { error: "missing A records: %s" % [ missing.map(&:to_s).join(', ') ] } end return status end
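A minimal, self-contained sketch of how the comparison above might be exercised, assuming only Ruby's stdlib resolv and set. The address values (TEST-NET examples) and the standalone restatement of the method body are invented for illustration; in practice the records would typically come from a resolver lookup such as Resolv::DNS#getresources(name, Resolv::DNS::Resource::IN::A).

require 'resolv'
require 'set'

# Standalone restatement of compare_a_records so the sketch runs on its own.
def compare_a_records(records, addresses)
  record_addresses = Set.new(records.map(&:address))
  wanted = Set.new(addresses.map { |addr| Resolv::IPv4.create(addr) })

  if wanted.subset?(record_addresses)
    { a_record: { addresses: record_addresses.map(&:to_s) } }
  else
    missing = wanted - record_addresses
    { error: "missing A records: %s" % [missing.map(&:to_s).join(', ')] }
  end
end

# Hypothetical A records, shaped like what a resolver lookup would return.
records = [
  Resolv::DNS::Resource::IN::A.new('192.0.2.10'),
  Resolv::DNS::Resource::IN::A.new('192.0.2.11')
]

p compare_a_records(records, ['192.0.2.10'])                # a_record status listing both addresses
p compare_a_records(records, ['192.0.2.10', '192.0.2.99'])  # error: "missing A records: 192.0.2.99"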
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compare_ns_records( records, hosts )\n\t\t\trecord_hosts = Set.new( records.map(&:name) )\n\t\t\thosts = Set.new( hosts.map {|name| Resolv::DNS::Name.create(name + '.')} )\n\n\t\t\tself.log.debug \"Comparing %p to %p\" % [ record_hosts, hosts ]\n\n\t\t\tstatus = nil\n\t\t\tif ( record_hosts ^ hosts ).empty?\n\t\t\t\tstatus = { ns_record: record_hosts.map(&:to_s) }\n\t\t\telsif !( subset = record_hosts - hosts ).empty?\n\t\t\t\tstatus = { error: \"missing NS records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\telsif !( subset = hosts - record_hosts ).empty?\n\t\t\t\tstatus = { error: \"extra NS records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\tend\n\n\t\t\treturn status\n\t\tend", "def compare_mx_records( records, hosts )\n\t\t\trecord_hosts = Set.new( records.map(&:exchange) )\n\t\t\thosts = Set.new( hosts.map {|name| Resolv::DNS::Name.create(name + '.')} )\n\n\t\t\tself.log.debug \"Comparing %p to %p\" % [ record_hosts, hosts ]\n\n\t\t\tstatus = nil\n\t\t\tif ( record_hosts ^ hosts ).empty?\n\t\t\t\trecord_strings = records.\n\t\t\t\t\tmap {|rec| \"%s[%d]\" % [rec.exchange, rec.preference || 0] }\n\t\t\t\tstatus = {\n\t\t\t\t\tmx_record: record_strings.join( ', ' )\n\t\t\t\t}\n\t\t\telsif !( subset = record_hosts - hosts ).empty?\n\t\t\t\tstatus = { error: \"missing MX records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\telsif !( subset = hosts - record_hosts ).empty?\n\t\t\t\tstatus = { error: \"extra MX records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\tend\n\n\t\t\treturn status\n\t\tend", "def match_records(records)\n records.select do |record|\n conditions.matches?(record)\n end\n end", "def compare_values( records, node_data )\n\t\t\ttype = node_data['record_type']\n\n\t\t\tcase type\n\t\t\twhen 'A'\n\t\t\t\treturn self.compare_a_records( records, node_data['values'] )\n\t\t\twhen 'NS'\n\t\t\t\treturn self.compare_ns_records( records, node_data['values'] )\n\t\t\twhen 'MX'\n\t\t\t\treturn self.compare_mx_records( records, node_data['values'] )\n\t\t\telse\n\t\t\t\treturn { dns: \"#{type} not comparable yet.\" }\n\t\t\tend\n\t\tend", "def diff_zone_file(zone, records)\n #Compare dumped strings directly instead of RR objects\n zone_records = zone.records.map{ |rec|\n rec.dump\n }\n recs = records.map{ |rec|\n rec.dump\n }\n removed_records = zone_records - recs\n added_records = recs - zone_records\n if $options[:verbose]\n if removed_records.any?\n puts \"Removed records in zone file: #{zone.file_path}\"\n removed_records.each{ |rec|\n puts rec\n }\n end\n if added_records.any?\n puts \"Added records in zone file: #{zone.file_path}\"\n added_records.each{ |rec|\n puts rec\n }\n end\n end\n return added_records.any? || removed_records.any?\nend", "def wanted_records(records, domains)\n _records = records.select { |m| domains.include?(m[1]) }\n _records = _records.each_cons(2).select { |a, b| a.last == b.last }\n _records = _records.group_by(&:last).keys.map do |v|\n { record_id: v.first, ip_address: v.last }\n end\n _records\n end", "def delete_invalid_records(records)\n to_be_deleted = records.select do |record|\n EVENT_ID_IDENTIFIER_MAPPING[record.start_event_identifier].nil? ||\n EVENT_ID_IDENTIFIER_MAPPING[record.end_event_identifier].nil?\n end\n\n to_be_deleted.each(&:delete)\n records - to_be_deleted\n end", "def has_address?(addresses, addy)\n addy == addresses.bsearch { |a| addy <= a }\n end", "def check_import(mode, address_names) #, address_emails\n\n err_msgs = []\n\n # Existing Addresss\n unless self.id.nil? 
or self.id == 0 or self.id == ''\n if mode == 'add'\n err_msgs << I18n.t('address.import.dont_specify_id')\n else\n begin\n org_address = Address.find(self.id)\n rescue\n end\n if org_address.nil?\n err_msgs << I18n.t('address.import.not_found')\n end\n end\n end\n\n # Requierd\n if self.name.nil? or self.name.empty?\n err_msgs << Address.human_attribute_name('name') + I18n.t('msg.is_required')\n end\n\n # Groups\n unless self.groups.nil? or self.groups.empty?\n\n if (/^|([0-9]+|)+$/ =~ self.groups) == 0\n\n self.get_groups_a.each do |group_id|\n group = Group.find_by_id(group_id)\n if group.nil?\n err_msgs << I18n.t('address.import.not_valid_groups') + ': '+group_id.to_s\n break\n end\n end\n else\n err_msgs << I18n.t('address.import.invalid_groups_format')\n end\n end\n\n # Teams\n unless self.teams.nil? or self.teams.empty?\n\n if (/^|([0-9]+|)+$/ =~ self.teams) == 0\n\n self.get_teams_a.each do |team_id|\n team = Team.find_by_id(team_id)\n if team.nil?\n err_msgs << I18n.t('address.import.not_valid_teams') + ': '+team_id.to_s\n break\n end\n end\n\n else\n err_msgs << I18n.t('address.import.invalid_teams_format')\n end\n end\n\n return err_msgs\n end", "def compute_reverse_records(site_uid, site_records)\n reverse_records = {}\n\n site_records.each { |zone, records|\n # Sort records\n site_records[zone] = sort_records(records)\n\n records.each{ |record|\n # Get reverse records\n reverse_file_name, reverse_record = get_reverse_record(record, site_uid)\n if reverse_file_name != nil\n reverse_records[reverse_file_name] ||= []\n reverse_records[reverse_file_name].each {|r|\n if r.label == reverse_record.label\n puts \"Warning: reverse entry with address #{reverse_record.label} already exists in #{reverse_file_name}, #{reverse_record.name} is duplicate\"\n end\n }\n reverse_records[reverse_file_name] << reverse_record\n end\n }\n }\n\n reverse_records\nend", "def print_records(records)\n\tprint_status(\"Records found:\")\n\trecords.each do |r|\n\t\tprint_good(\"\\tHost: #{r[:host]}\")\n\t\tprint_good(\"\\tIP: #{r[:ip]}\")\n\t\tprint_good(\"\\tPort: #{r[:port]}\")\n\t\tprint_good(\"\\tService:#{r[:service]}\")\n\t\tprint_good(\"\\tText:#{r[:txt]}\")\n\t\tprint_good(\"\")\n\tend\nend", "def add_records(records)\n atoms = {}\n\n records.each do |record|\n next unless @if_proc.call(record)\n\n condensed_record = condense_record(record)\n atoms = add_occurences(condensed_record, record.id, atoms)\n end\n\n @storage.add(atoms)\n end", "def verify_record_or_report_errors(record, status, record_checks)\n record_checks.collect { |check| check.verify_or_report_errors(record, status) }.\n reject { |r| r }.empty? # does the result contain anything that isn't truthy?\n end", "def remove_bad_business_records(records)\n records.each do |rec|\n malformed_found = false\n if (rec.fields[\"name\"].nil?)\n\trec.is_malformed = true\n\trec.add_malformed_message(\"ERROR: INVALID NAME\")\n\t@bad_business_records << rec\n\tmalformed_found = true\n end\n \n if (!rec.fields[\"email\"].nil? 
&& VALID_EMAIL_REGEX.match(rec.fields[\"email\"]).nil?)\n\trec.is_malformed = true\n\trec.add_malformed_message(\"ERROR: INVALID EMAIL\")\n\tif (!malformed_found)\n\t @bad_business_records << rec\n\t malformed_found = true\n\tend\n end\n \n if (!rec.is_malformed)\n\t@good_business_records << rec\n end\n end\n end", "def addresses=(addresses)\n set_collection(CapsuleCRM::Address, addresses)\n end", "def add_records(records)\n atoms = ActiveSupport::OrderedHash.new\n records_count = 0\n\n records.each do |record|\n next unless allow_indexing?(record)\n records_count += 1\n\n condensed_record = condense_record(record)\n atoms = add_occurences(condensed_record, record.id, atoms)\n end\n\n @storage.add(atoms, records_count)\n end", "def add_records(records)\n records.each do |r|\n condensed_record = condense_record(r)\n load_atoms(condensed_record)\n add_occurences(condensed_record,r.id)\n @records_size += 1\n end\n end", "def check_all_records\n models.each do |model|\n begin\n # TODO: Can we filter based on those records that are already present in the 'invalid_records' table - especially since they have been re-verified in the method before?\n model.find_each(batch_size: Checker.batch_size) do |record|\n invalid_record!(record) unless record.valid?\n end\n rescue => e\n # Rescue from exceptions (table does not exists,\n # deserialization error, ...)\n puts e.message\n puts \"Skipping validations for #{model}\"\n end\n end\n end", "def verify_address_owners\n if complete?\n errors.add(:bill_address, 'Billing address should not have a user') if bill_address.try(:user_id)\n errors.add(:ship_address, 'Shipping address should not have a user') if ship_address.try(:user_id)\n else\n if bill_address && bill_address.valid? && !bill_address.user_id.nil? && bill_address.user_id != user_id\n errors.add(\n :bill_address,\n \"Billing address user #{bill_address.user_id.inspect} does not match order #{user_id.inspect}\"\n )\n end\n\n if ship_address && ship_address.valid? && !ship_address.user_id.nil? 
&& ship_address.user_id != user_id\n errors.add(\n :ship_address,\n \"Shipping address user #{ship_address.user_id.inspect} does not match order #{user_id.inspect}\"\n )\n end\n end\n end", "def remove_bad_user_records(records)\n \n records.each do |rec|\n malformed_found = false\n if (VALID_EMAIL_REGEX.match(rec.fields[\"email\"]).nil?)\n\trec.is_malformed = true\n\trec.add_malformed_message(\"ERROR: INVALID EMAIL\")\n\t@bad_user_records << rec\n\tmalformed_found = true\n end\n \n if (rec.fields[\"name\"].nil?)\n\trec.is_malformed = true\n\trec.add_malformed_message(\"ERROR: INVALID NAME\")\n\tif (!malformed_found)\n\t @bad_user_records << rec\n\t malformed_found = true\n\tend\n end\n \n if (rec.fields[\"businessname\"].nil?)\n\trec.is_malformed = true\n\trec.add_malformed_message(\"ERROR: INVALID BUSINESS NAME\")\n\tif (!malformed_found)\n\t @bad_user_records << rec\n\tend\n end\n \n if (!rec.is_malformed)\n\t@good_user_records << rec\n\tmalformed_found = true\n end\n end\n end", "def findClosestMatch(records, templateName)\n # Build a placeholder to store the selected notification template\n selected_record = nil\n recipient_language = @recipient_json['language'].to_s\n recipient_region = @recipient_json['region'].to_s\n\n # Return an error if no notification template was found\n if records.length == 0\n @error_message = \"The following Notification Template or snippet was not located: #{templateName}\\n\\n\"\n # If only one template is returned, or the user has no preferences use the first match\n elsif records.length == 1 || (recipient_language.empty? && recipient_region.empty?)\n selected_record = records[0]['values']\n puts \"Only one record returned for #{templateName}, OR no preferences found so selected first\" if @debug_logging_enabled\n else\n # Select a template based on users preferences\n # Define an array of preferences for each record returned\n recordPreferences = records.map do |record|\n {\n 'id' => record['id'],\n 'language' => record['values']['Language'],\n 'region' => record['values']['Region'],\n 'score' => 0,\n }\n end\n # Loop over each record and try to match it\n recordPreferences.each do |record|\n language = record['language'].to_s\n region = record['region'].to_s\n # Test to see if both language and region match if neither are empty\n if recipient_language == language && recipient_region == region && (!recipient_region.empty? && !region.empty?) && (!recipient_language.empty? && !language.empty?)\n record['score'] += 3\n puts \"Matched on Language and Region for Template #{templateName}\" if @debug_logging_enabled\n # Test to see if a language matches if they are not empty\n elsif recipient_language == language && (!recipient_language.empty? && !language.empty?)\n record['score'] += 2\n puts \"Matched on Language only for Template #{templateName}\" if @debug_logging_enabled\n # Test to see if a region matches\n elsif recipient_region == region && (!recipient_region.empty? && !region.empty?)\n record['score'] += 1\n puts \"Matched on Region only for Template #{templateName}\" if @debug_logging_enabled\n end\n puts \"Score is #{record['score']} for Template #{templateName}\" if @debug_logging_enabled\n end\n\n # Determine which record should be choosen as the selected record\n closestMatch = recordPreferences.max_by { |element| element['score'] }\n # Get the ID so we can select this record. If multiple had the same score, choose the first\n closestMatch.kind_of?(Array) ? 
closestMatch = closestMatch[0]['id'] : closestMatch = closestMatch['id']\n # Set the selected record to be returned\n selected_record = records.find { |match| match['id'] == closestMatch }['values']\n end\n # Return the selected record\n return selected_record\n end", "def test_address_auth \n c = customers(:bob)\n assert_equal(addresses(:address1), c.find_address(1))\n assert_equal(addresses(:address2), c.find_address(2))\n assert_nil(c.find_address(3))\n assert_nil(c.find_address(100000))\n end", "def address_match_match(clientid, addresses, opts = {})\n data, _status_code, _headers = address_match_match_with_http_info(clientid, addresses, opts)\n return data\n end", "def filter_records(records)\n records = records.uniq if unique?\n records = match_records(records)\n records = sort_records(records)\n records = limit_records(records)\n records\n end", "def locationsCompare(arr1, arr2)\n if arr2.length == 0\n return false\n elsif arr1.length != arr2.length || arr1 & arr2 != arr1\n return true\n end\n return false\n end", "def format_records(records)\n records.each do |record|\n report record\n end\n end", "def validate_matches\n return success unless (@matched_ids.present? || @unmatched_ids.present?)\n\n # common ids in matches_ids and unmatched_ids\n r = validate_matched_unmatched_records\n return r unless r.success?\n\n success\n\n end", "def assert_valid_address_from_attributes(attributes, lines)\n address = LocalPostal::Address.new(attributes)\n\n assert address.valid?, address.errors.full_messages.join(' and ')\n assert_equal lines, address.lines, 'invalid address lines'\n end", "def prefer_ipv4(addresses)\n return nil unless addresses.is_a?(Array)\n\n addresses.find { |ip| IPAddress.valid_ipv4?(ip) } ||\n addresses.find { |ip| IPAddress.valid_ipv6?(ip) }\n end", "def ==(other)\n records == Array(other)\n end", "def touch_addresses\n if changes.include?(:bill_address_id) && self.bill_address.present?\n self.bill_address.touch\n end\n\n if changes.include?(:ship_address_id) && self.ship_address.present?\n self.ship_address.touch\n end\n end", "def address_matches\n filtered_matches(ignore: [:first_name, :family_name], perfect: [:street, :city])\n end", "def address_matches(spec, address)\n (0..3).all? { |i| spec[i]=='*' || spec[i]==address[i] }\nend", "def merge(addresses)\n return if addresses.empty? # nothing to do here\n\n update = { '$set' => { wallet: id(addresses) } }\n if addresses.is_a? Array\n addresses.each { |address| upsert({ _id: address }, update) }\n else\n upsert({ _id: addresses }, update)\n end\n end", "def records=(records)\n raise(ArgumentError, \"Cannot assign type: #{records.class.name} to records\") unless records.is_a?(Array)\n\n @records = records\n end", "def delete(*records)\n records = flatten_deeper(records)\n records.each { |record| raise_on_type_mismatch(record) }\n delete_records(records)\n records.each { |record| @collection.delete(record) } if loaded?\n end", "def check_for_payment_addresses(parties)\n out = []\n parties.each do |party|\n res = party.self_check\n party.errors.map{|err| @errors << \"Party #{ party.name } #{ err }\"} unless res == true\n out << res\n end\n \n out.include?(false)\n end", "def deduplicate(records:)\n return [] unless records.present? 
&& records.is_a?(Array)\n\n out = []\n found = []\n records.each do |rec|\n next if found.include?(rec[:sort_name]) || found.include?(rec[:id])\n\n found << rec[:sort_name]\n found << rec[:id] if rec[:id].present?\n out << rec\n end\n out\n end", "def parse_dns(dns_raw)\n dns = []\n dns_records = {}\n record_type_A = []\n record_type_A_IP = []\n record_type_CNAME = []\n record_type_CNAME_alias = []\n\n #adds each line to dns array and splipt them with \",\"\n dns_raw.each do |lines_in_files|\n dns.push([lines_in_files.split(\",\")])\n end\n\n #Checks for recordA,IP or recordCNAME and adds them to the respected array\n dns.each do |words_in_files|\n if words_in_files[0][0] == \"A\"\n record_type_A.push(words_in_files[0][1].strip)\n record_type_A_IP.push(words_in_files[0][2].strip)\n elsif words_in_files[0][0] == \"CNAME\"\n record_type_CNAME.push(words_in_files[0][1].strip)\n record_type_CNAME_alias.push(words_in_files[0][2].strip)\n end\n end\n\n #record_A hash stores values of recordA\n record_A = {\n :source => record_type_A,\n :ip => record_type_A_IP,\n }\n\n #recordCNAME hash stores values of recordCNAME\n record_CNAME = {\n :source => record_type_CNAME,\n :alias => record_type_CNAME_alias,\n }\n\n #dns_records gets both Hashes\n dns_records = {\n :A => record_A,\n :CNAME => record_CNAME,\n }\n\n #returns record dns_record with two hashes.\n return dns_records\nend", "def overlaps?(record)\n events = Event.find_all_by_nurse_id(record.nurse_id)\n events.each do |e|\n if e.id == record.id\n next\n end\n\n # start dates overlap\n if (e.start_at.to_date <= record.start_at.to_date) and (record.start_at.to_date <= e.end_at.to_date)\n return true\n end\n # end dates overlap\n if (e.start_at.to_date <= record.end_at.to_date) and (record.end_at.to_date <= e.end_at.to_date)\n return true\n end\n # e contains record\n if (e.start_at.to_date < record.start_at.to_date) and (record.end_at.to_date < e.end_at.to_date)\n return true\n end\n # record contains e\n if (e.start_at.to_date > record.start_at.to_date) and (record.end_at.to_date > e.end_at.to_date)\n return true\n end \n end\n return false\n end", "def delete_records(records)\n if sql = @reflection.options[:delete_sql]\n records.each { |record| @owner.connection.delete(interpolate_and_sanitize_sql(sql, record)) }\n else\n\n relation = Arel::Table.new(@reflection.options[:join_table], arel_engine)\n \n relation.where(relation[@reflection.primary_key_name].eq(@owner.id).\n and(relation[@reflection.association_foreign_key].in(records.map { |x| x.id }.compact))\n ).delete\n end\n end", "def dns_check\n gen_host_records # These are the hosts we have\n load_all_subnets # These are the DNS entries\n \n # We want a standard layout, with the hypervisor API entries being \n @host_record.each do |hr| # Array of host record Hash's\n hn = hr[:hostname]\n shn = hn.split('.',2)[0] # Remove the domain\n forward_hr = @forward_host_record[hn] # Find Host Record\n if forward_hr.nil?\n # We have no IPAM entry for this hostname\n if (rhr = @reverse_host_records[hr[:ip]])\n puts \"Only Reverse IPAM entry for #{shn}: #{rhr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n else\n puts \"No IPAM entry for hostrecord: #{hr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n end\n else\n # We have an IPAM record for this hostname\n if forward_hr[:ip] != hr[:ip]\n puts \"IP mismatch #{shn} #{hr[:ip]} != #{forward_hr[:ip]} for IPAM: #{forward_hr}\"\n elsif forward_hr[:hostname] != hn\n # 
Reference must be via ALIASES or CNAMES\n if forward_hr[:aliases].include?(shn)\n puts \"Hostname #{shn} is an ALIAS. IPAM: #{forward_hr}\"\n elsif forward_hr[:cnames].include?(hn)\n puts \"Hostname #{shn} is a CNAME. IPAM: #{forward_hr}\"\n end\n end\n end\n end\n \n # We want to find IPAM entries, not matching existing @host_record entries\n @reverse_host_records.each do |ip, ahr| # Hash to array of host records from IPAM, indexed by IP\n ahr.each do |hr| # One IP can have multiple host records, with associated ALIAS and CNAME records\n local_hr = @host_record_index[hr[:hostname]]\n if local_hr.nil?\n puts \"No local entry #{hr[:hostname]} for #{hr}\"\n end\n end\n end\nend", "def check_list(records)\n entering = []\n exiting = []\n status = {}\n records.each do |user, action|\n prev = status[user]\n\n if !exiting.include?(user)\n # EXITING: recorded an enter without a matching exit\n exiting << user if action == \"enter\" && !(prev == \"exit\" || prev.nil?)\n end\n\n if !entering.include?(user)\n # ENTERING: recorded an exit without a matching enter.\n entering << user if action == \"exit\" && prev != \"enter\"\n end\n\n status[user] = action\n end\n\n # EXITING: recorded an enter without a matching FINAL exit\n status.each_pair do |user, action|\n unless exiting.include?(user)\n exiting << user if action == \"enter\"\n end\n end\n [exiting, entering]\nend", "def set_coordinates\n addrs = [\n \"#{address_line_1}\",\n \"#{address_line_2}\",\n \"#{address_line_1}, #{address_line_2}\"\n ]\n catch(:geocoded) do\n addrs.each do |addr|\n begin\n loc = MultiGeocoder.geocode(\n \"#{addr}, #{city}, #{state} #{zip}\"\n )\n if loc.success\n self.lat = loc.lat\n self.lng = loc.lng\n throw :geocoded\n end\n\n rescue Exception => ex\n puts \" -> #{addr} did not resolve\"\n puts \" -> #{ex.message}\"\n end\n end\n puts \"did not match any combination of address1 and address2\"\n end\n end", "def map_addresses(logger,eventid,input_addresses)\n input_addresses.map do |a| \n addressid = logger.addresses(eventid,a)\n begin\n result = Geokit::Geocoders::GoogleGeocoder.geocode(a)\n rescue\n return \"GeoError: Either there is a problem reaching Google maps or the input is invalid #{a}\"\n end\n if ((result.lat).kind_of? Float ) && ((result.lng).kind_of? 
Float)\n logger.address_coordinates(addressid,[to_radians(result.lat),to_radians(result.lng)])\n [to_radians(result.lat), to_radians(result.lng)]\n else\n return \"Validation Error: #{a} is not a valid address input.\"\n end\n end\n end", "def expect_order_addresses(order)\n order.reload\n expect_selected(order.bill_address, :order, :bill)\n expect_selected(order.ship_address, :order, :ship)\n\n expect(page).to have_content(content_regex(order.bill_address)) if order.bill_address\n expect(page).to have_content(content_regex(order.ship_address)) if order.ship_address\n end", "def records=(records)\n clear\n concat(records)\n end", "def check_record(rec)\n @mapping.each { |mapping|\n unless rec[mapping[:field_name]].blank?\n if mapping.has_key?(:format) && !mapping[:format].match(rec[mapping[:field_name]])\n Rails.logger.error([\"invalid value for field #{mapping[:name]} [#{rec[mapping[:field_name]]}]\", \"#{@filename}, line #{@linenumber}\", \"SchoolDataImporter\"].join(\" - \"))\n return false\n end\n end\n }\n return true\n end", "def sort_records(records)\n sort_order = order.map { |direction| [ direction.target, direction.operator == :asc ] }\n\n records.sort_by do |record|\n sort_order.map do |(property, ascending)|\n Sort.new(record_value(record, property), ascending)\n end\n end\n end", "def household_with_same_address_search_results\n\t\t@new_household_address = Address.find(params[:new_household_address_id].to_i)\n\t\t@household = Household.find(params[:current_household_id].to_i)\n\t\t@address_search_results = Address.search_any_household_in_this_address(params[:new_household_address_id].to_i)\n\t\t# Rails.logger.debug(\"@address_search_results = #{@address_search_results.inspect}\")\n\trescue => err\n\t\terror_object = CommonUtil.write_to_attop_error_log_table(\"HouseholdAddressChangesController\",\"household_with_same_address_search_results\",err,current_user.uid)\n\t\tflash[:alert] = \"Error occurred when showing households residing in same address, for more details refer to error ID: #{error_object.id}.\"\n\t\tredirect_to_back\n\tend", "def check_for_invalid_external_references(record, logical_urls)\n if record.respond_to?(:to_array)\n record.each {|e| check_for_invalid_external_references(e, logical_urls)}\n elsif record.respond_to?(:each)\n record.each do |k, v|\n if k == 'ref' && !logical_urls.has_key?(v)\n URIResolver.ensure_reference_is_valid(v, RequestContext.get(:repo_id))\n elsif k != '_resolved'\n check_for_invalid_external_references(v, logical_urls)\n end\n end\n end\n end", "def multi_addr(addresses)\n get(\"addr/\"+addresses.join(\",\")+\"/balance?noCache=1\")\n end", "def get_address_to_change(addresses, sale)\n addresses.each do |address|\n return address if address[\"id\"] == sale[\"address_id\"]\n end\n raise \"couldn't find the address associated with the sale\"\n end", "def any_records?(type)\n @records.any? { |rec| rec.is_a?(type) && rec.valid? 
}\n end", "def check_addresses\n\n if location = @found_params.entities.detect {|entity| entity.name == \"from\"} || @ride.start_address\n if @ride.start_address\n address = @ride.start_address\n else\n address = geocode(location.value)\n end\n\n @ride.start_address = address\n @ride.save\n\n geo = Geocoder.search(\"#{address.latitude},#{address.longitude}\").first.address_components\n @start_address_nice = geo.first[\"short_name\"] + \" \" + geo.second[\"short_name\"] + \" à \" + geo.third[\"short_name\"]\n\n\n @time = UberService.new(@ride).time_estimates\n @time = @time / 60 if @time.class == Fixnum\n\n end\n\n if location = @found_params.entities.detect {|entity| entity.name == \"to\"} || @ride.end_address\n if @ride.end_address\n address = @ride.end_address\n else\n address = geocode(location.value)\n end\n\n @ride.end_address = address\n @ride.save\n\n geo = Geocoder.search(\"#{address.latitude},#{address.longitude}\").first.address_components\n @end_address_nice = geo.first[\"short_name\"] + \" \" + geo.second[\"short_name\"] + \" à \" + geo.third[\"short_name\"]\n\n end\n\n if (location = @found_params.entities.detect {|entity| entity.name == \"address\"}) && (@ride.end_address || @ride.start_address)\n\n address = geocode(location.value)\n geo = Geocoder.search(\"#{address.latitude},#{address.longitude}\").first.address_components\n nice_address = geo.first[\"short_name\"] + \" \" + geo.second[\"short_name\"] + \" à \" + geo.third[\"short_name\"]\n\n\n if @ride.start_address\n @ride.end_address = address\n @end_address_nice = nice_address\n else\n @ride.end_address = address\n @start_address_nice = nice_address\n end\n\n @ride.save\n end\n\n if [email protected]_address.nil? && [email protected]_address.nil?\n @price = UberService.new(@ride).price_estimates\n end\n end", "def create\n @record = Record.new(record_params)\n\n if @record.type_record == \"CNAME\"\n # Se o tipo é um CNAME, não pode existir outro Record, \n # com o mesmo nome, no mesmo Domain.\n\n records_domain = Record.where(domain_id: @record.domain_id)\n\n records_domain.each do |r|\n \n if @record.name == r.name\n \n respond_to do |format|\n format.html { \n flash[:notice] = 'O Record não foi inserido! Existe um Record com mesmo nome no mesmo Domain'\n render :new\n }\n format.json { render json: @record.errors, status: :unprocessable_entity }\n end\n return \n end\n\n end\n \n else\n # Se o tipo não é um CNAME, não pode existir um Record do tipo CNAME, \n # com o mesmo nome, no mesmo Domain.\n\n records_cname_domain = Record.where(type_record: \"CNAME\", name: @record.name,\n domain_id: @record.domain_id).count()\n\n if records_cname_domain != 0\n\n respond_to do |format|\n format.html { \n flash[:notice] = 'O Record não foi inserido! Existe um Record do tipo CNAME com mesmo nome no mesmo Domain'\n render :new\n }\n format.json { render json: @record.errors, status: :unprocessable_entity }\n end\n\n return\n end\n\n end\n\n\n respond_to do |format|\n if @record.save\n format.html { redirect_to @record, notice: 'Record inserido com sucesso!' }\n format.json { render :show, status: :created, location: @record }\n else\n format.html { \n flash[:notice] = 'O Record não pode ser inserido! '\n render :new\n }\n format.json { render json: @record.errors, status: :unprocessable_entity }\n end\n end\n end", "def validate_dns_a_record(domain, expected_ip_address)\n return true if skip_validations?\n say \"Checking that DNS #{domain.green} resolves to IP address #{expected_ip_address.green}... 
\", \" \"\n packet = Net::DNS::Resolver.start(domain, Net::DNS::A)\n resolved_a_records = packet.answer.map(&:value)\n if packet.answer.size == 0\n error = \"Domain '#{domain.green}' does not resolve to an IP address\"\n end\n unless resolved_a_records == [expected_ip_address]\n error = \"Domain #{domain} should resolve to IP address #{expected_ip_address}\"\n end\n if error\n say \"ooh no!\".red\n say \"Please setup your DNS:\"\n say \"Subdomain: * \" + \"(wildcard)\".yellow\n say \"IP address: #{expected_ip_address}\"\n err(error)\n else\n say \"ok\".green\n true\n end\n end", "def add_record record\n # Record all leases, since the definition of a duplicate depends on whether we\n # are searching by ip or mac. Arrays have fixed sort order so we can rely on this\n # being the order they were read from the file\n @records.push record\n logger.debug \"Added #{record} to #{to_s}\"\n return true\n end", "def matches?(another_address)\n return(false) if another_address.nil?\n attrs_to_match = [:kind, :address_1, :address_2, :address_3, :city, :state, :zip]\n attrs_to_match.all? { |attr| attribute_matches?(attr, another_address) }\n end", "def test_address_validation\n response = nil\n assert_nothing_raised do\n #response = @carrier_prod.validate_addresses({'address_from' => @locations[:ottawa], 'address_to' => @locations[:beverly_hills]}, :test=>false)\n @carrier_prod.validate_addresses({'address_from' => Location.new(\n :country => 'US',\n :state => 'TX',\n :city => 'Houston',\n :address1 => '11811 North Freeway',\n :address2 => 'suite 500',\n :zip => '77060'), \n 'address_to' => Location.new(:country => 'US',\n :state => 'NY',\n :city => 'Brooklyn',\n :address1 => '7 Balfour pl',\n :address2 => 'Apt E3',\n :zip => '11225')})\n end\n end", "def match\n $people_array.each_with_index do |person, i|\n if person.lname.downcase == $pairs[i].lname.downcase\n return false\n end\n end\n return true\nend", "def validate_911(address1, address2, city, state, zip, plus_four, caller_name)\n self.arguments = {\n address1: address1, \n address2: address2, \n city: city,\n state: state,\n zip: zip,\n plus_four: plus_four,\n caller_name: caller_name,\n }\n self.action = :validate911\n self.response = VoipApi.account.request(self.action, self.klass, self.arguments)\n self\n end", "def find_address_duplicates(master_customer)\n if master_customer.pxid != nil\n duplicate_by_address = Customer.select(:id).where(\"pxid = '#{master_customer.pxid}'\n and id < #{master_customer.id}\")\n end\nend", "def difffiles(fileone, filetwo, addresskeys)\n fileoneresult = String.new\n filetworesult = String.new\n arry = Array.new\n addresskeys.each do |key|\n fileoneresult = balongnvtool(fileone, key)\n filetworesult = balongnvtool(filetwo, key)\n unless fileoneresult.eql? filetworesult\n #Determine the name of the feild that has a diff\n arry.push(getdifname(fileoneresult))\n end\n end\n cleanedarry = removebandspecificaddresses(arry)\n puts cleanedarry\n return cleanedarry\n end", "def addresses\n h2 :addresses\n @record.addresses.empty? ? p('-') : addresses_table\n gap\n end", "def dns_update(zone, records)\n update = Dnsruby::Update.new(zone)\n records.each do |r|\n if r.type.upcase == 'ADD'\n s = \"#{Domain} 3600 #{Type} #{RDATA}\"\n rr = Dnsruby::RR.create(s)\n update.add(rr)\n else\n update.delete(r['Domain'], r['Type'], r['RDATA'])\n end\n end\n update\n end", "def merge(streets, emails)\n\n # No street address match. 
Return e-mail match [if any]\n if streets.count == 0\n return emails.first\n end\n\n # Multiple street address match. Search by name\n #\n # The odds of two people with the same exact first AND last name\n # living at the same address is low enough to be negligible.\n if street = streets.select { |resident| resident.first_name == self.first_name }\n if street.count > 1\n # Well, what are the odds? =(\n end\n\n street = street.first\n if emails.count == 0\n return street\n end\n\n # Check to see if they're the same file\n email = emails.first\n if street == email\n return street\n end\n\n # Merge them if they're not the same file\n if street.present? and email.present?\n street.email = email.email\n email.address = street.address\n email.add_tags(street.tags)\n street.destroy\n end\n\n return email\n end\n\n # None of the names matched\n return emails.first\n end", "def emailable_records(records, email_format)\n @emailable_records ||= records.collect{ |record| emailable_record(record, email_format) }\n end", "def verify_oldest_records\n log(:info, \"#{Time.now}: ***** Start Verifying Oldest Records *****\")\n current_deploy =\n if (`cat #{File.expand_path ''}/REVISION`).blank?\n 'None'\n else\n `cat #{File.expand_path ''}/REVISION`\n end\n bq_attributes.each do |model, attrs|\n # add any records to oldest_records that are new (Or more simply make sure that that there is a record using find_by_or_create)\n attrs.each do |attr|\n OldestRecord.find_or_create_by(table_name: model, attr: attr)\n end\n # delete any records that are not in bq_attributes\n OldestRecord.where(table_name: model).each do |record|\n record.destroy unless attrs.include?(record.attr.to_sym)\n end\n end\n update_revision = OldestRecord.find_or_create_by(table_name: '! revision !')\n update_revision.update(attr: current_deploy, archived: true)\n log(:info, \"#{Time.now}: ***** End Verifying Oldest Records *****\")\n end", "def from_addresses=(value)\n @from_addresses = value\n end", "def valid_address?(address)\n all_addresses = get_all_addresses\n all_addresses.include? address\nend", "def cons_check(coords)\n coords.each_cons(2).all? do |first, second|\n if first[0] == second[0]\n first[1].ord + 1 == second[1].ord\n elsif first[0].ord + 1 == second[0].ord\n first[1] == second[1]\n else\n false\n end\n end\n end", "def identical?(record1, record2)\n if regexp\n if (str1_match_data = regexp.match(record1.whole)) and (str2_match_data = regexp.match(record2.whole))\n str1_match_data.captures.join.downcase == str2_match_data.captures.join.downcase\n else\n nil\n end\n else\n proc.call record1.original, record2.original\n end\n end", "def load(records)\n records.map do |h| \n r = WeatherObservation.first_or_create({:weather_station_id => h[:weather_station_id], :date => h[:date]}, h)\n if (r.id.nil?)\n $stderr.puts(\"failed to save #{r.inspect}, #{r.errors.full_messages}\")\n end\n end\n end", "def apply_filtering(records, params)\n return records unless params[:conditions]\n records.select! do |record|\n params[:conditions].all? do |key, value|\n db_value = record.send(key) if record.respond_to?(key)\n if key == :id\n value = value.is_a?(Array) ? 
value.map(&:to_i) : value.to_i\n end\n if value.is_a?(Array)\n value.include?(db_value)\n else\n db_value == value\n end\n end\n end\n records\n end", "def search_records(match)\n match = match.downcase\n conn.exec_params('SELECT * FROM contacts WHERE lower(first_name)=$1 OR lower(last_name)=$1', [match])\n end", "def validate\n unless address =~ /^(\\d{1,3}\\.){3}\\d{1,3}$/\n raise ValidationError, \"Invalid address\"\n end\n end", "def employer_address_line(first_address, last_address)\n return if percentage_similarity(first_address, last_address) < 0.97\n\n (first_address.length > last_address.length) ? first_address : last_address\n end", "def select_addresses(order_or_user, addresses={})\n visit_addresses(order_or_user)\n\n select_address(addresses[:user_bill], :user, :bill) if addresses[:user_bill]\n select_address(addresses[:user_ship], :user, :ship) if addresses[:user_ship]\n select_address(addresses[:order_bill], :order, :bill) if addresses[:order_bill]\n select_address(addresses[:order_ship], :order, :ship) if addresses[:order_ship]\n\n submit_addresses(!addresses[:fail])\n end", "def aramex_address_validation\n zones = Spree::ShippingMethod.where(['LOWER(admin_name) like ?', '%aramex%']).map(&:zones).flatten\n if zones.map(&:countries).flatten.map(&:iso).include?(country.iso)\n response = JSON.parse(validate_address(city, zipcode, country.iso))\n if response['HasErrors'] == true && errors[:zipcode].blank?\n if response['SuggestedAddresses'].present?\n errors.add(:base, response['Notifications'].map { |data| data['Message'] }.join(', ') + ', Suggested city name is - ' + response['SuggestedAddresses'].map { |data| data['City'] }.join(', '))\n else\n cities_response = JSON.parse(fetch_cities(country.iso, city[0..1]))\n errors.add(:base, cities_response['Notifications'].map { |data| data['Message'] }.join(', ') + ', Suggested city name is - ' + cities_response['Cities'].join(' ,'))\n end\n end\n end\n rescue\n return true\n end", "def update\n\n if @record.type_record == \"CNAME\"\n # Se o tipo é um CNAME, não pode existir outro Record, \n # com o mesmo nome, no mesmo Domain.\n\n records_domain = Record.where(domain_id: @record.domain_id)\n\n records_domain.each do |r|\n \n if @record.name == r.name\n \n respond_to do |format|\n format.html { \n flash[:notice] = 'O Record não foi atualizado! Existe um Record com mesmo nome no mesmo Domain'\n render :edit\n }\n format.json { render json: @record.errors, status: :unprocessable_entity }\n end\n\n return \n end\n\n end\n \n else\n # Se o tipo não é um CNAME, não pode existir um Record do tipo CNAME, \n # com o mesmo nome, no mesmo Domain.\n\n records_cname_domain = Record.where(type_record: \"CNAME\", name: params[:name],\n domain_id: @record.domain_id).where('id != ?', @record.id).count()\n\n if records_cname_domain != 0\n\n respond_to do |format|\n format.html { \n flash[:notice] = 'O Record não foi atualizado! Existe um Record do tipo CNAME com mesmo nome no mesmo Domain'\n render :edit\n }\n format.json { render json: @record.errors, status: :unprocessable_entity }\n end\n\n return\n end\n\n end\n\n\n\n respond_to do |format|\n if @record.update(record_params)\n format.html { redirect_to @record, notice: 'Record atualizado com sucesso!' }\n format.json { render :show, status: :ok, location: @record }\n else\n format.html { \n flash[:notice] = 'O Record não pode ser atualizado! 
'\n render :edit \n }\n format.json { render json: @record.errors, status: :unprocessable_entity }\n end\n end\n end", "def delete(*records)\n records.each do |record|\n target.delete(record)\n end\n end", "def overlaps_with(other_billing_record, min_proximity = 0)\n return true if start_time == other_billing_record.start_time\n first, second = nil, nil # Which record started first?\n if start_time < other_billing_record.start_time\n first, second = self, other_billing_record\n else\n first, second = other_billing_record, self\n end\n second.start_time - first.stop_time <= min_proximity\n end", "def parse(addresses, syntax_only = true)\n validate_addrs = addresses.join(\";\")\n\n res = @client.get \"address/parse\", {:addresses => validate_addrs,\n :syntax_only => syntax_only.to_s}\n return res.to_h!\n end", "def address_match_match_with_http_info(clientid, addresses, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: AddressNormalisationAndMatchingApi.address_match_match ...\"\n end\n # verify the required parameter 'clientid' is set\n fail ArgumentError, \"Missing the required parameter 'clientid' when calling AddressNormalisationAndMatchingApi.address_match_match\" if clientid.nil?\n # verify the required parameter 'addresses' is set\n fail ArgumentError, \"Missing the required parameter 'addresses' when calling AddressNormalisationAndMatchingApi.address_match_match\" if addresses.nil?\n # resource path\n local_var_path = \"/api/{clientid}/addressmatch/match\".sub('{format}','json').sub('{' + 'clientid' + '}', clientid.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json', 'text/json', 'application/xml', 'text/xml'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json', 'text/json', 'application/xml', 'text/xml', 'application/x-www-form-urlencoded'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(addresses)\n auth_names = ['basic']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'AddressMatchResponse')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: AddressNormalisationAndMatchingApi#address_match_match\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def diff(aws)\n diffs = []\n\n if @ttl != aws.ttl\n diffs << SingleRecordDiff.new(RecordChange::TTL, aws, self)\n end\n if [email protected]? and @value.sort != aws.resource_records.map(&:value).sort\n diffs << SingleRecordDiff.new(RecordChange::VALUE, aws, self)\n end\n if !@alias_target.nil?\n if aws.alias_target.nil? or\n (is_elb_alias? 
and aws.alias_target.elb_dns_name != ELB::get_aws(@alias_target.name).dns_name) or\n (aws.alias_target.chomped_dns != @alias_target.dns_name)\n diffs << SingleRecordDiff.new(RecordChange::ALIAS, aws, self)\n end\n end\n\n diffs\n end", "def search_records(record_list, search_opts = {}, full_notes = false)\n search_opts = DEFAULT_SEARCH_OPTS.merge(search_opts)\n\n url = build_url('/search/records', search_opts.merge(\"uri[]\" => record_list))\n results = do_search(url)\n\n # Ensure that the order of our results matches the order of `record_list`\n results['results'] = results['results'].sort_by {|result| record_list.index(result.fetch('uri'))}\n\n SolrResults.new(results, search_opts, full_notes)\n end", "def test_check_first_bad_address_non_num\n assert_raises SystemExit do\n assert_output 'Line 1: Invalid address 1ab4c67\\nBLOCKCHAIN INVALID' do\n add = %w[1ab4c67 123456]\n line_num = 1\n @verify.check_addresses(add, line_num)\n end\n end\n end", "def test_records_dont_overlap\n\t\ta = TestWabaRecord.new; \n\t\ta.site = \"A\"\n\t\tb = TestWabaRecord.new\n\t\tb.site = \"B\"\n\t\tassert_equal(a.site, \"A\")\n\t\tassert_equal(b.site, \"B\")\n\tend", "def valid_dns_record? (hostname)\n\t\tputs \"Validate the hostname record: #{hostname}\" if @verbose\n\t\tbegin\n\t\t\tips=Resolv.getaddresses(hostname)\n\t\t\tif ips.empty?\n\t\t\t\treturn false\n\t\t\telse\n\t\t\t\tputs \"Found: #{hostname}\" if @verbose\n\t\t\t\treturn true\n\t\t\tend\n\t\trescue => ee\n\t\t\tputs \"Exception on method #{__method__} for host #{hostname}: #{ee}\" if @verbose\n\t\t\treturn false\n\t\tend\n\tend", "def concat(*records)\n records = records.flatten\n concat_records(records)\n end", "def delete_records(records:, record_class: nil)\n records.each do |record|\n key = get_key_for_record(record: record, record_class: record_class)\n REDIS_APP_JOIN.del(key)\n end\n end", "def add_ip_addresses(addresses, comment=\"\", list=\"GFWed\")\n logger.info \"addresses: #{addresses}\"\n message = \"\"\n counter = 0\n addresses.gsub!(\"\\r\", \"\")\n addresses.split(\"\\n\").each do |address|\n counter += 1\n message += add_ip_address(address, comment, list, false)\n end\n \n message = message == \"\" ? \"#{settings.rui_host}: #{counter} ip addresses add commands was sent.\" : message\n return message\n end", "def match?(rec_1, rec_2)\n (rec_1.is_a?(ApplicationRecord) || rec_2.is_a?(ApplicationRecord)) &&\n (rec_1.class.try(:base_class) == rec_2.class.try(:base_class)) &&\n (rec_1.attributes == rec_2.attributes)\n end", "def update_if_necessary(new_contact_record)\n matched = []\n updated_details = []\n self.first_name = new_contact_record.first_name\n self.last_name = new_contact_record.last_name\n \n new_contact_record.details.each do |ncd|\n details.each do |ocd|\n matched << [ocd,ncd] and break if ocd.matches?(ncd)\n end\n end\n # Update the matched records if necessary\n matched.each do |ocd,ncd|\n if not ocd.update_if_necessary(ncd).empty?\n updated_details << ocd\n end\n end\n # Also add the new contact details that we are adding\n # We need to dup to make sure we don't overwrite the original\n \n (new_contact_record.details - matched.column(1)).each do |ncd| \n self.details << ncd.dup\n updated_details << self.details.last\n end \n updated_details\n end", "def records?\n defined?(@record_types) and ! 
@record_types.empty?\n end", "def geocode(addresses, options = {})\n if addresses.size < 1\n raise ArgumentError, 'You must provide at least one address to geocode.'\n elsif addresses.size == 1\n geocode_single(addresses.first, options)\n else\n geocode_batch(addresses, options)\n end\n end", "def find(address)\n address && @mapped_addresses[address.comparison_attributes.except('user_id')]\n end", "def delete_old_addresses\n\n # Fetch token id\n # delete if any address present in token addresses table and client_wallet_addresses table\n\n token_id = @token_details.id\n ClientWalletAddress.where(client_id: @client_id, sub_environment: GlobalConstant::Base.sub_environment_name ).destroy_all\n token_addresses = TokenAddresses.where(token_id: token_id, kind: GlobalConstant::TokenAddresses.owner_address_kind).first\n\n if token_addresses.present? && token_addresses.known_address_id.present?\n SaasApi::WalletAddress::RemoveKnownAddress.new.perform({known_address_id: token_addresses.known_address_id, client_id: @client_id})\n token_addresses.destroy!\n end\n\n success\n\n end", "def check_records(user)\n\t\[email protected]{|record| !record.has_return && record.user_id == user.id}.each do |record|\n\t\t\[email protected] do |book,value|\n\t\t\t\tif book.isbn == record.book_isbn then\n\t\t\t\t\tputs \"=> Warning! \\n\"\n\t\t\t\t\tputs \"#{user.name} borrowed <#{book.title}> on #{record.borrow_date}, #{user.name} has to return it before #{record.due}.\"\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend" ]
[ "0.6614167", "0.6467907", "0.59206665", "0.5809993", "0.56610745", "0.55722725", "0.52838653", "0.5208784", "0.5201524", "0.51864046", "0.51519865", "0.5139839", "0.5101072", "0.508444", "0.5021086", "0.49941257", "0.4979793", "0.49712396", "0.49596435", "0.49403507", "0.49157253", "0.49008405", "0.4894681", "0.48832357", "0.48504162", "0.48421448", "0.48400545", "0.48115766", "0.47820652", "0.4767725", "0.4747669", "0.47476315", "0.47464964", "0.4720473", "0.4701692", "0.4668725", "0.46673435", "0.4653515", "0.46332628", "0.4632578", "0.4623703", "0.4606719", "0.45817724", "0.45767486", "0.45517775", "0.45516914", "0.45463702", "0.45434383", "0.45378417", "0.45268372", "0.4518473", "0.4513564", "0.45046902", "0.44910893", "0.44765034", "0.44676286", "0.44670758", "0.44588113", "0.4443675", "0.44304103", "0.44270763", "0.4418555", "0.44160825", "0.44156814", "0.4409549", "0.44026244", "0.4396413", "0.43962643", "0.4395774", "0.4392915", "0.4392191", "0.43893427", "0.43840733", "0.43828663", "0.43801636", "0.43787995", "0.4368187", "0.43679133", "0.43674502", "0.43668953", "0.435263", "0.433129", "0.43308094", "0.432252", "0.43201807", "0.43140474", "0.43116742", "0.43057784", "0.43030325", "0.429068", "0.42819116", "0.42772886", "0.4271385", "0.42702314", "0.42685908", "0.42660373", "0.42651144", "0.42637336", "0.42608097", "0.42605093" ]
0.83933824
0
Compare the expected +hosts+ with those in the fetched NS +records+.
def compare_ns_records( records, hosts ) record_hosts = Set.new( records.map(&:name) ) hosts = Set.new( hosts.map {|name| Resolv::DNS::Name.create(name + '.')} ) self.log.debug "Comparing %p to %p" % [ record_hosts, hosts ] status = nil if ( record_hosts ^ hosts ).empty? status = { ns_record: record_hosts.map(&:to_s) } elsif !( subset = record_hosts - hosts ).empty? status = { error: "missing NS records: %s" % [subset.map(&:to_s).join(', ')] } elsif !( subset = hosts - record_hosts ).empty? status = { error: "extra NS records: %s" % [subset.map(&:to_s).join(', ')] } end return status end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compare_mx_records( records, hosts )\n\t\t\trecord_hosts = Set.new( records.map(&:exchange) )\n\t\t\thosts = Set.new( hosts.map {|name| Resolv::DNS::Name.create(name + '.')} )\n\n\t\t\tself.log.debug \"Comparing %p to %p\" % [ record_hosts, hosts ]\n\n\t\t\tstatus = nil\n\t\t\tif ( record_hosts ^ hosts ).empty?\n\t\t\t\trecord_strings = records.\n\t\t\t\t\tmap {|rec| \"%s[%d]\" % [rec.exchange, rec.preference || 0] }\n\t\t\t\tstatus = {\n\t\t\t\t\tmx_record: record_strings.join( ', ' )\n\t\t\t\t}\n\t\t\telsif !( subset = record_hosts - hosts ).empty?\n\t\t\t\tstatus = { error: \"missing MX records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\telsif !( subset = hosts - record_hosts ).empty?\n\t\t\t\tstatus = { error: \"extra MX records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\tend\n\n\t\t\treturn status\n\t\tend", "def compare_a_records( records, addresses )\n\t\t\trecord_addresses = Set.new( records.map(&:address) )\n\t\t\taddresses = Set.new( addresses.map {|addr| Resolv::IPv4.create(addr)} )\n\n\t\t\tstatus = nil\n\t\t\tif addresses.subset?( record_addresses )\n\t\t\t\tstatus = { a_record: {addresses: record_addresses.map(&:to_s)} }\n\t\t\telse\n\t\t\t\tmissing = addresses - record_addresses\n\t\t\t\tstatus = { error: \"missing A records: %s\" % [ missing.map(&:to_s).join(', ') ] }\n\t\t\tend\n\n\t\t\treturn status\n\t\tend", "def dns_check\n gen_host_records # These are the hosts we have\n load_all_subnets # These are the DNS entries\n \n # We want a standard layout, with the hypervisor API entries being \n @host_record.each do |hr| # Array of host record Hash's\n hn = hr[:hostname]\n shn = hn.split('.',2)[0] # Remove the domain\n forward_hr = @forward_host_record[hn] # Find Host Record\n if forward_hr.nil?\n # We have no IPAM entry for this hostname\n if (rhr = @reverse_host_records[hr[:ip]])\n puts \"Only Reverse IPAM entry for #{shn}: #{rhr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n else\n puts \"No IPAM entry for hostrecord: #{hr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n end\n else\n # We have an IPAM record for this hostname\n if forward_hr[:ip] != hr[:ip]\n puts \"IP mismatch #{shn} #{hr[:ip]} != #{forward_hr[:ip]} for IPAM: #{forward_hr}\"\n elsif forward_hr[:hostname] != hn\n # Reference must be via ALIASES or CNAMES\n if forward_hr[:aliases].include?(shn)\n puts \"Hostname #{shn} is an ALIAS. IPAM: #{forward_hr}\"\n elsif forward_hr[:cnames].include?(hn)\n puts \"Hostname #{shn} is a CNAME. IPAM: #{forward_hr}\"\n end\n end\n end\n end\n \n # We want to find IPAM entries, not matching existing @host_record entries\n @reverse_host_records.each do |ip, ahr| # Hash to array of host records from IPAM, indexed by IP\n ahr.each do |hr| # One IP can have multiple host records, with associated ALIAS and CNAME records\n local_hr = @host_record_index[hr[:hostname]]\n if local_hr.nil?\n puts \"No local entry #{hr[:hostname]} for #{hr}\"\n end\n end\n end\nend", "def check_hosts(hosts_stat, deep = false, verbose = true)\n\n $stderr.print \"Checking which hosts are up:\\n \" if verbose\n\n tot = hosts_stat.length\n count = 1\n\n hosts_stat.each do |hs|\n\n if verbose\n pct = 100. 
* count.to_f / tot.to_f\n $stderr.printf( \"\\r\\033[0K[% 3d%%] %s\", pct.round, hs[:names].first )\n $stderr.flush\n end\n\n hn = hs[:names].first\n\n if deep\n rnd = (rand()*10000000).round()\n hs[:up] = %x[ ssh -oUserKnownHostsFile=#{$known_hosts} \\\n -oConnectTimeout=5 #{hn} echo #{rnd} 2> /dev/null ].include?(rnd.to_s)\n else\n hs[:up] = system(\"nc -z #{hn} 22 2> /dev/null > /dev/null\")\n end\n\n count += 1\n\n end\n\n warn \"\\r\\033[0KDone!\" if verbose\n\nend", "def hosts_eql?(a, b) # rubocop:disable Naming/UncommunicativeMethodParamName\n parse_host(a) == parse_host(b)\n rescue IPAddr::InvalidAddressError\n false\n end", "def wanted_records(records, domains)\n _records = records.select { |m| domains.include?(m[1]) }\n _records = _records.each_cons(2).select { |a, b| a.last == b.last }\n _records = _records.group_by(&:last).keys.map do |v|\n { record_id: v.first, ip_address: v.last }\n end\n _records\n end", "def verify_abs_hosts(hosts)\n success = false\n puts \"Verifying ABS hosts: #{hosts}\"\n hosts.each do |host|\n puts\n puts \"Current host: #{host}\"\n\n success = verify_abs_host(host[:hostname])\n break unless success\n end\n\n puts \"Unable to verify the provisioned hosts\" unless success\n return success\n end", "def check_hostnames\n all_good = true\n \n @check_groups.each do |group|\n group.checks.each do |check|\n unless check.hostname && Dnsruby::Resolv.getaddress(check.hostname)\n puts \"Error: check #{check.name} has invalid hostname '#{check.hostname}'\"\n all_good = false\n end\n end\n end\n \n all_good\n end", "def assert_ip_has_saved_hostnames(ip, hostnames)\n record = DnsRecord.find_by! ip: ip\n\n saved_hostnames = record.hostnames.map do |hostname|\n hostname.hostname\n end\n\n assert_equal ip, record.ip\n assert_equal hostnames.to_set, saved_hostnames.to_set\n end", "def get_puppetdb_hosts\n curl = setup_curl(\"#{@puppetdb_url}/v3/nodes\")\n curl.get\n servers_junk = JSON.parse(curl.body_str)\n servers_array = []\n servers_junk.each { |server| servers_array << server['name'] }\n @puppetdb_hosts = servers_array\n end", "def known_host_hash?(hostlist, entries); end", "def compare_values( records, node_data )\n\t\t\ttype = node_data['record_type']\n\n\t\t\tcase type\n\t\t\twhen 'A'\n\t\t\t\treturn self.compare_a_records( records, node_data['values'] )\n\t\t\twhen 'NS'\n\t\t\t\treturn self.compare_ns_records( records, node_data['values'] )\n\t\t\twhen 'MX'\n\t\t\t\treturn self.compare_mx_records( records, node_data['values'] )\n\t\t\telse\n\t\t\t\treturn { dns: \"#{type} not comparable yet.\" }\n\t\t\tend\n\t\tend", "def transform_hosts(hosts)\n require 'time'\n\n node_data = []\n\n hosts.each do |host|\n if host[:report_timestamp].nil?\n # This can happen in weird cases. Mark as an expired node, so\n # the expired logic doesn't try to do math on a nil timestamp.\n last_checkin = nil\n formatted_checkin = 'N/A'\n host[:expired] = nil\n else\n last_checkin = Time.now - Time.parse(host[:report_timestamp])\n formatted_checkin = sprintf(\"%#{@options.round_to}f\",(last_checkin * @options.divisor).abs)\n end\n node_data << {\n :last_checkin => last_checkin,\n :expired => host[:expired].nil? ? false : host[:expired],\n :certname => host[:certname],\n :environment => host[:report_environment].nil? ? 'N/A' : host[:report_environment],\n :status => host[:latest_report_status].nil? ? 
'N/A' : host[:latest_report_status],\n :formatted_checkin => formatted_checkin\n }\n end\n\n unless @options.environments.empty?\n node_data.delete_if {|node| not @options.environments.include? node[:environment] }\n end\n unless @options.statuses.empty?\n node_data.delete_if {|node| not @options.statuses.include? node[:status] }\n end\n\n node_data\n end", "def query_files_hosts(hostlist, hosts)\n report_dir = get_report_dir\n\n existing_nodes = hostlist.map{|x| x[:certname]}\n\n local_host_template = {\n :deactivated=>false,\n :latest_report_hash=>nil,\n :facts_environment=>nil,\n :cached_catalog_status=>\"not_used\",\n :report_environment=>nil,\n :latest_report_corrective_change=>nil,\n :catalog_environment=>nil,\n :facts_timestamp=>nil,\n :latest_report_noop=>nil,\n :expired=>false,\n :latest_report_noop_pending=>nil,\n :report_timestamp=>nil,\n :certname=>nil,\n :catalog_timestamp=>nil,\n :latest_report_job_id=>nil,\n :latest_report_status=>nil\n }.freeze\n\n local_host_reports = []\n\n if File.directory?(report_dir)\n @logger.debug(\"Processing Report Directory: #{report_dir}\")\n\n Dir.glob(\"#{report_dir}/*\").each do |node_dir|\n @logger.debug(\"Processing Node Directory: #{node_dir}\")\n\n latest_report = Dir.glob(\"#{node_dir}/*.yaml\").sort.last\n if latest_report\n @logger.debug(\"Processing YAML Report: #{latest_report}\")\n\n begin\n require 'puppet'\n\n transaction_report = YAML.load_file(latest_report)\n\n unless (hosts.empty? || hosts.include?(transaction_report.host))\n @logger.debug(\"Skipping #{transaction_report.host} since it is not in the host list\")\n next\n end\n\n if existing_nodes.include?(transaction_report.host)\n @logger.debug(\"Skipping #{transaction_report.host} since it already exists\")\n next\n end\n\n local_host_data = Marshal.load(Marshal.dump(local_host_template))\n local_host_data[:latest_report_hash] = transaction_report.catalog_uuid\n local_host_data[:facts_environment] = transaction_report.environment\n local_host_data[:report_environment] = transaction_report.environment\n local_host_data[:latest_report_corrective_change] = transaction_report.corrective_change\n local_host_data[:catalog_environment] = transaction_report.environment\n local_host_data[:facts_timestamp] = transaction_report.time.to_s\n local_host_data[:latest_report_noop] = transaction_report.noop\n local_host_data[:latest_report_noop_pending] = transaction_report.noop_pending\n local_host_data[:report_timestamp] = transaction_report.time.to_s\n local_host_data[:certname] = transaction_report.host\n local_host_data[:catalog_timestamp] = transaction_report.time.to_s\n local_host_data[:latest_report_job_id] = transaction_report.catalog_uuid\n local_host_data[:latest_report_status] = transaction_report.status\n\n hostlist << local_host_data\n\n @logger.debug(\"Processed Host Report: #{local_host_data}\")\n rescue => e\n @logger.warn \"Error processing report at '#{latest_report}': #{e}\"\n end\n else\n @logger.debug \"Could not find latest report in '#{node_dir}'\"\n end\n end\n else\n @logger.debug \"Could not find report directory at '#{report_dir}'\"\n end\n end", "def hosts(touchAndPrune=false)\n hosts=@vp_lock.synchronize{@hostname2vp.keys}\n if touchAndPrune\n check_up_hosts(hosts)\n else\n hosts\n end\n end", "def does_resolve_to_host?\n mx_records.include? 
Socket.gethostname\n end", "def find_hosts( fqdn, zone_id = nil)\n if zone_id.nil?\n #look for matching host across all zones\n request(\n :expects => 200,\n :method => 'GET',\n :parser => Fog::Parsers::Zerigo::DNS::FindHosts.new,\n :path => \"/api/1.1/hosts.xml?fqdn=#{fqdn}\"\n )\n else\n #look for hosts in a specific zone\n request(\n :expects => 200,\n :method => 'GET',\n :parser => Fog::Parsers::Zerigo::DNS::FindHosts.new,\n :path => \"/api/1.1/zones/#{zone_id}/hosts.xml?fqdn=#{fqdn}\"\n )\n end\n end", "def get_host_by_hostname(hostname, collector)\n host = nil\n if collector\n hosts_json = rpc(\"getHosts\", {\"hostGroupId\" => 1})\n hosts_resp = JSON.parse(hosts_json)\n# p hosts_resp\n collector_resp = JSON.parse(rpc(\"getAgents\", {}))\n if hosts_resp[\"status\"] == 200\n hosts_resp[\"data\"][\"hosts\"].each do |h|\n if h[\"hostName\"].eql?(hostname)\n # puts(\"Found host with matching hostname: #{resource[:hostname]}\")\n # puts(\"Checking agent match\")\n if collector_resp[\"status\"] == 200\n collector_resp[\"data\"].each do |c|\n if c[\"description\"].eql?(collector)\n host = h\n end\n end\n else\n puts(\"Unable to retrieve collector list from server\")\n end\n end\n end\n else\n puts(\"Unable to retrieve host list from server\" )\n end\n end\n host\nend", "def print_known_hosts\n\t\tputs \"\\nSummary of local hosts Table:\"\n\t\tputs \"Total entries: #{@known_hosts.size}\"\n\t\t(@known_hosts.keys.sort-[\"\",nil]).each do |key|\n\t\t\tvalue=@known_hosts[key]\n\t\t\tputs \"#{key}\\t#{value}\" if is_fqdn?(key)\n\t\tend\n\t\tputs \"End of the summary\"\n\tend", "def print_records(records)\n\tprint_status(\"Records found:\")\n\trecords.each do |r|\n\t\tprint_good(\"\\tHost: #{r[:host]}\")\n\t\tprint_good(\"\\tIP: #{r[:ip]}\")\n\t\tprint_good(\"\\tPort: #{r[:port]}\")\n\t\tprint_good(\"\\tService:#{r[:service]}\")\n\t\tprint_good(\"\\tText:#{r[:txt]}\")\n\t\tprint_good(\"\")\n\tend\nend", "def load_known_hosts_from_file (f_hosts=@hosts_file)\n\t\tputs \"Loading local hosts from file: #{f_hosts} ...\" if @verbose\n\t\tknown_hosts=Hash.new\n\t\t@alias = Hash.new\n\t\tFile.write(f_hosts, \"\") unless File.exist?(f_hosts)\n\t\tf=File.open(f_hosts, 'r')\n\t\tf.each do |line|\n\t\t\tnext unless line =~ /\\d+\\.\\d+\\.\\d+\\.\\d+/\n\t\t\tentry=line.chomp.split(%r{\\t+|\\s+|\\,})\n\t\t\tkey=entry[0].downcase\n\t\t\tvalue=entry[1]\n\t\t\tputs \"Loading key value pair: #{key} - #{value}\" if @verbose\n\t\t\tknown_hosts[key] = Hash.new unless known_hosts.key?(key)\n\t\t\tknown_hosts[key]= value\n\t\t\t# For reverse host lookup\n\t\t\tknown_hosts[value] = Hash.new unless known_hosts.key?(value)\n\t\t\tknown_hosts[value] = key\n\t\t\t# Count the number of alias for the recorded IP\n\t\t\tif @alias.key?(value)\n\t\t\t\t@alias[value]+=1\n\t\t\telse\n\t\t\t\t@alias[value]=1\n\t\t\tend\n\t\tend\n\t\tf.close\n\t\treturn known_hosts\n\t\t#rescue => ee\n\t\t#\tputs \"Exception on method #{__method__}: #{ee}\"\n\t\t#\treturn known_hosts\n\tend", "def full_host_record(ip:)\n load_cnames\n \n @forward_host_record ||= {} # Global, as we want to do name lookups.\n return_record = []\n unless( (host_records = @infoblox.get_host_by_ip(ip_address: ip)).nil? )\n host_records.each do |hosts|\n hosts.each do |hn|\n # Assign an empty record, if we haven't seen this host before\n @forward_host_record[hn] ||= { hostname: hn, ip: '', aliases: [], cnames: [] }\n \n # Record the IP. 
There may be multiple IPs with one hostname.\n @forward_host_record[hn][:ip] = ip\n \n # The hostname might have CNAMES point to it\n unless @reverse_cnames[hn].nil?\n @reverse_cnames[hn].each do |cn| \n @forward_host_record[hn][:cnames] << cn \n end\n end\n \n # The hostname may have alternate hostname A records, stored in IPAM as ALIASES\n @infoblox.get_alias(hostname: hn) do |a| \n short_alias = a.split('.',2)[0]\n @forward_host_record[hn][:aliases] << short_alias\n \n # The ALIASes might have CNAME records pointing to it\n unless @reverse_cnames[a].nil?\n # Record the ALIAS CNAMES against the parent hostname.\n @reverse_cnames[a].each do |cn| \n @forward_host_record[hn][:cnames] << cn \n end\n end\n end\n return_record << @forward_host_record[hn]\n \n # Add forward lookup entries for each ALIAS\n host_domain = hn.split('.',2)[1]\n @forward_host_record[hn][:aliases].each do |a|\n @forward_host_record[\"#{a}.#{host_domain}\"] = @forward_host_record[hn]\n end\n \n # Add forward lookup entries for each CNAME\n @forward_host_record[hn][:cnames].each do |cn|\n @forward_host_record[cn] = @forward_host_record[hn]\n end\n \n end\n end\n end\n return return_record\nend", "def refetch_hosts?\n return false unless vima?\n return true if hosts_updated_at.nil?\n\n hosts_updated_at < Archiving.settings[:skip_host_fetch_time_period].ago\n end", "def find_and_merge_duplicate_hosts!\n # find all duplicate addresses within the same workspace currently in the db\n dupe_addresses_and_workspaces = ApplicationRecord.connection.execute(%Q{\n SELECT workspace_id, address, count_addr\n FROM (\n SELECT workspace_id, address, COUNT(address) AS count_addr\n FROM hosts\n GROUP BY address, workspace_id\n ) X\n WHERE count_addr > 1\n })\n\n if dupe_addresses_and_workspaces.present? and\n not dupe_addresses_and_workspaces.num_tuples.zero?\n puts \"Duplicate hosts in workspace found. Merging host references.\"\n # iterate through the duped IPs\n dupe_addresses_and_workspaces.each do |result|\n # so its come to this\n address = ApplicationRecord.connection.quote(result['address'])\n workspace_id = result['workspace_id'].to_i\n # look up the duplicate Host table entries to find all IDs of the duped Hosts\n hosts = ApplicationRecord.connection.execute(%Q|\n SELECT id\n FROM hosts\n WHERE address=#{address} AND workspace_id=#{workspace_id}\n ORDER BY id DESC\n |)\n # grab and quote the ID for each result row\n hosts = hosts.map { |h| h[\"id\"].to_i }\n # grab every Host entry besides the first one\n first_host_id = hosts.first\n dupe_host_ids = hosts[1..-1]\n # update associations to these duplicate Hosts\n HOST_ASSOCIATION_MAP.each do |table, column|\n ApplicationRecord.connection.execute(%Q|\n UPDATE #{table} SET #{column}=#{first_host_id}\n WHERE #{column} IN (#{dupe_host_ids.join(',')})\n |)\n end\n # destroy the duplicate host rows\n ApplicationRecord.connection.execute(%Q|\n DELETE FROM hosts WHERE id IN (#{dupe_host_ids.join(',')})\n |)\n end\n\n # At this point all duped hosts in the same workspace should be merged.\n # You could end up with duplicate services, but hey its better than just\n # dropping all data about the old Host.\n end\n end", "def valid_abs_resource_hosts?(abs_resource_hosts)\n is_valid = false\n\n if abs_resource_hosts.nil?\n puts \"A valid hosts array is required; nil was specified\"\n puts\n else\n\n begin\n hosts = JSON.parse(abs_resource_hosts)\n host = hosts[0]\n hostname = host[\"hostname\"]\n if !hostname.nil? 
&& !hostname.empty?\n is_valid = true\n else\n puts \"The specified resource host array is not valid: #{abs_resource_hosts}\"\n puts\n end\n rescue\n # TODO: raise?\n puts \"JSON::ParserError encountered parsing the hosts array: #{abs_resource_hosts}\"\n end\n\n end\n\n is_valid\n end", "def sort_hosts(hosts,sort_by)\n case sort_by\n when :certname\n return hosts.sort{ |a,b| a[:certname] <=> b[:certname] }\n when :time\n return hosts.sort{ |a,b| \n if a[:last_checkin].nil? and !b[:last_checkin].nil?\n 1\n elsif !a[:last_checkin].nil? and b[:last_checkin].nil?\n -1\n else\n a[:last_checkin] <=> b[:last_checkin]\n end\n }\n when :status\n return hosts.sort{ |a,b| a[:status].downcase <=> b[:status].downcase }\n when :environment\n return hosts.sort{ |a,b| a[:environment] <=> b[:environment] }\n else\n return hosts\n end\n end", "def compare_domain(args)\r\n server_list = args[:server_list]\r\n domain = args[:domain_name]\r\n rtype = args[:rtype]\r\n rdata = args[:actual_rdata]\r\n rdata = (rtype == \"NAPTR\") ? rdata : rdata.downcase\r\n r = \"\"\r\n failed_rlist = []\r\n @timeout = 30\r\n sleep 15 if args[:sleepfirst]\r\n server_list.each do |server|\r\n dig_pass = \"succeed to dig @#{server} #{domain} #{rtype} => #{rdata}\"\r\n dig = `dig @#{server} #{domain} #{rtype}`\r\n if dig.include?(rdata)\r\n puts dig_pass\r\n else\r\n puts \"dig @#{server} #{domain} #{rtype} failed as expected!\" if args[:expected_dig_fail]\r\n return \"succeed\" if args[:expected_dig_fail]\r\n begin\r\n Timeout::timeout(@timeout){\r\n while !dig.include?(rdata)\r\n sleep 5\r\n dig_retry = `dig @#{server} #{domain} #{rtype}`\r\n puts dig_pass if dig_retry.include?(rdata)\r\n break if dig_retry.include?(rdata)\r\n end\r\n }\r\n rescue Timeout::Error\r\n puts \"Error => dig @#{server} #{domain} #{rtype} timed out!\"\r\n failed_rlist << \"failed\"\r\n end\r\n end\r\n end\r\n failed_rlist.empty? ? 'succeed' : 'failed'\r\n end", "def server_hosts\n return [:default] if @resource[:server_hosts] &&\n @resource[:server_hosts][0] == :default &&\n @property_hash[:server_hosts] ==\n @aaa_group.default_servers\n @property_hash[:server_hosts]\n end", "def host_uniqueness?(zone, host_list, vdc_id = -1)\n all_hosts = \"\"\n zone.vdcs.all.each{|vdc|\n if vdc.hosts != nil and !vdc.hosts.empty? and vdc.id != vdc_id\n all_hosts << ',' << vdc.hosts\n end\n }\n\n all_hosts = all_hosts.split(',')\n\n host_list.split(\",\").each{|host|\n return false if all_hosts.include?(host)\n }\n\n return true\n end", "def bad_hosts\n bad_hosts.collect {|r| r.host }\n end", "def outdated_hosts\n hosts = []\n scanned_hosts = self.date_severity_count\n scanned_hosts.each do |sh|\n sh.hosts.split(\", \").each do |host|\n hosts << host.gsub(/\\s+/, \"\")\n end\n end\n return hosts.uniq.sort\n end", "def match_host(hostname)\n hostname === host\n end", "def hosts_with_data(resultset)\n resultset.count { |_host, values| !values['data'].empty? }\n end", "def cleanup_records\n Fog::DNS[:dreamhost].records.each do |r|\n # Do not delete the 'do-not-delete' record, we need it for the tests\n r.destroy if r.name =~ /#{test_domain}/ and r.name != do_not_delete_record\n end\nend", "def find_hosts!(host_spec)\n if self.groups[host_spec]\n return self.groups[host_spec].host_list.map { |m| self.hosts[m] }\n elsif self.hosts[host_spec]\n return [self.hosts[host_spec]]\n else\n say \"No inventory matching: '#{host_spec}' found. 
\"\n say ([\"Available hosts:\"] + self.hosts.keys).join(\"\\n\\t\")\n say ([\"Available groups:\"] + self.groups.keys).join(\"\\n\\t\")\n exit\n end\n end", "def get_a2a_hosts\n abs_initialize\n mom =\n { 'role': \"mom\",\n 'size': @mom_size,\n 'volume_size': @mom_volume_size }\n\n metrics =\n { 'role': \"metrics\",\n 'size': @metrics_size,\n 'volume_size': @metrics_volume_size }\n\n hosts = [mom, metrics]\n\n return hosts\n end", "def collectHosts(rf, db)\n\trf.childEntity.grep(RbVmomi::VIM::Datacenter).each do |dc|\n\t\tprogressbar = ProgressBar.create(:title => \"Hosts\", :format => '%t |%b>>%i| %p%% %a')\n\t\tprogressbar.total = counter(dc, \"h\")\n\t\tdc.hostFolder.childEntity.each do |cluster|\n\t\t\tcluster.host.each do |host|\n\t\t\t\tdb.select(2)\n\t\t\t\tdb.hset(\"#{host.name}\", \"Status\", \"#{host.summary.overallStatus}\")\n\t\t\t\tdb.hset(\"#{host.name}\", \"PowerStatus\", \"#{host.summary.runtime.powerState}\")\n\t\t\t\tdb.hset(\"#{host.name}\", \"Connection\", \"#{host.summary.runtime.connectionState}\")\n\t\t\t\tdb.hset(\"#{host.name}\", \"OverallCpu\", \"#{host.summary.quickStats.overallCpuUsage}\")\n\t\t\t\tdb.hset(\"#{host.name}\", \"OverallMem\", \"#{host.summary.quickStats.overallMemoryUsage}\") \n\t\t\t\t#db.hset(\"#{host.name}\", \"SystemSensor\", \"#{host.summary.runtime.healthSystemRuntime.systemHealthInfo.numericSensorInfo.name}\")\n\t\t\t\tprogressbar.increment\n\t\t\tend\n\t\tend\n\tend\nend", "def assert_propstat_response response, properties, statuses\n assert_instance_of RubyDav::PropstatResponse, response\n\n assert_equal properties.keys.sort, statuses.keys.sort\n assert_equal properties.keys.sort, response.resources.keys.sort\n\n response.resources.each do |url, results|\n\n successful_results = results.reject do |pk, r|\n r.inner_value.nil? || r.inner_value.strip.empty?\n end\n\n successful_keys = successful_results.keys.sort\n assert_equal properties[url].keys.sort, successful_keys\n\n successful_keys.each do |pk|\n assert_equal properties[url][pk], results[pk].inner_value\n end\n\n keys = results.keys.sort\n assert_equal statuses[url].keys.sort, results.keys.sort\n\n keys.each do |pk|\n assert_equal statuses[url][pk], results[pk].status\n end\n end\n end", "def hosts_delta(puppetdb_hosts = @puppetdb_hosts, foreman_hosts = @foreman_hosts)\n @hosts_delta = foreman_hosts - puppetdb_hosts\n end", "def other_server_hosts\n @other_server_hosts ||= all_server_hosts.reject {|x| x == server_host}\n end", "def hosts\n # prevent original array from being changed\n @hosts.dup\n end", "def make_into_replicas(records)\n begin # Basic safety net, avoiding trouble\n safe_records = []\n records.each do |r|\n case\n when r.kind == 'Group',\n r.kind == 'Smart Group'\n @unify_url_log.warn \"Group with URL: '#{r.name}' (#{r.kind}). 
Not handled, since 'make_into_replicas' does not handle groups.\"\n # TODO Exclude records that are in Trash\n else # (Normal case)\n safe_records << r\n end\n end\n end\n records = safe_records\n return false if records.size == 0 # TODO Is this a reasonable result?\n\n # Remove items from records that are already replicas\n records = remove_replicas(records) # Note: This also getifies records, making them less prone\n # for bugs when removing stuff\n return true if records.size == 1 # Job done if array has only one item left\n\n master = records.pop\n\n begin # Safety net - will raise an error if the items are not reasonably similar\n # Needs to be the same: name, URL, comment\n # Can be different: Kind, Date, Size etc.\n safe_records = []\n records.each do |r|\n case\n when master.name != r.name, # Stuff that must be the same.\n master.URL != r.URL\n @unify_url_log.warn \"WARNING To dissimular to safely make into replicas\"\n when master.comment != r.comment\n @unify_url_log.warn \"WARNING Comments differ - '#{r.name}' at '#{r.location}' will not replicated since I fear to loose unique comments.\"\n else # Normal case\n safe_records << r\n end\n end\n records = safe_records\n end\n\n # Delete records and replace them with replicas of master\n while records.size > 0\n r = records.pop\n rparents = r.parents.get\n rparents = remove_replicas(r.parents) # (Also .get-ifys)\n rparents.each do |rparent| # Record must be replaced in all its locations\n @devonthink.replicateRecord_to_(master, rparent)\n @created_deleted_log.info \"Created: '#{master.name}' (#{master.kind})\"\n trash(r, rparent)\n # TODO Check that tags also are preserved\n end\n end\n\n end", "def classifier_database_matches_self?(replica_host)\n original_host_name = host.host_hash[:vmhostname]\n begin\n host.host_hash[:vmhostname] = replica_host.hostname\n\n other_nodes = get_list_of_nodes\n other_classes = get_list_of_classes\n other_environments = get_list_of_environments\n other_groups = get_list_of_node_groups\n ensure\n host.host_hash[:vmhostname] = original_host_name\n end\n\n self_nodes = get_list_of_nodes\n self_classes = get_list_of_classes\n self_environments = get_list_of_environments\n self_groups = get_list_of_node_groups\n\n nodes_match = nodes_match?(other_nodes, self_nodes)\n classes_match = classes_match?(other_classes, self_classes)\n environments_match = environments_match?(other_environments, self_environments)\n groups_match = groups_match?(other_groups, self_groups)\n\n errors = ''\n errors << \"Nodes do not match\\r\\n\" unless nodes_match\n errors << \"Classes do not match\\r\\n\" unless classes_match\n errors << \"Environments do not match\\r\\n\" unless environments_match\n errors << \"Groups do not match\\r\\n\" unless groups_match\n\n host.logger.warn(errors.chomp) unless errors.empty?\n errors.empty?\n end", "def hosts\n (self.web_hosts.to_a + self.db_hosts.to_a + self.balance_hosts.to_a + self.app_hosts.to_a).uniq.sort\n end", "def ==(o)\n return true if self.equal?(o)\n self.class == o.class &&\n host_list == o.host_list &&\n total_matching == o.total_matching &&\n total_returned == o.total_returned\n end", "def add_hosts?(description, servers)\n !!(member_of_this_set?(description) &&\n (!has_primary?(servers) || description.primary?))\n end", "def hosts(wspace = workspace, only_up = false, addresses = nil)\n\t\tconditions = {}\n\t\tconditions[:state] = [Msf::HostState::Alive, Msf::HostState::Unknown] if only_up\n\t\tconditions[:address] = addresses if addresses\n\t\twspace.hosts.all(:conditions => 
conditions, :order => :address)\n\tend", "def test_resolver_converts_ipaddrs_array\n @parser.log_reader\n assert_equal @parser.ipaddrs_q.size, 12\n assert_equal @parser.records_q.size, 12\n @parser.resolve_names\n assert_equal @parser.domains_hash.map { |k,v| [ k, v[0] ] }.sort, [\n [\"208.77.188.166\", \"www.example.com\"],\n [\"74.125.67.100\", \"gw-in-f100.google.com\"],\n [\"75.119.201.189\", \"apache2-moon.legs.dreamhost.com\"],\n [\"75.146.57.34\", \"greed.zenspider.com\"]\n]\n end", "def test_hosts\n assert_equal(['localhost'], cmk.folder('folder1').hosts)\n end", "def update_required?(hosts)\n if !@refresh_node || !@refresh_node.active?\n begin\n @refresh_node = get_valid_seed_node\n rescue ConnectionFailure\n warn \"Could not refresh config because no valid seed node was available.\"\n return\n end\n end\n node = @refresh_node\n\n node_list = node.node_list\n\n unconnected_nodes = node_list - hosts\n removed_nodes = hosts - node_list\n\n if unconnected_nodes.empty? && removed_nodes.empty?\n return false\n else\n {:unconnected => unconnected_nodes, :removed => removed_nodes}\n end\n end", "def local_to_remote_check\n #checking locals are consistent with remotes\n obj_ptr_re = /\\[\\d{1},(.*)\\]/\n locRW_re = /\\locRW\\:(.?)\\,/\n recalled_re = /recalled\\:(.?)\\,/\n count = 0\n $arr_local_store.each do |local_file|\n local_file.each do |line|\n unless line.chomp.empty?\n obj_ptr = line.match(obj_ptr_re)\n remote_host = line.match(locRW_re)\n recalled_for = line.match(recalled_re)\n\n #check if remote host knows about this obj_ptr\n if !remote_host[1].eql?\"\" and recalled_for[1].eql?\"\"\n found = false\n $arr_remote_store[remote_host[1].to_i].each do |r_line|\n if r_line.include? obj_ptr[1]\n found = true\n end\n end\n if !found\n p \"OMG! #{count} gave its object #{obj_ptr[1]} to #{remote_host[1]}, but it doesn't seem to know about it\"\n end\n end\n end\n end\n count = count + 1\n end\nend", "def all_hosts_in(file)\n servers = []\n file.each do |line|\n if line.include?('Host ')\n servers << line.sub('Host ', '').strip\n end\n end\n servers\n end", "def ==(other)\n @rye_host == other.host\n end", "def diff_zone_file(zone, records)\n #Compare dumped strings directly instead of RR objects\n zone_records = zone.records.map{ |rec|\n rec.dump\n }\n recs = records.map{ |rec|\n rec.dump\n }\n removed_records = zone_records - recs\n added_records = recs - zone_records\n if $options[:verbose]\n if removed_records.any?\n puts \"Removed records in zone file: #{zone.file_path}\"\n removed_records.each{ |rec|\n puts rec\n }\n end\n if added_records.any?\n puts \"Added records in zone file: #{zone.file_path}\"\n added_records.each{ |rec|\n puts rec\n }\n end\n end\n return added_records.any? 
|| removed_records.any?\nend", "def test_host_specific\n client1 = \"client1.example.com\"\n client2 = \"client2.example.com\"\n ip = \"127.0.0.1\"\n\n # Setup a directory hierarchy for the tests\n fsdir = File.join(tmpdir, \"host-specific\")\n @@tmpfiles << fsdir\n hostdir = File.join(fsdir, \"host\")\n fqdndir = File.join(fsdir, \"fqdn\")\n client1_hostdir = File.join(hostdir, \"client1\")\n client2_fqdndir = File.join(fqdndir, client2)\n contents = {\n client1_hostdir => \"client1\\n\",\n client2_fqdndir => client2 + \"\\n\"\n }\n [fsdir, hostdir, fqdndir, client1_hostdir, client2_fqdndir].each { |d| Dir.mkdir(d) }\n\n [client1_hostdir, client2_fqdndir].each do |d|\n File.open(File.join(d, \"file.txt\"), \"w\") do |f|\n f.print contents[d]\n end\n end\n conffile = tempfile\n File.open(conffile, \"w\") do |f|\n f.print(\"\n[host]\npath #{hostdir}/%h\nallow *\n[fqdn]\npath #{fqdndir}/%H\nallow *\n\")\n end\n\n server = nil\n assert_nothing_raised {\n\n server = Puppet::Network::Handler.fileserver.new(\n\n :Local => true,\n\n :Config => conffile\n )\n }\n\n # check that list returns the correct thing for the two clients\n list = nil\n sfile = \"/host/file.txt\"\n assert_nothing_raised {\n list = server.list(sfile, :manage, true, false, client1, ip)\n }\n assert_equal(\"/\\tfile\", list)\n assert_nothing_raised {\n list = server.list(sfile, :manage, true, false, client2, ip)\n }\n assert_equal(\"\", list)\n\n sfile = \"/fqdn/file.txt\"\n assert_nothing_raised {\n list = server.list(sfile, :manage, true, false, client1, ip)\n }\n assert_equal(\"\", list)\n assert_nothing_raised {\n list = server.list(sfile, :manage, true, false, client2, ip)\n }\n assert_equal(\"/\\tfile\", list)\n\n # check describe\n sfile = \"/host/file.txt\"\n assert_nothing_raised {\n list = server.describe(sfile, :manage, client1, ip).split(\"\\t\")\n }\n assert_equal(5, list.size)\n assert_equal(\"file\", list[1])\n md5 = Digest::MD5.hexdigest(contents[client1_hostdir])\n assert_equal(\"{md5}#{md5}\", list[4])\n\n assert_nothing_raised {\n list = server.describe(sfile, :manage, client2, ip).split(\"\\t\")\n }\n assert_equal([], list)\n\n sfile = \"/fqdn/file.txt\"\n assert_nothing_raised {\n list = server.describe(sfile, :manage, client1, ip).split(\"\\t\")\n }\n assert_equal([], list)\n\n assert_nothing_raised {\n list = server.describe(sfile, :manage, client2, ip).split(\"\\t\")\n }\n assert_equal(5, list.size)\n assert_equal(\"file\", list[1])\n md5 = Digest::MD5.hexdigest(contents[client2_fqdndir])\n assert_equal(\"{md5}#{md5}\", list[4])\n\n # Check retrieve\n sfile = \"/host/file.txt\"\n assert_nothing_raised {\n list = server.retrieve(sfile, :manage, client1, ip).chomp\n }\n assert_equal(contents[client1_hostdir].chomp, list)\n\n assert_nothing_raised {\n list = server.retrieve(sfile, :manage, client2, ip).chomp\n }\n assert_equal(\"\", list)\n\n sfile = \"/fqdn/file.txt\"\n assert_nothing_raised {\n list = server.retrieve(sfile, :manage, client1, ip).chomp\n }\n assert_equal(\"\", list)\n\n assert_nothing_raised {\n list = server.retrieve(sfile, :manage, client2, ip).chomp\n }\n assert_equal(contents[client2_fqdndir].chomp, list)\n end", "def check_up_hosts(hostlisthash, settings={ :retry => true, :maxalert => NO_EMAIL, :timeout => 30})\n if hostlisthash.class==Array\n hostlisthash=hostlisthash.to_h(true)\n end\n if not settings.include?(:timeout)\n settings[:timeout]=30\n end\n if not settings.include?(:retry)\n settings[:retry]=true\n end\n if not settings.include?(:maxalert)\n settings[:maxalert]=NO_EMAIL\n 
end\n results, unsuccessful_hosts=issue_command_on_hosts(hostlisthash,settings){|h,p| h.backtic(\"hostname --fqdn\").chomp(\"\\n\").strip.downcase}\n uphosts=[]\n results.each{|vp|\n uphosts << ($rename_vp.has_key?(vp.at(0)) ? $rename_vp[vp.at(0)] : vp.at(0))\n if vp.at(0) != vp.at(1)\n log { \"check_up_hosts(): vp.at(0) != vp.at(1): #{vp.join(\" \")}\" }\n end\n }\n # if prune\n # unsuccessful_hosts.each{|h|\n # self.unregister_host(h)\n # }\n # end\n return uphosts\n end", "def list_records(host)\n domain = find_domain!(host)\n raise IncorrectDomainType, \"Domain #{host} is a #{domain.type} domain\" unless domain.can_have_records?\n \n return @domain_records[domain.host] unless @domain_records.nil? or @domain_records[domain.host].nil?\n \n res = get('/dns.php', domain.list_records_options)\n if response_status_message(res) == (RESPONSE_MESSAGES[:DOMAIN_LIST_RECORDS] % domain.host)\n record_list = RecordList.parse_list(domain, get(URI.join(\"http://#{EVERYDNS_HOSTNAME}/dns.php\", res['location']).to_s).body)\n cache_domain_records(domain.host, record_list)\n return record_list\n else\n return false\n end\n end", "def monitor_hosts_and_vms\n totalmemory = 0\n totalcpu = 0\n\n host_info = \"HYPERVISOR=opennebula\\n\"\n host_info << \"PUBLIC_CLOUD=YES\\n\"\n host_info << \"PRIORITY=-1\\n\"\n host_info << \"CPUSPEED=1000\\n\"\n host_info << \"HOSTNAME=\\\"#{@host['hostname']}\\\"\\n\"\n case @host['host_mon']['type']\n when 'fixed'\n host_info << \"TOTALMEMORY=#{@host['host_mon']['memory']}\\n\"\n host_info << \"TOTALCPU=#{@host['host_mon']['cpu']}\\n\"\n when 'instance_based'\n @host['capacity'].each { |name, size|\n cpu, mem = instance_type_capacity(name)\n totalmemory += mem * size.to_i\n totalcpu += cpu * size.to_i\n }\n host_info << \"TOTALMEMORY=#{totalmemory.round}\\n\"\n host_info << \"TOTALCPU=#{totalcpu}\\n\"\n when 'dynamic'\n host_info << get_remote_quotas\n end\n\n usedcpu = 0\n usedmemory = 0\n\n vms_info = get_all_vms_poll_info\n puts host_info\n puts vms_info\n end", "def de_duplicate\n\t\t\t@known_hosts.keys.map do |key|\n\t\t\t\tip=@known_hosts[key]\n\t\t\t\tif @known_ips.key?(ip)\n\t\t\t\t\t@known_hosts.delete(key)\n\t\t\t\telse\n\t\t\t\t\t@known_ips[ip]=true\n\t\t\t\tend\n\t\t\tend\n\t\tend", "def find_applying_nodes(hosts, statuses = [])\n Log.debug(\"checking applying status of #{hosts.inspect}\")\n @client.filter[\"identity\"].clear\n hosts.each do |host|\n @client.identity_filter(host)\n end\n\n results = @client.status\n\n hosts.each do |host|\n result = results.select { |r| r[:sender] == host }.first\n status = statuses.select { |s| s[:name] == host }.first\n\n unless status\n status = make_status(host)\n statuses << status\n end\n\n if result\n # check the value of applying as defined in the agent ddl\n if result[:data][:applying] == true\n # we're applying\n if result[:data][:initiated_at]\n # it's a new agent, we can record when it started\n Log.debug(\"#{host} run was started at #{result[:data][:initiated_at]}\")\n status[:initiated_at] = result[:data][:initiated_at]\n else\n Log.debug(\"#{host} run started\")\n end\n else\n # Here we check the \"asked to run but not yet started\" state.\n if result[:data][:lastrun].to_i >= status[:initiated_at]\n Log.debug(\"#{host} run completed\")\n # The node has finished applying, remove from the running set\n statuses.reject! 
{ |s| s[:name] == host }\n next\n else\n # We haven't started yet that we can see, increment the check counter\n status[:checks] += 1\n Log.debug(\"#{host} starting, checks #{status[:checks]}\")\n end\n end\n else\n # We didn't get a result from this host, log and record a check happened\n log(\"Host #{host} did not respond to the status action.\")\n status[:no_response] += 1\n end\n\n if status[:no_response] >= 5\n # If we missed many responses to status, assume it's a dead node\n log(\"Host #{host} failed to respond multiple times. Skipping.\")\n statuses.reject! { |s| s[:name] == host }\n end\n\n if status[:checks] >= 5\n # If we hit more than 5 checks, assume it couldn't start\n log(\"Host #{host} did not move into an applying state. Skipping.\")\n statuses.reject! { |s| s[:name] == host }\n end\n end\n\n return statuses\n end", "def checkHosts(layer)\n @host_facts.each do |f|\n # each host has a list of facts\n f[1].each do |l|\n if l['deploop_category'] == layer\n up = @mchandler.ifHostUp f[0]\n if @opt.verbose\n puts \"checking host #{f[0]} is up: \" \n puts up\n end\n if !up\n msg = \"ERROR: host \\'#{f[0]}\\' is unreachable. Aboring.\"\n @outputHandler.msgError msg\n end\n deplUp = @mchandler.checkIfDeploopHost f[0]\n if @opt.verbose\n puts \"checking Deploop enabled host #{f[0]}: \" \n puts deplUp\n end\n if !deplUp\n msg = \"ERROR: host \\'#{f[0]}\\' is not Deploop enabled, fix this. Aborting.\"\n @outputHandler.msgError msg\n end\n end\n end\n\n end # @host_facts.each\n msg = \"The layer \\'#{layer}\\' has all host Deploop enabled\"\n @outputHandler.msgOutput msg\n end", "def reload_hosts_list\n self.hosts = self.storage_servers\n write_hosts_to_file\n end", "def hosts=(hosts)\n @host = nil\n @hosts = hosts\n end", "def test_records_dont_overlap\n\t\ta = TestWabaRecord.new; \n\t\ta.site = \"A\"\n\t\tb = TestWabaRecord.new\n\t\tb.site = \"B\"\n\t\tassert_equal(a.site, \"A\")\n\t\tassert_equal(b.site, \"B\")\n\tend", "def fetch_host_keys(hosts_stat, verbose = false)\n\n #\n # 1. Remove keys from known_hosts, using ssh-keygen -R <host>\n #\n\n hosts_stat.each do |hs|\n\n hs[:names].each do |hn|\n system(\n \"ssh-keygen -f \\\"#{$known_hosts}\\\" -R #{hn} > /dev/null 2> /dev/null\" )\n end\n\n end\n\n #\n # 2. Fetch new keys using ssh-keyscan\n #\n\n begin\n\n $stderr.print \"Fetching host keys to #{$known_hosts}:\\n \" if verbose\n\n open($known_hosts, 'a') do |file|\n\n tot = hosts_stat.length\n count = 1\n\n hosts_stat.each do |hs|\n\n if verbose\n pct = 100. * count.to_f / tot.to_f\n $stderr.printf( \"\\r\\033[0K[% 3d%%] %s\", pct.round, hs[:names].first )\n $stderr.flush\n end\n\n keys = %x[ ssh-keyscan -t rsa,dsa #{hs[:names].first} 2> /dev/null ]\n keys.gsub!( hs[:names].first, hs[:names].join(',') )\n\n file << keys\n\n count += 1\n\n end\n\n file.close\n\n end\n\n warn \"\\r\\033[0KDone!\" if verbose\n\n rescue\n return false\n end\n\n return true\n\nend", "def find(conditions={})\n\n result = []\n\n hostname_condition = conditions[:hostname]\n status_condition = conditions[:status]\n downtime_condition = conditions[:downtime]\n ack_condition = conditions[:ack]\n\n @hosts.each do |host|\n unless hostname_condition.nil?\n next if hostname_condition.is_a? String and host.hostname != hostname_condition\n next if hostname_condition.is_a? Regexp and !host.hostname.match(hostname_condition)\n end\n\n unless status_condition.nil?\n next if status_condition.is_a? Symbol and host.status != status_condition\n next if status_condition.is_a? 
String and host.status != status_condition.to_sym\n end\n next if !downtime_condition.nil? and host.downtime != downtime_condition\n next if !ack_condition.nil? and host.ack != ack_condition\n\n result << host\n end\n result\n end", "def test_sorting\n # Make sure declarations with no length sort first.\n host_exact = Declaration.new(:allow, \"host.com\")\n host_range = Declaration.new(:allow, \"*.host.com\")\n\n ip_exact = Declaration.new(:allow, \"192.168.0.1\")\n ip_range = Declaration.new(:allow, \"192.168.0.*\")\n\n\n assert_equal(\n -1, host_exact <=> host_range,\n\n \"exact name match did not sort first\")\n\n\n assert_equal(\n -1, ip_exact <=> ip_range,\n\n \"exact ip match did not sort first\")\n\n # Next make sure we sort by length\n ip_long = Declaration.new(:allow, \"192.168.*\")\n assert_equal(-1, ip_range <=> ip_long, \"/16 sorted before /24 in ip\")\n\n # Now try it using masks\n ip24 = Declaration.new(:allow, \"192.168.0.0/24\")\n ip16 = Declaration.new(:allow, \"192.168.0.0/16\")\n\n assert_equal(-1, ip24 <=> ip16, \"/16 sorted before /24 in ip with masks\")\n\n # Make sure ip checks sort before host checks\n assert_equal(-1, ip_exact <=> host_exact, \"IP exact did not sort before host exact\")\n\n\n assert_equal(\n -1, ip_range <=> host_range,\n\n \"IP range did not sort before host range\")\n\n host_long = Declaration.new(:allow, \"*.domain.host.com\")\n\n assert_equal(-1, host_long <=> host_range, \"did not sort by domain length\")\n\n # Now make sure denies sort before allows, for equivalent\n # declarations.\n host_deny = Declaration.new(:deny, \"host.com\")\n assert_equal(-1, host_deny <=> host_exact, \"deny did not sort before allow when exact\")\n\n host_range_deny = Declaration.new(:deny, \"*.host.com\")\n assert_equal(-1, host_range_deny <=> host_range, \"deny did not sort before allow when ranged\")\n\n ip_allow = Declaration.new(:allow, \"192.168.0.0/16\")\n ip_deny = Declaration.new(:deny, \"192.168.0.0/16\")\n\n\n assert_equal(\n -1, ip_deny <=> ip_allow,\n\n \"deny did not sort before allow in ip range\")\n\n %w{host.com *.domain.com 192.168.0.1 192.168.0.1/24}.each do |decl|\n assert_equal(0, Declaration.new(:allow, decl) <=>\n Declaration.new(:allow, decl),\n \"Equivalent declarations for #{decl} were considered different\"\n )\n end\n end", "def run\n super\n res = []\n entity_name = _get_entity_name\n entity_type = _get_entity_type_string\n\n # skip cdns\n if !get_cdn_domains.select{ |x| entity_name =~ /#{x}/}.empty? || \n !get_internal_domains.select{ |x| entity_name =~ /#{x}/}.empty?\n _log \"This domain resolves to a known cdn or internal host, skipping\"\n return\n end\n\n # check that it resolves\n resolves_to = resolve_names entity_name\n unless resolves_to.first\n _log \"No resolution for this record, unable to check\"\n return \n end\n\n # We use their DNS servers to query\n nameservers= ['185.228.168.168', '185.228.168.169']\n _log \"Querying #{nameservers}\"\n dns_obj = Resolv::DNS.new(nameserver: nameservers)\n \n # Try twice, just in case (avoid FP's)\n res = dns_obj.getaddresses(entity_name)\n res.concat(dns_obj.getresources(entity_name, Resolv::DNS::Resource::IN::CNAME)).flatten\n\n # Detected only if there's no resolution\n if res.any?\n _log \"Resolves to #{res.map{|x| \"#{x.to_s}\" }}. Seems we're good!\"\n else\n source = \"CleanBrowsing\"\n description = \"The Cleanbrowsing DNS security filter focuses on restricting access \" + \n \"to malicious activity. 
It blocks phishing, spam and known malicious domains.\"\n \n _create_linked_issue(\"blocked_by_dns\", {\n status: \"confirmed\",\n additional_description: description,\n source: source, \n proof: \"Resolved to the following address(es) outside of #{source} (#{nameservers}): #{resolves_to.join(\", \")}\",\n to_reproduce: \"dig #{entity_name} @#{nameservers.first}\",\n references: [{ type: \"remediation\", uri: \"https://cleanbrowsing.org/\" }]\n }) \n \n # Also store it on the entity \n blocked_list = @entity.get_detail(\"suspicious_activity_detected\") || [] \n @entity.set_detail(\"suspicious_activity_detected\", blocked_list.concat([{source: source}]))\n\n end\n\n end", "def resolved?(dns, challenge)\n valid = false\n dns.each_resource(challenge[:acme_domain], Resolv::DNS::Resource::IN::TXT) { |resp|\n resp.strings.each do |curr_resp|\n if curr_resp == challenge[:txt_challenge]\n puts \"✔ #{challenge[:acme_domain]}: Found #{curr_resp}, a match.\"\n return true\n end\n end\n valid = true\n puts \"✘ #{challenge[:acme_domain]}: Found TXT record, but didn't match expected value of #{challenge[:txt_challenge]}\" \n }\n if !valid\n puts \"✘ #{challenge[:acme_domain]}: Found no TXT record\"\n end\n return false\nend", "def get_foreman_hosts(per_page = 10000)\n curl = setup_curl(\"#{@foreman_url}/api/hosts?per_page=#{per_page}\", true)\n curl.perform\n servers_junk = JSON.parse(curl.body_str)\n servers_array = []\n servers_junk.each { |server| servers_array << server['host']['name'] }\n @foreman_hosts = servers_array\n end", "def bulk_delete(list)\n\t\tputs \"Delete entries to the local host repository from:\\n #{list}\"\n\t\thosts=list\n\t\tchanges=Array.new\n\t\tif hosts.size > 0\n\t\t\thosts.map do |x|\n\t\t\t\thost=delete(x)\n\t\t\t\tchanges.push(host) unless host.nil?\n\t\t\tend\n\t\t\tputs \"Done deleting hosts.\"\n\t\t\treturn changes\n\t\telse\n\t\t\tputs \"Error: empty list - no entry is loaded. 
Please check your list and try again.\"\n\t\tend\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\"\n\tend", "def parse_zk_hosts(servers)\n servers.join(',')\n end", "def parse_dns(dns_raw)\n dns = []\n dns_records = {}\n record_type_A = []\n record_type_A_IP = []\n record_type_CNAME = []\n record_type_CNAME_alias = []\n\n #adds each line to dns array and splipt them with \",\"\n dns_raw.each do |lines_in_files|\n dns.push([lines_in_files.split(\",\")])\n end\n\n #Checks for recordA,IP or recordCNAME and adds them to the respected array\n dns.each do |words_in_files|\n if words_in_files[0][0] == \"A\"\n record_type_A.push(words_in_files[0][1].strip)\n record_type_A_IP.push(words_in_files[0][2].strip)\n elsif words_in_files[0][0] == \"CNAME\"\n record_type_CNAME.push(words_in_files[0][1].strip)\n record_type_CNAME_alias.push(words_in_files[0][2].strip)\n end\n end\n\n #record_A hash stores values of recordA\n record_A = {\n :source => record_type_A,\n :ip => record_type_A_IP,\n }\n\n #recordCNAME hash stores values of recordCNAME\n record_CNAME = {\n :source => record_type_CNAME,\n :alias => record_type_CNAME_alias,\n }\n\n #dns_records gets both Hashes\n dns_records = {\n :A => record_A,\n :CNAME => record_CNAME,\n }\n\n #returns record dns_record with two hashes.\n return dns_records\nend", "def hosts\n @hosts ||= match[5].split(\",\")\n end", "def hosts\n if @hosts\n @hosts\n elsif @host\n [@host]\n else\n self.class.hosts\n end\n end", "def hosts\n `#{cmk} --list-hosts`.split(/\\n/).sort\n end", "def hosts(args = nil)\n if args and args[:server]\n args[:server].split(';').collect { |server| $hosts[server] ||\n Config.warn_fail(\"#{server} is not a known host\") }\n else\n $hosts.values\n end\nend", "def discrepancy?(local, remote)\n local.status != remote['status'] ||\n local.ad_description != remote['description']\n end", "def hosts\n @hosts ||= match[5].split(\",\")\n end", "def stub_hosts(ip_spec)\n stub_hosts_on(default, ip_spec)\n end", "def host_2_ip (hostname)\n\t\tputs \"Perform DNS query on host: #{hostname}\" if @verbose\n\t\tbegin\n\t\t\tips=Array.new\n\t\t\tif is_ip?(hostname)\n\t\t\t\tputs \"No change - same IP is returned. \" if @verbose\n\t\t\t\treturn hostname.strip\n\t\t\telse\n\t\t\t\tips=Resolv.getaddresses(hostname)\n\t\t\t\tif (ips.empty?) then\n\t\t\t\t\tputs \"Failed to resolve #{hostname}\" if @verbose\n\t\t\t\t\treturn nil\n\t\t\t\telse\n\t\t\t\t\tputs \"IP found: #{ips.first}\" if @verbose\n\t\t\t\t\treturn ips.first.strip\n\t\t\t\tend\n\t\t\tend\n\t\trescue => ee\n\t\t\tputs \"Exception on method host_2_ip for host #{hostname}: #{ee}\" if @verbose\n\t\t\treturn nil\n\t\tend\n\tend", "def valid_dns_record? 
(hostname)\n\t\tputs \"Validate the hostname record: #{hostname}\" if @verbose\n\t\tbegin\n\t\t\tips=Resolv.getaddresses(hostname)\n\t\t\tif ips.empty?\n\t\t\t\treturn false\n\t\t\telse\n\t\t\t\tputs \"Found: #{hostname}\" if @verbose\n\t\t\t\treturn true\n\t\t\tend\n\t\trescue => ee\n\t\t\tputs \"Exception on method #{__method__} for host #{hostname}: #{ee}\" if @verbose\n\t\t\treturn false\n\t\tend\n\tend", "def index\n included = default_to_array(params[:included])\n excluded = default_to_array(params[:excluded])\n records = Record.custom_filter(included, excluded)\n related_hostnames = Hostname\n .filter_related_hostnames(included, excluded)\n .map{ |h| h.address }\n .tally\n\n render json: {\n total_records: records.length,\n records: records.map { |r| { id: r.id, ip_address: r.ip } },\n related_hostnames: related_hostnames.map { |h, c| { hostname: h, count: c } }\n }\n end", "def diff\n\thost_tracker=Wmap::HostTracker.instance\n\tf_new = File.open(ARGV[1],'r')\n\tf_new.each do |line|\n\t\tsite=line.chomp.strip\n\t\tsite1=host_tracker.url_2_site(site)\n\t\tabort \"Error on processing site: #{site}\" if site1.nil?\n\t\thost=host_tracker.url_2_host(site1)\n\t\tabort \"Error on processing host: #{host}\" if host.nil?\n\t\tip=host_tracker.local_host_2_ip(host)\n\t\tip=host_tracker.host_2_ip(host) if ip.nil?\n\t\tabort \"Error resolve host: #{host}\" if ip.nil?\n\t\tport=host_tracker.url_2_port(site1)\n\t\tabort \"Error retrieve service port on site: #{site}\" if port.nil?\n\t\tkey=ip+\":\"\n\t\tkey+=port.to_s\n\t\tif @services.key?(key)\n\t\t\tputs \"No\"\n\t\telse\n\t\t\tputs \"Yes\"\n\t\tend\n\tend\n\tf_new.close\n\thost_tracker=nil \nend", "def post_connection_check(peer_cert, hostname)\n check_common_name = true\n cert = peer_cert\n cert.extensions.each{|ext|\n next if ext.oid != \"subjectAltName\"\n ext.value.split(/,\\s+/).each{|general_name|\n if /\\ADNS:(.*)/ =~ general_name\n check_common_name = false\n reg = Regexp.escape($1).gsub(/\\\\\\*/, \"[^.]+\")\n return true if /\\A#{reg}\\z/i =~ hostname\n elsif /\\AIP Address:(.*)/ =~ general_name\n check_common_name = false\n return true if $1 == hostname\n end\n }\n }\n if check_common_name\n cert.subject.to_a.each{|oid, value|\n if oid == \"CN\"\n reg = Regexp.escape(value).gsub(/\\\\\\*/, \"[^.]+\")\n return true if /\\A#{reg}\\z/i =~ hostname\n end\n }\n end\n raise OpenSSL::SSL::SSLError, \"hostname not match\"\n end", "def assert_resultsets_match(result_lines, expected_lines, filter_exp=nil, sort=false, regexp_matching=false)\n result_lines = ignore_xml_coverage(result_lines)\n expected_lines = ignore_xml_coverage(expected_lines)\n if filter_exp\n filter_exp = to_utf8_regex(filter_exp)\n expected_lines.delete_if {|line| !to_utf8(line).match(filter_exp) }\n result_lines.delete_if {|line| !to_utf8(line).match(filter_exp) }\n expected_lines.collect! {|line| line.chomp.strip}\n if sort\n result_lines.sort!\n expected_lines.sort!\n end\n\n assert(result_lines.length == expected_lines.length, \"Expected #{expected_lines.length} lines in result, \" +\n \"but got #{result_lines.length} lines.\\n\\nExpected lines:\\n#{expected_lines.join(%Q!\\n!)}\\n\\n\" +\n \"Returned lines:\\n#{result_lines.join(%Q!\\n!)}\" + (@current_assert_file != nil ? 
\"Answer file: #{@current_assert_file}\" : \"\"))\n\n expected_lines.each_index do |i|\n result = result_lines[i].strip\n if regexp_matching then\n assert(result.match(Regexp.new(expected_lines[i])), \"Result line #{i}:\\n#{result} does not match \" +\n \"expected result regexp #{i}:\\n#{expected_lines[i]}\" + (@current_assert_file != nil ? \"Answer file: #{@current_assert_file}\" : \"\"))\n else\n assert(result == expected_lines[i], \"Result line #{i}:\\n#{result} does not match \" +\n \"expected result line #{i}:\\n#{expected_lines[i]}\" + (@current_assert_file != nil ? \"Answer file: #{@current_assert_file}\" : \"\"))\n end\n end\n\n else\n offset = 0\n exp_xml = expected_lines.join(\"\\n\")\n got_xml = result_lines.join(\"\\n\")\n expected_lines.each do |line|\n index = got_xml.index(line, offset)\n assert(index, \"Not found: '#{line}' at #{offset} matching '#{exp_xml}' and '#{got_xml}'\" + (@current_assert_file != nil ? \"Answer file: #{@current_assert_file}\" : \"\"))\n offset = index + line.length - 1\n end\n end\n\n end", "def assert_queries_match(query1, query2, filter_exp=nil, sort=false, qrserver_id=0)\n result_xml1 = search(query1, qrserver_id).xmldata.split(\"\\n\")\n result_xml2 = search(query2, qrserver_id).xmldata.split(\"\\n\")\n assert_resultsets_match(result_xml1, result_xml2, filter_exp, sort)\n end", "def server_structs\n array = []\n if @struct.hosts\n @struct.hosts.count.times do |i|\n array << Lib.memcached_select_server_at(@struct, i)\n end\n end\n array\n end", "def check_aggregate(summary)\n #puts \"summary is #{summary}\"\n total, ok, silenced, stale, failing = summary.values_at(:total, :ok, :silenced, :stale, :failing)\n return 'OK', 'No servers running the check' if total.zero?\n\n eff_total = total - silenced * (config[:silenced] ? 1 : 0)\n return 'OK', 'All hosts silenced' if eff_total.zero?\n\n ok_pct = (100 * ok / eff_total.to_f).to_i\n\n # Loop through the arrays and split the hostname so we get a short hostname \n message = \"#{ok} OK out of #{eff_total} total.\"\n message << \" #{silenced} silenced.\" if config[:silenced] && silenced > 0\n message << \" #{stale.size} stale.\" unless stale.empty?\n message << \" #{ok_pct}% OK, #{config[:critical]}% threshold\"\n message << \"\\nStale hosts: #{stale.map{|host| host.split('.').first}.sort[0..10].join ','}\" unless stale.empty?\n message << \"\\nFailing hosts: #{failing.map{|host| host.split('.').first}.sort[0..10].join ','}\" unless failing.empty?\n message << \"\\nMinimum number of hosts required is #{config[:min_nodes]} and only #{ok} found\" if ok < config[:min_nodes]\n\n state = ok_pct >= config[:critical] ? 'OK' : 'CRITICAL'\n state = ok >= config[:min_nodes] ? 
state : 'CRITICAL'\n return state, message\n end", "def hosts\n h = []\n r = ('a'..'z')\n r.each do |i|\n r.each do |j|\n r.each do |k|\n h << i.to_s + j + k + \".com\"\n end\n end\n end\n h\n end", "def pdb_get_facts(node_ip_hostname)\n keyed_facts = {}\n\n if test_env\n response = \"[{\\\"certname\\\":\\\"host-name-01.domain.com\\\",\\\"name\\\":\\\"trusted\\\",\\\"value\\\":{\\\"authenticated\\\":\\\"remote\\\",\\\"certname\\\":\\\"host-name-01.domain.com\\\",\\\"domain\\\":\\\"domain.com\\\",\\\"extensions\\\":{\\\"company_trusted_swimlane\\\":\\\"n/a\\\",\\\"pp_datacenter\\\":\\\"mtv\\\",\\\"pp_environment\\\":\\\"qa\\\",\\\"pp_product\\\":\\\"test\\\",\\\"pp_role\\\":\\\"rabbit_mq\\\"},\\\"hostname\\\":\\\"host-name-01\\\"},\\\"environment\\\":\\\"tier2\\\"},{\\\"certname\\\":\\\"puppet.upguard.org\\\",\\\"environment\\\":\\\"production\\\",\\\"name\\\":\\\"virtual\\\",\\\"value\\\":\\\"#{TEST_OS_VIRT_PLATFORM}\\\"},{\\\"certname\\\":\\\"puppet.upguard.org\\\",\\\"environment\\\":\\\"production\\\",\\\"name\\\":\\\"operatingsystemmajrelease\\\",\\\"value\\\":\\\"#{TEST_OS_MAJOR_RELEASE}\\\"},{\\\"certname\\\":\\\"puppet.upguard.org\\\",\\\"environment\\\":\\\"production\\\",\\\"name\\\":\\\"operatingsystem\\\",\\\"value\\\":\\\"#{TEST_OS}\\\"}]\"\n else\n response = `curl -X GET #{PUPPETDB_URL}/pdb/query/v4/nodes/#{node_ip_hostname}/facts -d 'query=[\"or\", [\"=\",\"name\",\"trusted\"], [\"=\",\"name\",\"virtual\"], [\"=\",\"name\",\"operatingsystem\"], [\"=\",\"name\",\"operatingsystemmajrelease\"]]' --tlsv1 --cacert /etc/puppetlabs/puppet/ssl/certs/ca.pem --cert /etc/puppetlabs/puppet/ssl/certs/#{COMPILE_MASTER_PEM} --key /etc/puppetlabs/puppet/ssl/private_keys/#{COMPILE_MASTER_PEM}`\n Puppet.info(\"#{log_prefix} trusted facts for #{node_ip_hostname} is: response=#{response}\")\n end\n\n if response.nil?\n return nil\n end\n facts = JSON.load(response)\n if !facts.is_a?(Array) && !facts.any?\n return nil\n end\n facts.each do |fact|\n keyed_facts[fact['name']] = fact\n end\n keyed_facts\n end", "def add2hosts(session,record,hosts)\n\tip,host = record.split(\",\")\n\tprint_status(\"Adding Record for Host #{host} with IP #{ip}\")\n\tsession.sys.process.execute(\"cmd /c echo #{ip}\\t#{host} >> #{hosts}\",nil, {'Hidden' => true})\nend", "def hosts(opts)\n ::ApplicationRecord.connection_pool.with_connection {\n # If we have the ID, there is no point in creating a complex query.\n if opts[:id] && !opts[:id].to_s.empty?\n return Array.wrap(Mdm::Host.find(opts[:id]))\n end\n\n wspace = Msf::Util::DBManager.process_opts_workspace(opts, framework)\n\n conditions = {}\n conditions[:state] = [Msf::HostState::Alive, Msf::HostState::Unknown] if opts[:non_dead]\n conditions[:address] = opts[:address] if opts[:address] && !opts[:address].empty?\n\n if opts[:search_term] && !opts[:search_term].empty?\n column_search_conditions = Msf::Util::DBManager.create_all_column_search_conditions(Mdm::Host, opts[:search_term])\n tag_conditions = Arel::Nodes::Regexp.new(Mdm::Tag.arel_table[:name], Arel::Nodes.build_quoted(\"(?mi)#{opts[:search_term]}\"))\n search_conditions = column_search_conditions.or(tag_conditions)\n wspace.hosts.where(conditions).where(search_conditions).includes(:tags).references(:tags).order(:address)\n else\n wspace.hosts.where(conditions).order(:address)\n end\n }\n end", "def get_prim_uniq_sites\n\t\tputs \"Retrieve and prime unique sites in the site store. 
\" if @verbose\n\t\thost_tracker=Wmap::HostTracker.instance\n\t\thost_tracker.data_dir=@data_dir\n\t\tprimary_host_tracker=Wmap::HostTracker::PrimaryHost.instance\n\t\tprimary_host_tracker.data_dir=@data_dir\n\t\tprimary_host_tracker.hosts_file = primary_host_tracker.data_dir + \"/\" + \"prime_hosts\"\n\t\tprimary_host_tracker.known_hosts=primary_host_tracker.load_known_hosts_from_file(@hosts_file)\n\t\t# Step 1. Retrieve the unique site list first\n\t\tsites=get_uniq_sites\n\t\tprim_uniq_sites=Array.new\n\t\t# Step 2. Iterate on the unique site list, spit out the site in the primary host format one at a time\n\t\tsites.map do |site|\n\t\t\tputs \"Work on priming unique site: #{site}\" if @verbose\n\t\t\thost=url_2_host(site)\n\t\t\t# case#1, for the IP only site, do nothing (presuming 'refresh_ip_sites' or 'refresh_all' method already take care of the potential discrepancy here).\n\t\t\tif is_ip?(host)\n\t\t\t\tprim_uniq_sites.push(site)\n\t\t\t\tnext\n\t\t\tend\n\t\t\tip=@known_sites[site]['ip']\n\t\t\t# case#2, for site with an unique IP, do nothing\n\t\t\tputs \"Local hosts table entry count for #{ip}: #{host_tracker.alias[ip]}\" if @verbose\n\t\t\tif host_tracker.alias[ip] == 1\n\t\t\t\tprim_uniq_sites.push(site)\n\t\t\t\tnext\n\t\t\tend\n\t\t\t# case#3, case of multiple IPs for A DNS record, where the site IP may have 0 alias count, do nothing\n\t\t\tif host_tracker.alias[ip] == nil\n\t\t\t\tprim_uniq_sites.push(site)\n\t\t\t\tnext\n\t\t\tend\n\t\t\t# case#4, for the site has a duplicate IP with others, we try to determine which one is the primary site\n\t\t\t# raise \"Error: inconsistency detected on record: #{site}. Please run the following shell command to refresh it first: \\n\\srefresh #{site}\" if tracker1.alias[ip].nil?\n\t\t\tif ( primary_host_tracker.known_hosts.key?(ip) and (host_tracker.alias[ip] > 1) )\n\t\t\t\tnew_host=primary_host_tracker.prime(host)\n\t\t\t\tputs \"Host: #{host}, New host:#{new_host}\" if @verbose\n\t\t\t\tunless host==new_host\n\t\t\t\t\tnew_site=site.sub(host,new_host)\n\t\t\t\t\traise \"Site not found in the site tracking data repository: #{new_site}. You may need to add it into the site store first. Execute the following shell command before trying again: \\n\\twadd #{new_site}\\n\" unless @known_sites.key?(new_site)\n\t\t\t\t\tnew_ip=@known_sites[new_site]['ip']\n\t\t\t\t\tif new_ip==ip\t\t# consistency check\n\t\t\t\t\t\tsite=new_site\n\t\t\t\t\telse\n\t\t\t\t\t\t# TBD - case of multiple IPs for A DNS record\n\t\t\t\t\t\t#raise \"Inconsistency found on prime host entrance: #{new_ip}, #{ip}; #{new_site}, #{site}. Please refresh your entries by running the following shell command: \\n\\s refresh #{new_site}\"\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\t\tprim_uniq_sites.push(site)\n\t\tend\n\t\tprimary_host_tracker=nil\n\t\thost_tracker=nil\n\t\treturn prim_uniq_sites\n\t#rescue => ee\n\t#\tputs \"Exception on method #{__method__}: #{ee}\"\n\tend", "def get_aliases(hostname)\n headers = { Authorization: \"Bearer #{@token}\", 'Content-Type': 'application/json' }\n response = HTTParty.get(\"https://#{NETDB_SERVER}/nodes/#{hostname.fully_qualify}\",\n :headers => headers)\n if response.code != 200\n raise \"no node found for #{hostname}\"\n end\n\n # Parse through the response to find any aliases, and make sure that this is\n # the main hostname.\n node_json = JSON.parse(response.body)\n aliases = []\n node_json['names'].each do |n|\n if n['name'] != hostname\n raise \"#{hostname} is an alias for #{n['name']}. 
Please rerun with hostname #{n['name']}.\"\n elsif n.key?('aliases')\n aliases = (aliases + n['aliases'])\n end\n end\n\n aliases\nend", "def prefetch_property_hashes(names, portsdir, port_dbdir)\n names = [names] unless names.is_a?(Array)\n\n origins = names.select{|name| name=~/^#{PORTORIGIN_RE}$/}\n pkgnames_or_portnames = names - origins\n\n records_by_origin = search_ports_by_origin(origins, portsdir, port_dbdir)\n origins -= records_by_origin.keys\n\n records_by_pkgname = search_ports_by_pkgname(pkgnames_or_portnames, portsdir, port_dbdir)\n pkgnames_or_portnames -= records_by_pkgname.keys\n\n records_by_portname = search_ports_by_portname(pkgnames_or_portnames, portsdir, port_dbdir)\n pkgnames_or_portnames -= records_by_portname.keys\n\n array = records_by_portname.to_a + records_by_pkgname.to_a + records_by_origin.to_a\n\n errors = []\n missing = origins + pkgnames_or_portnames # what's left was not found\n unless missing.empty?\n list = missing.map{|m| \"'#{m}'\"}.join(', ')\n errors << \"the following packages could not be found: #{list}\"\n end\n detected = detect_ambiguous_search_results(array)\n unless detected.empty?\n detected[0] = \"found #{detected[0]}\"\n errors += detected\n end\n unless errors.empty?\n msg = errors.join(' and ')\n raise Puppet::Error, msg\n end\n\n # at this point we know, that array[i][1].length == 1 for each i\n Hash[ array.map{|key,records| [key, build_property_hash(records[0])]} ]\n end", "def fetch_site_records(site, type)\n site_records = {}\n\n # This makes no sense (currently) for external DNS\n if type == :internal\n # Servers\n site_records['servers'] = get_servers_records(site) unless site['servers'].nil?\n\n # PDUs\n site_records['pdus'] = get_pdus_records(site) unless site['pdus'].nil?\n\n # Laptops (same input format as networks)\n site_records['laptops'] = get_networks_records(site, 'laptops') unless site['laptops'].nil?\n end\n\n # Add L3 network devices to both internal and external DNS, but only keep IPv6 records for external DNS.\n site_records['networks'] = get_networks_records(site, 'network_equipments') unless site['network_equipments'].nil?\n if type == :external\n site_records['networks'].select! 
{ |record| record.is_a?(DNS::Zone::RR::AAAA) }\n end\n\n site.fetch(\"clusters\", []).sort.each { |cluster_uid, cluster|\n\n cluster.fetch('nodes').select { |_node_uid, node|\n node != nil && node[\"status\"] != \"retired\" && node.has_key?('network_adapters')\n }.each_sort_by_node_uid { |node_uid, node|\n\n network_adapters = {}\n\n # Nodes\n node.fetch('network_adapters').each { |net|\n network_adapters[net['device']] = {\n \"ip6\" => net[\"ip6\"],\n \"mounted\" => net[\"mounted\"],\n 'alias' => net['alias'],\n 'pname' => net['name'],\n }\n network_adapters[net['device']][\"ip\"] = net[\"ip\"] if type == :internal\n }\n\n # Mic\n if node['mic'] && (node['mic']['ip'] || node['mic']['ip6'])\n network_adapters['mic0'] = {\"ip\" => node['mic']['ip'], \"ip6\" => node['mic']['ip6']}\n end\n\n site_records[cluster_uid] ||= []\n site_records[cluster_uid] += get_node_records(cluster_uid, node_uid, network_adapters)\n\n # Kavlan\n kavlan_adapters = {}\n kavlan_kinds = ['kavlan6']\n kavlan_kinds << 'kavlan' if type == :internal\n kavlan_kinds.each { |kavlan_kind|\n if node[kavlan_kind]\n node.fetch(kavlan_kind).each { |net_uid, net_hash|\n net_hash.each { |kavlan_net_uid, ip|\n kavlan_adapters[\"#{net_uid}-#{kavlan_net_uid}\"] ||= {}\n if /^eth[0-9]$/.match(net_uid)\n kavlan_adapters[\"#{net_uid}-#{kavlan_net_uid}\"]['mounted'] = node['network_adapters'].select { |n|\n n['device'] == net_uid\n }[0]['mounted']\n kavlan_adapters[\"#{net_uid}-#{kavlan_net_uid}\"]['pname'] = node['network_adapters'].select { |n|\n n['device'] == net_uid\n }.first['name'] + '-' + kavlan_net_uid\n if kavlan_kind == 'kavlan6'\n kavlan_adapters[\"#{net_uid}-#{kavlan_net_uid}\"]['ip6'] = ip\n else\n kavlan_adapters[\"#{net_uid}-#{kavlan_net_uid}\"]['ip'] = ip\n end\n end\n if /^fpga[0-9]$/.match(net_uid)\n kavlan_adapters[\"#{net_uid}-#{kavlan_net_uid}\"]['mountable'] = node['network_adapters'].select { |n|\n n['device'] == net_uid\n }[0]['moutable']\n if kavlan_kind == 'kavlan6'\n kavlan_adapters[\"#{net_uid}-#{kavlan_net_uid}\"]['ip6'] = ip\n else\n kavlan_adapters[\"#{net_uid}-#{kavlan_net_uid}\"]['ip'] = ip\n end\n end\n }\n }\n end\n }\n if kavlan_adapters.length > 0\n key_sr = \"#{cluster_uid}-kavlan\"\n site_records[key_sr] ||= []\n site_records[key_sr] += get_node_kavlan_records(cluster_uid, node_uid, network_adapters, kavlan_adapters)\n end\n } # each nodes\n } # each cluster\n\n site_records\nend", "def is_hosts_configured()\n ary = []\n hosts_file = \"hosts\"\n open(hosts_file, \"r\") {|f|\n f.each_line {|l|\n if not (l =~ /^(\\[|#|\\n)/) # match lines doesn't start from \"[\", \"#\" or \"\\n\"\n ary << l\n end\n }\n }\n if ary.size == 0\n return false\n else\n return true\n end\nend", "def mongo_hosts=(h)\n @mongo_hosts = h.to_a\n end", "def dns_a_record\n @_dns_a_record = \"0.0.0.0\" if @config[:dns_lookup] == :off\n @_dns_a_record ||= Socket.gethostbyname(dns_name)\n rescue SocketError # not found, but could also mean network not work\n @_dns_a_record ||= []\n end" ]
[ "0.7051646", "0.6280299", "0.61838067", "0.5814303", "0.5810692", "0.56846297", "0.56522155", "0.56451154", "0.55988854", "0.55615735", "0.5445128", "0.5437142", "0.5374587", "0.5336497", "0.5333903", "0.5324774", "0.5277381", "0.5245956", "0.52327573", "0.52294654", "0.521821", "0.5206462", "0.5201293", "0.5177595", "0.5164522", "0.5157822", "0.5143663", "0.51240146", "0.50909746", "0.50751", "0.50593996", "0.50246775", "0.50104415", "0.49838254", "0.49456397", "0.49422455", "0.49317336", "0.49270746", "0.492488", "0.4923771", "0.49227417", "0.49031666", "0.4890918", "0.488112", "0.4880607", "0.48763055", "0.48751912", "0.4870346", "0.48672292", "0.48605382", "0.483407", "0.4831714", "0.48231426", "0.48228705", "0.4822469", "0.48111728", "0.47990698", "0.47969645", "0.47774127", "0.47748986", "0.47726366", "0.4764085", "0.4763984", "0.47444394", "0.47344032", "0.47201696", "0.46897647", "0.468813", "0.46838865", "0.4682873", "0.46818054", "0.46783656", "0.4667236", "0.46581006", "0.46557206", "0.46465188", "0.46429223", "0.46417698", "0.4639201", "0.46288964", "0.46275553", "0.46262074", "0.46217597", "0.4619762", "0.46166632", "0.46165287", "0.46151868", "0.46081975", "0.46080863", "0.4599027", "0.4597009", "0.45935127", "0.4578626", "0.4571653", "0.45684537", "0.45663217", "0.45641488", "0.456332", "0.45633194", "0.4559145" ]
0.7778975
0
Compare the expected +hosts+ with those in the fetched MX +records+.
def compare_mx_records( records, hosts ) record_hosts = Set.new( records.map(&:exchange) ) hosts = Set.new( hosts.map {|name| Resolv::DNS::Name.create(name + '.')} ) self.log.debug "Comparing %p to %p" % [ record_hosts, hosts ] status = nil if ( record_hosts ^ hosts ).empty? record_strings = records. map {|rec| "%s[%d]" % [rec.exchange, rec.preference || 0] } status = { mx_record: record_strings.join( ', ' ) } elsif !( subset = record_hosts - hosts ).empty? status = { error: "missing MX records: %s" % [subset.map(&:to_s).join(', ')] } elsif !( subset = hosts - record_hosts ).empty? status = { error: "extra MX records: %s" % [subset.map(&:to_s).join(', ')] } end return status end
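Purely as an illustration of how a check like the one above could be exercised on its own, the following standalone Ruby sketch fetches MX records with Resolv::DNS and compares the exchange hosts against an expected list using the same Set arithmetic; the domain, host names, and output labels are placeholders chosen for this sketch, not values defined by this record.

# Standalone sketch (assumed names): query a domain's MX records and compare
# the exchange hosts against an expected list, in the spirit of the method above.
require 'resolv'
require 'set'

domain   = 'example.com'                           # placeholder domain
expected = ['mx1.example.com', 'mx2.example.com']  # placeholder expected hosts

# Fetch the MX resource records for the domain.
records = Resolv::DNS.open do |dns|
  dns.getresources(domain, Resolv::DNS::Resource::IN::MX)
end

# Build comparable sets of absolute DNS names.
record_hosts = Set.new(records.map(&:exchange))
expected_set = Set.new(expected.map { |name| Resolv::DNS::Name.create(name + '.') })

if (record_hosts ^ expected_set).empty?
  summary = records.map { |rec| "#{rec.exchange}[#{rec.preference || 0}]" }.join(', ')
  puts "MX records match: #{summary}"
elsif !(missing = expected_set - record_hosts).empty?
  puts "expected hosts with no MX record: #{missing.map(&:to_s).join(', ')}"
else
  puts "MX records not in the expected list: #{(record_hosts - expected_set).map(&:to_s).join(', ')}"
end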
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compare_ns_records( records, hosts )\n\t\t\trecord_hosts = Set.new( records.map(&:name) )\n\t\t\thosts = Set.new( hosts.map {|name| Resolv::DNS::Name.create(name + '.')} )\n\n\t\t\tself.log.debug \"Comparing %p to %p\" % [ record_hosts, hosts ]\n\n\t\t\tstatus = nil\n\t\t\tif ( record_hosts ^ hosts ).empty?\n\t\t\t\tstatus = { ns_record: record_hosts.map(&:to_s) }\n\t\t\telsif !( subset = record_hosts - hosts ).empty?\n\t\t\t\tstatus = { error: \"missing NS records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\telsif !( subset = hosts - record_hosts ).empty?\n\t\t\t\tstatus = { error: \"extra NS records: %s\" % [subset.map(&:to_s).join(', ')] }\n\t\t\tend\n\n\t\t\treturn status\n\t\tend", "def dns_check\n gen_host_records # These are the hosts we have\n load_all_subnets # These are the DNS entries\n \n # We want a standard layout, with the hypervisor API entries being \n @host_record.each do |hr| # Array of host record Hash's\n hn = hr[:hostname]\n shn = hn.split('.',2)[0] # Remove the domain\n forward_hr = @forward_host_record[hn] # Find Host Record\n if forward_hr.nil?\n # We have no IPAM entry for this hostname\n if (rhr = @reverse_host_records[hr[:ip]])\n puts \"Only Reverse IPAM entry for #{shn}: #{rhr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n else\n puts \"No IPAM entry for hostrecord: #{hr}\"\n @infoblox.create_host_record(ip_address: hr[:ip], hostname: hn, aliases: hr[:aliases])\n end\n else\n # We have an IPAM record for this hostname\n if forward_hr[:ip] != hr[:ip]\n puts \"IP mismatch #{shn} #{hr[:ip]} != #{forward_hr[:ip]} for IPAM: #{forward_hr}\"\n elsif forward_hr[:hostname] != hn\n # Reference must be via ALIASES or CNAMES\n if forward_hr[:aliases].include?(shn)\n puts \"Hostname #{shn} is an ALIAS. IPAM: #{forward_hr}\"\n elsif forward_hr[:cnames].include?(hn)\n puts \"Hostname #{shn} is a CNAME. IPAM: #{forward_hr}\"\n end\n end\n end\n end\n \n # We want to find IPAM entries, not matching existing @host_record entries\n @reverse_host_records.each do |ip, ahr| # Hash to array of host records from IPAM, indexed by IP\n ahr.each do |hr| # One IP can have multiple host records, with associated ALIAS and CNAME records\n local_hr = @host_record_index[hr[:hostname]]\n if local_hr.nil?\n puts \"No local entry #{hr[:hostname]} for #{hr}\"\n end\n end\n end\nend", "def test_external_mx_defs\n\t\tEM::DnsCache.verbose\n\n\t\tEM::DnsCache.parse_local_mx_records LocalMxRecords\n\n\t\tout = nil\n\t\tEM.run {\n\t\t\td = EM::DnsCache.resolve_mx \"boondoggle.zzz\"\n\t\t\td.errback {EM.stop}\n\t\t\td.callback {|r|\n\t\t\t\tout = r\n\t\t\t\tEM.stop\n\t\t\t}\n\t\t}\n\t\tassert_equal( [\"esmtp.someone.zzz\", \"55.56.57.58\", \"65.66.67.68\"], out )\n\tend", "def does_resolve_to_host?\n mx_records.include? Socket.gethostname\n end", "def check_hostnames\n all_good = true\n \n @check_groups.each do |group|\n group.checks.each do |check|\n unless check.hostname && Dnsruby::Resolv.getaddress(check.hostname)\n puts \"Error: check #{check.name} has invalid hostname '#{check.hostname}'\"\n all_good = false\n end\n end\n end\n \n all_good\n end", "def assert_ip_has_saved_hostnames(ip, hostnames)\n record = DnsRecord.find_by! 
ip: ip\n\n saved_hostnames = record.hostnames.map do |hostname|\n hostname.hostname\n end\n\n assert_equal ip, record.ip\n assert_equal hostnames.to_set, saved_hostnames.to_set\n end", "def mxers\n return [[\"example.com\", \"0.0.0.0\", 1]] if @dns_disabled\n @mxers ||= Resolv::DNS.open { |dns|\n dns.timeouts = @config[:dns_timeout] if @config[:dns_timeout]\n\n ress = begin\n dns.getresources(@host, Resolv::DNS::Resource::IN::MX)\n rescue Resolv::ResolvTimeout\n []\n end\n\n records = ress.map { |r|\n if r.exchange.to_s > \" \"\n [r.exchange.to_s, IPSocket.getaddress(r.exchange.to_s), r.preference]\n end\n }\n records.compact\n }\n # not found, but could also mean network not work or it could mean one record doesn't resolve an address\n rescue SocketError\n [[\"example.com\", \"0.0.0.0\", 1]]\n end", "def compare_a_records( records, addresses )\n\t\t\trecord_addresses = Set.new( records.map(&:address) )\n\t\t\taddresses = Set.new( addresses.map {|addr| Resolv::IPv4.create(addr)} )\n\n\t\t\tstatus = nil\n\t\t\tif addresses.subset?( record_addresses )\n\t\t\t\tstatus = { a_record: {addresses: record_addresses.map(&:to_s)} }\n\t\t\telse\n\t\t\t\tmissing = addresses - record_addresses\n\t\t\t\tstatus = { error: \"missing A records: %s\" % [ missing.map(&:to_s).join(', ') ] }\n\t\t\tend\n\n\t\t\treturn status\n\t\tend", "def hosts_eql?(a, b) # rubocop:disable Naming/UncommunicativeMethodParamName\n parse_host(a) == parse_host(b)\n rescue IPAddr::InvalidAddressError\n false\n end", "def mx_records\n @mx_records ||= Resolv::DNS.open do |dns|\n records = dns.getresources(name, Resolv::DNS::Resource::IN::MX)\n records.map(&:exchange).join(\" \")\n end\n end", "def verify_abs_hosts(hosts)\n success = false\n puts \"Verifying ABS hosts: #{hosts}\"\n hosts.each do |host|\n puts\n puts \"Current host: #{host}\"\n\n success = verify_abs_host(host[:hostname])\n break unless success\n end\n\n puts \"Unable to verify the provisioned hosts\" unless success\n return success\n end", "def check_mx\n domain=self.address.split('@')\n @domain = domain[1]\n @name=domain[0]\n #puts \"domain is #{domain}\"\n flag=false\n if @domain!=nil\n begin\n ret = self.resolver.query(@domain, Types.MX)\n if ret.answer!=nil and ret.rcode=='NOERROR'\n @mx=ret.answer.first.exchange.to_s if ret.answer!=nil \n @[email protected]\n msg= \"mx record #{self.mx}\"\n puts msg\n flag=true\n end\n rescue Dnsruby::NXDomain \n msg=\"non existing domain #{@domain}\"\n puts msg\n rescue Exception => e\n msg=\"exception #{e.message}\"\n puts msg\n end\n \n else\n msg=\"nil domain\"\n end\n # puts \"ret is #{ret.inspect}\"\n [flag,msg]\n end", "def records\n dns.getresources domain, Resolv::DNS::Resource::IN::MX\n end", "def wanted_records(records, domains)\n _records = records.select { |m| domains.include?(m[1]) }\n _records = _records.each_cons(2).select { |a, b| a.last == b.last }\n _records = _records.group_by(&:last).keys.map do |v|\n { record_id: v.first, ip_address: v.last }\n end\n _records\n end", "def full_host_record(ip:)\n load_cnames\n \n @forward_host_record ||= {} # Global, as we want to do name lookups.\n return_record = []\n unless( (host_records = @infoblox.get_host_by_ip(ip_address: ip)).nil? )\n host_records.each do |hosts|\n hosts.each do |hn|\n # Assign an empty record, if we haven't seen this host before\n @forward_host_record[hn] ||= { hostname: hn, ip: '', aliases: [], cnames: [] }\n \n # Record the IP. 
There may be multiple IPs with one hostname.\n @forward_host_record[hn][:ip] = ip\n \n # The hostname might have CNAMES point to it\n unless @reverse_cnames[hn].nil?\n @reverse_cnames[hn].each do |cn| \n @forward_host_record[hn][:cnames] << cn \n end\n end\n \n # The hostname may have alternate hostname A records, stored in IPAM as ALIASES\n @infoblox.get_alias(hostname: hn) do |a| \n short_alias = a.split('.',2)[0]\n @forward_host_record[hn][:aliases] << short_alias\n \n # The ALIASes might have CNAME records pointing to it\n unless @reverse_cnames[a].nil?\n # Record the ALIAS CNAMES against the parent hostname.\n @reverse_cnames[a].each do |cn| \n @forward_host_record[hn][:cnames] << cn \n end\n end\n end\n return_record << @forward_host_record[hn]\n \n # Add forward lookup entries for each ALIAS\n host_domain = hn.split('.',2)[1]\n @forward_host_record[hn][:aliases].each do |a|\n @forward_host_record[\"#{a}.#{host_domain}\"] = @forward_host_record[hn]\n end\n \n # Add forward lookup entries for each CNAME\n @forward_host_record[hn][:cnames].each do |cn|\n @forward_host_record[cn] = @forward_host_record[hn]\n end\n \n end\n end\n end\n return return_record\nend", "def check_hosts(hosts_stat, deep = false, verbose = true)\n\n $stderr.print \"Checking which hosts are up:\\n \" if verbose\n\n tot = hosts_stat.length\n count = 1\n\n hosts_stat.each do |hs|\n\n if verbose\n pct = 100. * count.to_f / tot.to_f\n $stderr.printf( \"\\r\\033[0K[% 3d%%] %s\", pct.round, hs[:names].first )\n $stderr.flush\n end\n\n hn = hs[:names].first\n\n if deep\n rnd = (rand()*10000000).round()\n hs[:up] = %x[ ssh -oUserKnownHostsFile=#{$known_hosts} \\\n -oConnectTimeout=5 #{hn} echo #{rnd} 2> /dev/null ].include?(rnd.to_s)\n else\n hs[:up] = system(\"nc -z #{hn} 22 2> /dev/null > /dev/null\")\n end\n\n count += 1\n\n end\n\n warn \"\\r\\033[0KDone!\" if verbose\n\nend", "def transform_hosts(hosts)\n require 'time'\n\n node_data = []\n\n hosts.each do |host|\n if host[:report_timestamp].nil?\n # This can happen in weird cases. Mark as an expired node, so\n # the expired logic doesn't try to do math on a nil timestamp.\n last_checkin = nil\n formatted_checkin = 'N/A'\n host[:expired] = nil\n else\n last_checkin = Time.now - Time.parse(host[:report_timestamp])\n formatted_checkin = sprintf(\"%#{@options.round_to}f\",(last_checkin * @options.divisor).abs)\n end\n node_data << {\n :last_checkin => last_checkin,\n :expired => host[:expired].nil? ? false : host[:expired],\n :certname => host[:certname],\n :environment => host[:report_environment].nil? ? 'N/A' : host[:report_environment],\n :status => host[:latest_report_status].nil? ? 'N/A' : host[:latest_report_status],\n :formatted_checkin => formatted_checkin\n }\n end\n\n unless @options.environments.empty?\n node_data.delete_if {|node| not @options.environments.include? node[:environment] }\n end\n unless @options.statuses.empty?\n node_data.delete_if {|node| not @options.statuses.include? 
node[:status] }\n end\n\n node_data\n end", "def bad_hosts\n bad_hosts.collect {|r| r.host }\n end", "def known_host_hash?(hostlist, entries); end", "def server_hosts\n return [:default] if @resource[:server_hosts] &&\n @resource[:server_hosts][0] == :default &&\n @property_hash[:server_hosts] ==\n @aaa_group.default_servers\n @property_hash[:server_hosts]\n end", "def test_resolver_converts_ipaddrs_array\n @parser.log_reader\n assert_equal @parser.ipaddrs_q.size, 12\n assert_equal @parser.records_q.size, 12\n @parser.resolve_names\n assert_equal @parser.domains_hash.map { |k,v| [ k, v[0] ] }.sort, [\n [\"208.77.188.166\", \"www.example.com\"],\n [\"74.125.67.100\", \"gw-in-f100.google.com\"],\n [\"75.119.201.189\", \"apache2-moon.legs.dreamhost.com\"],\n [\"75.146.57.34\", \"greed.zenspider.com\"]\n]\n end", "def parse_dns(dns_raw)\n dns = []\n dns_records = {}\n record_type_A = []\n record_type_A_IP = []\n record_type_CNAME = []\n record_type_CNAME_alias = []\n\n #adds each line to dns array and splipt them with \",\"\n dns_raw.each do |lines_in_files|\n dns.push([lines_in_files.split(\",\")])\n end\n\n #Checks for recordA,IP or recordCNAME and adds them to the respected array\n dns.each do |words_in_files|\n if words_in_files[0][0] == \"A\"\n record_type_A.push(words_in_files[0][1].strip)\n record_type_A_IP.push(words_in_files[0][2].strip)\n elsif words_in_files[0][0] == \"CNAME\"\n record_type_CNAME.push(words_in_files[0][1].strip)\n record_type_CNAME_alias.push(words_in_files[0][2].strip)\n end\n end\n\n #record_A hash stores values of recordA\n record_A = {\n :source => record_type_A,\n :ip => record_type_A_IP,\n }\n\n #recordCNAME hash stores values of recordCNAME\n record_CNAME = {\n :source => record_type_CNAME,\n :alias => record_type_CNAME_alias,\n }\n\n #dns_records gets both Hashes\n dns_records = {\n :A => record_A,\n :CNAME => record_CNAME,\n }\n\n #returns record dns_record with two hashes.\n return dns_records\nend", "def hosts\n @hosts ||= match[5].split(\",\")\n end", "def hosts\n @hosts ||= match[5].split(\",\")\n end", "def find_hosts( fqdn, zone_id = nil)\n if zone_id.nil?\n #look for matching host across all zones\n request(\n :expects => 200,\n :method => 'GET',\n :parser => Fog::Parsers::Zerigo::DNS::FindHosts.new,\n :path => \"/api/1.1/hosts.xml?fqdn=#{fqdn}\"\n )\n else\n #look for hosts in a specific zone\n request(\n :expects => 200,\n :method => 'GET',\n :parser => Fog::Parsers::Zerigo::DNS::FindHosts.new,\n :path => \"/api/1.1/zones/#{zone_id}/hosts.xml?fqdn=#{fqdn}\"\n )\n end\n end", "def hosts(touchAndPrune=false)\n hosts=@vp_lock.synchronize{@hostname2vp.keys}\n if touchAndPrune\n check_up_hosts(hosts)\n else\n hosts\n end\n end", "def get_puppetdb_hosts\n curl = setup_curl(\"#{@puppetdb_url}/v3/nodes\")\n curl.get\n servers_junk = JSON.parse(curl.body_str)\n servers_array = []\n servers_junk.each { |server| servers_array << server['name'] }\n @puppetdb_hosts = servers_array\n end", "def compare_domain(args)\r\n server_list = args[:server_list]\r\n domain = args[:domain_name]\r\n rtype = args[:rtype]\r\n rdata = args[:actual_rdata]\r\n rdata = (rtype == \"NAPTR\") ? 
rdata : rdata.downcase\r\n r = \"\"\r\n failed_rlist = []\r\n @timeout = 30\r\n sleep 15 if args[:sleepfirst]\r\n server_list.each do |server|\r\n dig_pass = \"succeed to dig @#{server} #{domain} #{rtype} => #{rdata}\"\r\n dig = `dig @#{server} #{domain} #{rtype}`\r\n if dig.include?(rdata)\r\n puts dig_pass\r\n else\r\n puts \"dig @#{server} #{domain} #{rtype} failed as expected!\" if args[:expected_dig_fail]\r\n return \"succeed\" if args[:expected_dig_fail]\r\n begin\r\n Timeout::timeout(@timeout){\r\n while !dig.include?(rdata)\r\n sleep 5\r\n dig_retry = `dig @#{server} #{domain} #{rtype}`\r\n puts dig_pass if dig_retry.include?(rdata)\r\n break if dig_retry.include?(rdata)\r\n end\r\n }\r\n rescue Timeout::Error\r\n puts \"Error => dig @#{server} #{domain} #{rtype} timed out!\"\r\n failed_rlist << \"failed\"\r\n end\r\n end\r\n end\r\n failed_rlist.empty? ? 'succeed' : 'failed'\r\n end", "def valid_dns?\n @host.exchanger.has_dns_a_record?\n end", "def check_up_hosts(hostlisthash, settings={ :retry => true, :maxalert => NO_EMAIL, :timeout => 30})\n if hostlisthash.class==Array\n hostlisthash=hostlisthash.to_h(true)\n end\n if not settings.include?(:timeout)\n settings[:timeout]=30\n end\n if not settings.include?(:retry)\n settings[:retry]=true\n end\n if not settings.include?(:maxalert)\n settings[:maxalert]=NO_EMAIL\n end\n results, unsuccessful_hosts=issue_command_on_hosts(hostlisthash,settings){|h,p| h.backtic(\"hostname --fqdn\").chomp(\"\\n\").strip.downcase}\n uphosts=[]\n results.each{|vp|\n uphosts << ($rename_vp.has_key?(vp.at(0)) ? $rename_vp[vp.at(0)] : vp.at(0))\n if vp.at(0) != vp.at(1)\n log { \"check_up_hosts(): vp.at(0) != vp.at(1): #{vp.join(\" \")}\" }\n end\n }\n # if prune\n # unsuccessful_hosts.each{|h|\n # self.unregister_host(h)\n # }\n # end\n return uphosts\n end", "def load_known_hosts_from_file (f_hosts=@hosts_file)\n\t\tputs \"Loading local hosts from file: #{f_hosts} ...\" if @verbose\n\t\tknown_hosts=Hash.new\n\t\t@alias = Hash.new\n\t\tFile.write(f_hosts, \"\") unless File.exist?(f_hosts)\n\t\tf=File.open(f_hosts, 'r')\n\t\tf.each do |line|\n\t\t\tnext unless line =~ /\\d+\\.\\d+\\.\\d+\\.\\d+/\n\t\t\tentry=line.chomp.split(%r{\\t+|\\s+|\\,})\n\t\t\tkey=entry[0].downcase\n\t\t\tvalue=entry[1]\n\t\t\tputs \"Loading key value pair: #{key} - #{value}\" if @verbose\n\t\t\tknown_hosts[key] = Hash.new unless known_hosts.key?(key)\n\t\t\tknown_hosts[key]= value\n\t\t\t# For reverse host lookup\n\t\t\tknown_hosts[value] = Hash.new unless known_hosts.key?(value)\n\t\t\tknown_hosts[value] = key\n\t\t\t# Count the number of alias for the recorded IP\n\t\t\tif @alias.key?(value)\n\t\t\t\t@alias[value]+=1\n\t\t\telse\n\t\t\t\t@alias[value]=1\n\t\t\tend\n\t\tend\n\t\tf.close\n\t\treturn known_hosts\n\t\t#rescue => ee\n\t\t#\tputs \"Exception on method #{__method__}: #{ee}\"\n\t\t#\treturn known_hosts\n\tend", "def dns_a_record\n @_dns_a_record = \"0.0.0.0\" if @config[:dns_lookup] == :off\n @_dns_a_record ||= Socket.gethostbyname(dns_name)\n rescue SocketError # not found, but could also mean network not work\n @_dns_a_record ||= []\n end", "def domains\n @_domains ||= mxers.map { |m| Host.new(m.first).domain_name }.sort.uniq\n end", "def other_server_hosts\n @other_server_hosts ||= all_server_hosts.reject {|x| x == server_host}\n end", "def get_a2a_hosts\n abs_initialize\n mom =\n { 'role': \"mom\",\n 'size': @mom_size,\n 'volume_size': @mom_volume_size }\n\n metrics =\n { 'role': \"metrics\",\n 'size': @metrics_size,\n 'volume_size': @metrics_volume_size }\n\n 
hosts = [mom, metrics]\n\n return hosts\n end", "def de_duplicate\n\t\t\t@known_hosts.keys.map do |key|\n\t\t\t\tip=@known_hosts[key]\n\t\t\t\tif @known_ips.key?(ip)\n\t\t\t\t\t@known_hosts.delete(key)\n\t\t\t\telse\n\t\t\t\t\t@known_ips[ip]=true\n\t\t\t\tend\n\t\t\tend\n\t\tend", "def list_records(host)\n domain = find_domain!(host)\n raise IncorrectDomainType, \"Domain #{host} is a #{domain.type} domain\" unless domain.can_have_records?\n \n return @domain_records[domain.host] unless @domain_records.nil? or @domain_records[domain.host].nil?\n \n res = get('/dns.php', domain.list_records_options)\n if response_status_message(res) == (RESPONSE_MESSAGES[:DOMAIN_LIST_RECORDS] % domain.host)\n record_list = RecordList.parse_list(domain, get(URI.join(\"http://#{EVERYDNS_HOSTNAME}/dns.php\", res['location']).to_s).body)\n cache_domain_records(domain.host, record_list)\n return record_list\n else\n return false\n end\n end", "def match_host(hostname)\n hostname === host\n end", "def host_uniqueness?(zone, host_list, vdc_id = -1)\n all_hosts = \"\"\n zone.vdcs.all.each{|vdc|\n if vdc.hosts != nil and !vdc.hosts.empty? and vdc.id != vdc_id\n all_hosts << ',' << vdc.hosts\n end\n }\n\n all_hosts = all_hosts.split(',')\n\n host_list.split(\",\").each{|host|\n return false if all_hosts.include?(host)\n }\n\n return true\n end", "def print_known_hosts\n\t\tputs \"\\nSummary of local hosts Table:\"\n\t\tputs \"Total entries: #{@known_hosts.size}\"\n\t\t(@known_hosts.keys.sort-[\"\",nil]).each do |key|\n\t\t\tvalue=@known_hosts[key]\n\t\t\tputs \"#{key}\\t#{value}\" if is_fqdn?(key)\n\t\tend\n\t\tputs \"End of the summary\"\n\tend", "def resolved?(dns, challenge)\n valid = false\n dns.each_resource(challenge[:acme_domain], Resolv::DNS::Resource::IN::TXT) { |resp|\n resp.strings.each do |curr_resp|\n if curr_resp == challenge[:txt_challenge]\n puts \"✔ #{challenge[:acme_domain]}: Found #{curr_resp}, a match.\"\n return true\n end\n end\n valid = true\n puts \"✘ #{challenge[:acme_domain]}: Found TXT record, but didn't match expected value of #{challenge[:txt_challenge]}\" \n }\n if !valid\n puts \"✘ #{challenge[:acme_domain]}: Found no TXT record\"\n end\n return false\nend", "def servers_in(record, options = {})\n type = case record.to_s.downcase\n when 'exchange'\n Resolv::DNS::Resource::IN::MX\n when 'address'\n Resolv::DNS::Resource::IN::A\n end\n Timeout::timeout(options.fetch(:timeout, 2)) do\n Resolv::DNS.new.getresources(name, type).inject([]) do |servers, s|\n servers << Server.new(s.send(record).to_s)\n end\n end\n rescue Timeout::Error\n nil\n end", "def get_host_by_hostname(hostname, collector)\n host = nil\n if collector\n hosts_json = rpc(\"getHosts\", {\"hostGroupId\" => 1})\n hosts_resp = JSON.parse(hosts_json)\n# p hosts_resp\n collector_resp = JSON.parse(rpc(\"getAgents\", {}))\n if hosts_resp[\"status\"] == 200\n hosts_resp[\"data\"][\"hosts\"].each do |h|\n if h[\"hostName\"].eql?(hostname)\n # puts(\"Found host with matching hostname: #{resource[:hostname]}\")\n # puts(\"Checking agent match\")\n if collector_resp[\"status\"] == 200\n collector_resp[\"data\"].each do |c|\n if c[\"description\"].eql?(collector)\n host = h\n end\n end\n else\n puts(\"Unable to retrieve collector list from server\")\n end\n end\n end\n else\n puts(\"Unable to retrieve host list from server\" )\n end\n end\n host\nend", "def refetch_hosts?\n return false unless vima?\n return true if hosts_updated_at.nil?\n\n hosts_updated_at < Archiving.settings[:skip_host_fetch_time_period].ago\n end", "def hosts\n # 
prevent original array from being changed\n @hosts.dup\n end", "def sort_hosts(hosts,sort_by)\n case sort_by\n when :certname\n return hosts.sort{ |a,b| a[:certname] <=> b[:certname] }\n when :time\n return hosts.sort{ |a,b| \n if a[:last_checkin].nil? and !b[:last_checkin].nil?\n 1\n elsif !a[:last_checkin].nil? and b[:last_checkin].nil?\n -1\n else\n a[:last_checkin] <=> b[:last_checkin]\n end\n }\n when :status\n return hosts.sort{ |a,b| a[:status].downcase <=> b[:status].downcase }\n when :environment\n return hosts.sort{ |a,b| a[:environment] <=> b[:environment] }\n else\n return hosts\n end\n end", "def compare_values( records, node_data )\n\t\t\ttype = node_data['record_type']\n\n\t\t\tcase type\n\t\t\twhen 'A'\n\t\t\t\treturn self.compare_a_records( records, node_data['values'] )\n\t\t\twhen 'NS'\n\t\t\t\treturn self.compare_ns_records( records, node_data['values'] )\n\t\t\twhen 'MX'\n\t\t\t\treturn self.compare_mx_records( records, node_data['values'] )\n\t\t\telse\n\t\t\t\treturn { dns: \"#{type} not comparable yet.\" }\n\t\t\tend\n\t\tend", "def all_hosts_in(file)\n servers = []\n file.each do |line|\n if line.include?('Host ')\n servers << line.sub('Host ', '').strip\n end\n end\n servers\n end", "def ip_addresses( hostname )\n @@resolve ||= Resolv.new\n @@ip_addresses_cached ||= {}\n\n @@ip_addresses_cached[hostname.to_s] ||= @@resolve.getaddresses( hostname )\n end", "def monitor_hosts_and_vms\n totalmemory = 0\n totalcpu = 0\n\n host_info = \"HYPERVISOR=opennebula\\n\"\n host_info << \"PUBLIC_CLOUD=YES\\n\"\n host_info << \"PRIORITY=-1\\n\"\n host_info << \"CPUSPEED=1000\\n\"\n host_info << \"HOSTNAME=\\\"#{@host['hostname']}\\\"\\n\"\n case @host['host_mon']['type']\n when 'fixed'\n host_info << \"TOTALMEMORY=#{@host['host_mon']['memory']}\\n\"\n host_info << \"TOTALCPU=#{@host['host_mon']['cpu']}\\n\"\n when 'instance_based'\n @host['capacity'].each { |name, size|\n cpu, mem = instance_type_capacity(name)\n totalmemory += mem * size.to_i\n totalcpu += cpu * size.to_i\n }\n host_info << \"TOTALMEMORY=#{totalmemory.round}\\n\"\n host_info << \"TOTALCPU=#{totalcpu}\\n\"\n when 'dynamic'\n host_info << get_remote_quotas\n end\n\n usedcpu = 0\n usedmemory = 0\n\n vms_info = get_all_vms_poll_info\n puts host_info\n puts vms_info\n end", "def validate_dns_a_record(domain, expected_ip_address)\n return true if skip_validations?\n say \"Checking that DNS #{domain.green} resolves to IP address #{expected_ip_address.green}... 
\", \" \"\n packet = Net::DNS::Resolver.start(domain, Net::DNS::A)\n resolved_a_records = packet.answer.map(&:value)\n if packet.answer.size == 0\n error = \"Domain '#{domain.green}' does not resolve to an IP address\"\n end\n unless resolved_a_records == [expected_ip_address]\n error = \"Domain #{domain} should resolve to IP address #{expected_ip_address}\"\n end\n if error\n say \"ooh no!\".red\n say \"Please setup your DNS:\"\n say \"Subdomain: * \" + \"(wildcard)\".yellow\n say \"IP address: #{expected_ip_address}\"\n err(error)\n else\n say \"ok\".green\n true\n end\n end", "def parse_dns(dns_raw)\n dns_records = {}\n dns_raw.each do |line|\n arr = line.split(\", \")\n if(arr[0] == \"A\" || arr[0] == \"CNAME\")\n dns_records[arr[1]] = {:type => arr[0], :target => arr[2].strip}\n end\n end\n \n return dns_records\n end", "def hosts(args = nil)\n if args and args[:server]\n args[:server].split(';').collect { |server| $hosts[server] ||\n Config.warn_fail(\"#{server} is not a known host\") }\n else\n $hosts.values\n end\nend", "def hosts\n h = []\n r = ('a'..'z')\n r.each do |i|\n r.each do |j|\n r.each do |k|\n h << i.to_s + j + k + \".com\"\n end\n end\n end\n h\n end", "def query_files_hosts(hostlist, hosts)\n report_dir = get_report_dir\n\n existing_nodes = hostlist.map{|x| x[:certname]}\n\n local_host_template = {\n :deactivated=>false,\n :latest_report_hash=>nil,\n :facts_environment=>nil,\n :cached_catalog_status=>\"not_used\",\n :report_environment=>nil,\n :latest_report_corrective_change=>nil,\n :catalog_environment=>nil,\n :facts_timestamp=>nil,\n :latest_report_noop=>nil,\n :expired=>false,\n :latest_report_noop_pending=>nil,\n :report_timestamp=>nil,\n :certname=>nil,\n :catalog_timestamp=>nil,\n :latest_report_job_id=>nil,\n :latest_report_status=>nil\n }.freeze\n\n local_host_reports = []\n\n if File.directory?(report_dir)\n @logger.debug(\"Processing Report Directory: #{report_dir}\")\n\n Dir.glob(\"#{report_dir}/*\").each do |node_dir|\n @logger.debug(\"Processing Node Directory: #{node_dir}\")\n\n latest_report = Dir.glob(\"#{node_dir}/*.yaml\").sort.last\n if latest_report\n @logger.debug(\"Processing YAML Report: #{latest_report}\")\n\n begin\n require 'puppet'\n\n transaction_report = YAML.load_file(latest_report)\n\n unless (hosts.empty? 
|| hosts.include?(transaction_report.host))\n @logger.debug(\"Skipping #{transaction_report.host} since it is not in the host list\")\n next\n end\n\n if existing_nodes.include?(transaction_report.host)\n @logger.debug(\"Skipping #{transaction_report.host} since it already exists\")\n next\n end\n\n local_host_data = Marshal.load(Marshal.dump(local_host_template))\n local_host_data[:latest_report_hash] = transaction_report.catalog_uuid\n local_host_data[:facts_environment] = transaction_report.environment\n local_host_data[:report_environment] = transaction_report.environment\n local_host_data[:latest_report_corrective_change] = transaction_report.corrective_change\n local_host_data[:catalog_environment] = transaction_report.environment\n local_host_data[:facts_timestamp] = transaction_report.time.to_s\n local_host_data[:latest_report_noop] = transaction_report.noop\n local_host_data[:latest_report_noop_pending] = transaction_report.noop_pending\n local_host_data[:report_timestamp] = transaction_report.time.to_s\n local_host_data[:certname] = transaction_report.host\n local_host_data[:catalog_timestamp] = transaction_report.time.to_s\n local_host_data[:latest_report_job_id] = transaction_report.catalog_uuid\n local_host_data[:latest_report_status] = transaction_report.status\n\n hostlist << local_host_data\n\n @logger.debug(\"Processed Host Report: #{local_host_data}\")\n rescue => e\n @logger.warn \"Error processing report at '#{latest_report}': #{e}\"\n end\n else\n @logger.debug \"Could not find latest report in '#{node_dir}'\"\n end\n end\n else\n @logger.debug \"Could not find report directory at '#{report_dir}'\"\n end\n end", "def hosts\n (self.web_hosts.to_a + self.db_hosts.to_a + self.balance_hosts.to_a + self.app_hosts.to_a).uniq.sort\n end", "def valid_mx?\n return true unless dns_enabled?\n if exchangers.nil?\n set_error(:domain_unknown)\n elsif exchangers.mx_ips.size > 0\n if localhost? && !@config[:host_local]\n set_error(:domain_no_localhost)\n else\n true\n end\n elsif @config[:dns_timeout].nil? && valid_dns?\n set_error(:domain_does_not_accept_email)\n else\n set_error(:domain_unknown)\n end\n end", "def valid_dns_record? (hostname)\n\t\tputs \"Validate the hostname record: #{hostname}\" if @verbose\n\t\tbegin\n\t\t\tips=Resolv.getaddresses(hostname)\n\t\t\tif ips.empty?\n\t\t\t\treturn false\n\t\t\telse\n\t\t\t\tputs \"Found: #{hostname}\" if @verbose\n\t\t\t\treturn true\n\t\t\tend\n\t\trescue => ee\n\t\t\tputs \"Exception on method #{__method__} for host #{hostname}: #{ee}\" if @verbose\n\t\t\treturn false\n\t\tend\n\tend", "def required_dns_records\n all_ingress_hostnames + SPECIAL_A_RECORD_NAMES\nend", "def server_structs\n array = []\n if @struct.hosts\n @struct.hosts.count.times do |i|\n array << Lib.memcached_select_server_at(@struct, i)\n end\n end\n array\n end", "def valid_abs_resource_hosts?(abs_resource_hosts)\n is_valid = false\n\n if abs_resource_hosts.nil?\n puts \"A valid hosts array is required; nil was specified\"\n puts\n else\n\n begin\n hosts = JSON.parse(abs_resource_hosts)\n host = hosts[0]\n hostname = host[\"hostname\"]\n if !hostname.nil? 
&& !hostname.empty?\n is_valid = true\n else\n puts \"The specified resource host array is not valid: #{abs_resource_hosts}\"\n puts\n end\n rescue\n # TODO: raise?\n puts \"JSON::ParserError encountered parsing the hosts array: #{abs_resource_hosts}\"\n end\n\n end\n\n is_valid\n end", "def hosts\n `#{cmk} --list-hosts`.split(/\\n/).sort\n end", "def outdated_hosts\n hosts = []\n scanned_hosts = self.date_severity_count\n scanned_hosts.each do |sh|\n sh.hosts.split(\", \").each do |host|\n hosts << host.gsub(/\\s+/, \"\")\n end\n end\n return hosts.uniq.sort\n end", "def update_dns()\n #\n # Handle each host in the config file at a time\n #\n @config['hosts'].each {|h|\n #\n # Skip update if current public IP matches the IP for the host in the cache file\n #\n if @cache[h['host']] && @myip.eql?(@cache[h['host']]['ip'])\n @logger.info \"Skipping #{h['host']} - Already pointing to #{@myip}\"\n else\n url = \"https://domains.google.com/nic/update?hostname=#{h['host']}&myip=#{@myip}\"\n @logger.info \"Updating host [#{h['host']}] - #{url}\"\n\n #\n # Access Google Domains API to update IP\n #\n open(url,\n :http_basic_authentication => [h['username'],h['password']],\n \"User-Agent\" => \"#{@options[:user_agent]}\") {|r|\n if r.status[0] == \"200\"\n r.each_line {|line|\n if (/(?<sts>(good|nochg))\\s+(?<ip>(\\d+\\.\\d+\\.\\d+\\.\\d+)?)/ =~ line)\n #\n # Cache if API call was successful\n #\n @cache[h['host']] = {'ip' => ip}\n @logger.debug \"[#{@responses[sts][0]}][#{sts}] : [#{@responses[sts][1]}]\"\n else\n @logger.warn \"[#{@responses[line][0]}][#{line}] : [#{@responses[line][1]}]\"\n end\n }\n else\n @logger.error \"Error status returned #{r.status.inspect}\"\n end\n }\n write_cache_file\n end\n }\n end", "def create_godaddy_records\n dme.create_mx_record(params[:domain], '', '0', 'smtp.secureserver.net.', {} )\n dme.create_mx_record(params[:domain], '', '5', 'mailstore1.secureserver.net.', {} )\n\n dme.create_record(params[:domain], 'email', 'CNAME', 'email.secureserver.net.', {})\n dme.create_record(params[:domain], 'imap', 'CNAME', 'imap.secureserver.net.', {})\n dme.create_record(params[:domain], 'mail', 'CNAME', 'pop.secureserver.net.', {})\n dme.create_record(params[:domain], 'mobilemail', 'CNAME', 'mobilemail-v01.prod.mesa1.secureserver.net.', {})\n dme.create_record(params[:domain], 'pda', 'CNAME', 'mobilemail-v01.prod.mesa1.secureserver.net.', {})\n dme.create_record(params[:domain], 'pop', 'CNAME', 'pop.secureserver.net.', {})\n dme.create_record(params[:domain], 'smtp', 'CNAME', 'smtp.secureserver.net.', {})\n dme.create_record(params[:domain], 'webmail', 'CNAME', 'webmail.secureserver.net.', {})\n end", "def get_foreman_hosts(per_page = 10000)\n curl = setup_curl(\"#{@foreman_url}/api/hosts?per_page=#{per_page}\", true)\n curl.perform\n servers_junk = JSON.parse(curl.body_str)\n servers_array = []\n servers_junk.each { |server| servers_array << server['host']['name'] }\n @foreman_hosts = servers_array\n end", "def find_hosts!(host_spec)\n if self.groups[host_spec]\n return self.groups[host_spec].host_list.map { |m| self.hosts[m] }\n elsif self.hosts[host_spec]\n return [self.hosts[host_spec]]\n else\n say \"No inventory matching: '#{host_spec}' found. \"\n say ([\"Available hosts:\"] + self.hosts.keys).join(\"\\n\\t\")\n say ([\"Available groups:\"] + self.groups.keys).join(\"\\n\\t\")\n exit\n end\n end", "def dig_mx\n Resolv::DNS.open do |dns|\n txts = dns.getresources(self,Resolv::DNS::Resource::IN::MX).collect { |r| r.exchange.to_s }\n if txts.empty? 
then nil else txts end\n end\n end", "def hosts(wspace = workspace, only_up = false, addresses = nil)\n\t\tconditions = {}\n\t\tconditions[:state] = [Msf::HostState::Alive, Msf::HostState::Unknown] if only_up\n\t\tconditions[:address] = addresses if addresses\n\t\twspace.hosts.all(:conditions => conditions, :order => :address)\n\tend", "def memcached_servers\n %w(127.0.0.1:11211)\n end", "def parse_hosts (args)\n\n discoveryrc = File.expand_path(\"~/.discoveryrc\")\n aliasmap = {}\n if File.readable?(discoveryrc)\n File.readlines(discoveryrc).each {|line| line.scan(/(\\w+)\\s*=\\s*(.*)/) {|k,v| aliasmap[k]=v}}\n end\n\n if args.size == 0 || args[0] =~ /^-/\n @hosts = aliasmap[\"localhost\"].nil? ? [\"http://localhost:8080\"] : aliasmap[\"localhost\"]\n else\n hostname = args.shift()\n @hosts = (aliasmap[hostname] || hostname).split(',').map() {|host| host.strip()};\n end\n \n return @hosts\n end", "def test_hosts\n assert_equal(['localhost'], cmk.folder('folder1').hosts)\n end", "def is_hosts_configured()\n ary = []\n hosts_file = \"hosts\"\n open(hosts_file, \"r\") {|f|\n f.each_line {|l|\n if not (l =~ /^(\\[|#|\\n)/) # match lines doesn't start from \"[\", \"#\" or \"\\n\"\n ary << l\n end\n }\n }\n if ary.size == 0\n return false\n else\n return true\n end\nend", "def post_connection_check(peer_cert, hostname)\n check_common_name = true\n cert = peer_cert\n cert.extensions.each{|ext|\n next if ext.oid != \"subjectAltName\"\n ext.value.split(/,\\s+/).each{|general_name|\n if /\\ADNS:(.*)/ =~ general_name\n check_common_name = false\n reg = Regexp.escape($1).gsub(/\\\\\\*/, \"[^.]+\")\n return true if /\\A#{reg}\\z/i =~ hostname\n elsif /\\AIP Address:(.*)/ =~ general_name\n check_common_name = false\n return true if $1 == hostname\n end\n }\n }\n if check_common_name\n cert.subject.to_a.each{|oid, value|\n if oid == \"CN\"\n reg = Regexp.escape(value).gsub(/\\\\\\*/, \"[^.]+\")\n return true if /\\A#{reg}\\z/i =~ hostname\n end\n }\n end\n raise OpenSSL::SSL::SSLError, \"hostname not match\"\n end", "def mx_ips\n return [\"0.0.0.0\"] if @dns_disabled\n mxers.map { |m| m[1] }\n end", "def hosts\n if @hosts\n @hosts\n elsif @host\n [@host]\n else\n self.class.hosts\n end\n end", "def dns_responses\n decoded_responses = udp_packets_with_src_port(DNS_PORT).map { |p| Resolv::DNS::Message.decode(p.payload) }\n\n decoded_responses.each_with_object({}) do |response, memo|\n name = response.question.first.first.to_s\n memo[name] ||= []\n response.answer.each do |ans|\n case ans.last\n when Resolv::DNS::Resource::IN::CNAME\n memo[name] << ans.last.name\n when Resolv::DNS::Resource::IN::AAAA, Resolv::DNS::Resource::IN::A\n memo[name] << ans.last.address\n else\n puts ans.last\n end\n end\n end\n end", "def configured_hosts\n\t\troutes = self.configured_routes\n\t\treturn Mongrel2::Config::Host.where( id: routes.select(:host_id) )\n\tend", "def stub_hosts(ip_spec)\n stub_hosts_on(default, ip_spec)\n end", "def verify_host\n @j_del.isVerifyHost\n end", "def lookup_addresses(data)\n return @servers\n end", "def valid_email_dns?(email)\n if email.include?('@') and email.include?('.')\n domain = email.match(/\\@(.+)/)[1]\n Resolv::DNS.open do |dns|\n @mx = dns.getresources(domain, Resolv::DNS::Resource::IN::MX)\n end\n @mx.size > 0 ? true : false\n else\n return false\n end\n end", "def add_hosts?(description, servers)\n !!(member_of_this_set?(description) &&\n (!has_primary?(servers) || description.primary?))\n end", "def parse_dns(dns_raw)\n raw_data=dns_raw.reject { |line| line.empty? 
or line[0] == \"#\" }#removing hash and empty lines,string\n split_data=raw_data.map { |line| line.strip.split(\", \") }#split the entry into columns using ','\n clean_data=split_data.reject { |record| record.length < 3 }# discarding false entries in zone file\n clean_data.each_with_object({}) do |record, records|# preparing hash for dns entries\n records[record[1]] = {\n type: record[0],\n target: record[2],\n }\n end\n end", "def hosts_delta(puppetdb_hosts = @puppetdb_hosts, foreman_hosts = @foreman_hosts)\n @hosts_delta = foreman_hosts - puppetdb_hosts\n end", "def local_to_remote_check\n #checking locals are consistent with remotes\n obj_ptr_re = /\\[\\d{1},(.*)\\]/\n locRW_re = /\\locRW\\:(.?)\\,/\n recalled_re = /recalled\\:(.?)\\,/\n count = 0\n $arr_local_store.each do |local_file|\n local_file.each do |line|\n unless line.chomp.empty?\n obj_ptr = line.match(obj_ptr_re)\n remote_host = line.match(locRW_re)\n recalled_for = line.match(recalled_re)\n\n #check if remote host knows about this obj_ptr\n if !remote_host[1].eql?\"\" and recalled_for[1].eql?\"\"\n found = false\n $arr_remote_store[remote_host[1].to_i].each do |r_line|\n if r_line.include? obj_ptr[1]\n found = true\n end\n end\n if !found\n p \"OMG! #{count} gave its object #{obj_ptr[1]} to #{remote_host[1]}, but it doesn't seem to know about it\"\n end\n end\n end\n end\n count = count + 1\n end\nend", "def log_hosts_to_affect\n\t\trval = Array( self.log_hosts || Loggability.log_hosts.values ).uniq\n\t\treturn rval.map {|obj| Loggability.log_host_for(obj) }\n\tend", "def cleanup_records\n Fog::DNS[:dreamhost].records.each do |r|\n # Do not delete the 'do-not-delete' record, we need it for the tests\n r.destroy if r.name =~ /#{test_domain}/ and r.name != do_not_delete_record\n end\nend", "def hosts_with_data(resultset)\n resultset.count { |_host, values| !values['data'].empty? }\n end", "def parse_dns(dns_raw)\n dns_records = {}\n dns_raw.each do |rec|\n rec=rec.chomp\n unless rec[0] == \"#\" || rec.empty?\n records = rec.split(/,/)\n records = records.map {|recd| recd.strip()}\n unless dns_records.has_key?(records[0])\n dns_records.store(records[0],[[records[1],records[2]]])\n else\n dns_records[records[0]].push([records[1],records[2]])\n end\n end\n end\n return dns_records\nend", "def parse_zk_hosts(servers)\n servers.join(',')\n end", "def checkHosts(layer)\n @host_facts.each do |f|\n # each host has a list of facts\n f[1].each do |l|\n if l['deploop_category'] == layer\n up = @mchandler.ifHostUp f[0]\n if @opt.verbose\n puts \"checking host #{f[0]} is up: \" \n puts up\n end\n if !up\n msg = \"ERROR: host \\'#{f[0]}\\' is unreachable. Aboring.\"\n @outputHandler.msgError msg\n end\n deplUp = @mchandler.checkIfDeploopHost f[0]\n if @opt.verbose\n puts \"checking Deploop enabled host #{f[0]}: \" \n puts deplUp\n end\n if !deplUp\n msg = \"ERROR: host \\'#{f[0]}\\' is not Deploop enabled, fix this. 
Aborting.\"\n @outputHandler.msgError msg\n end\n end\n end\n\n end # @host_facts.each\n msg = \"The layer \\'#{layer}\\' has all host Deploop enabled\"\n @outputHandler.msgOutput msg\n end", "def hosts=(hosts)\n @host = nil\n @hosts = hosts\n end", "def hosts\n @hosts ||= []\n end", "def host_exists?(host)\n `#{cmk} --list-hosts`.split(/\\n/).include?(host)\n end", "def test_replace\n\t\tEM::DnsCache.verbose\n\n\t\tEM::DnsCache.add_cache_entry( :mx, \"example.zzz\", [\"1.2.3.4\", \"5.6.7.8\"], -1 )\n\t\tEM::DnsCache.add_cache_entry( :mx, \"example.zzz\", [\"10.11.12.13\"], -1 )\n\t\tout = nil\n\t\tEM.run {\n\t\t\td = EM::DnsCache.resolve_mx \"example.zzz\"\n\t\t\td.errback {EM.stop}\n\t\t\td.callback {|r|\n\t\t\t\tout = r\n\t\t\t\tEM.stop\n\t\t\t}\n\t\t}\n\t\tassert_equal( [\"10.11.12.13\"], out )\n\tend", "def build_hosts_list(env_vms)\n\n int_id = 10\n\n first = true\n env_vms.each do |vm, vmconfig|\n vmconfig[\"networks\"].each do |name, netcfg|\n if netcfg[\"type\"] == \"private\" then\n if netcfg['ip'].nil? then\n netcfg['ip'] = \"192.168.50.\" + int_id.to_s\n #add the default IP to the environment definnition\n env_vms[vm][\"networks\"][name][\"ip\"] = \"192.168.50.\" + int_id.to_s\n int_id += 1\n end\n if first then\n $base_vars = \"vms_hosts={\"\n $base_vars << \"\\\"#{netcfg['ip']}\\\":\\\"#{vm}\\\"\"\n first = false\n elsif\n $base_vars << \",\\\"#{netcfg['ip']}\\\":\\\"#{vm}\\\"\"\n end\n end\n end if vmconfig[\"networks\"]\n end\n $base_vars << \"}\" if $base_vars\nend", "def check_aggregate(summary)\n #puts \"summary is #{summary}\"\n total, ok, silenced, stale, failing = summary.values_at(:total, :ok, :silenced, :stale, :failing)\n return 'OK', 'No servers running the check' if total.zero?\n\n eff_total = total - silenced * (config[:silenced] ? 1 : 0)\n return 'OK', 'All hosts silenced' if eff_total.zero?\n\n ok_pct = (100 * ok / eff_total.to_f).to_i\n\n # Loop through the arrays and split the hostname so we get a short hostname \n message = \"#{ok} OK out of #{eff_total} total.\"\n message << \" #{silenced} silenced.\" if config[:silenced] && silenced > 0\n message << \" #{stale.size} stale.\" unless stale.empty?\n message << \" #{ok_pct}% OK, #{config[:critical]}% threshold\"\n message << \"\\nStale hosts: #{stale.map{|host| host.split('.').first}.sort[0..10].join ','}\" unless stale.empty?\n message << \"\\nFailing hosts: #{failing.map{|host| host.split('.').first}.sort[0..10].join ','}\" unless failing.empty?\n message << \"\\nMinimum number of hosts required is #{config[:min_nodes]} and only #{ok} found\" if ok < config[:min_nodes]\n\n state = ok_pct >= config[:critical] ? 'OK' : 'CRITICAL'\n state = ok >= config[:min_nodes] ? 
state : 'CRITICAL'\n return state, message\n end", "def hosts\n @hosts ||= begin\n r, h, u = [], (config[:hosts] rescue nil), (config[:user] rescue nil)\n h.each {|host| r << Host.new(host, u) } if h && u; r\n end\n end", "def domains_slaves_sync(domains, masterip)\n existing_domains = domains_slaves_list\n\n # Add any missing domains\n responses = []\n domains.each do |domain|\n unless existing_domains.include?(domain)\n response = domains_slaves_add(domain, masterip)\n response[:domain] ||= domain\n responses << response\n end\n end\n\n # Now check the existing domains\n existing_domains.keys.sort.each do |domain|\n if domains.include?(domain)\n # Update the masterip if it isn't correct\n if existing_domains[domain] != masterip\n response = domains_slaves_updatemasterip(domain, masterip)\n response[:domain] ||= domain\n responses << response\n end\n else\n # Delete domain; not on list\n response = domains_slaves_delete(domain)\n response[:domain] ||= domain\n responses << response\n end\n end\n\n responses\n end" ]
[ "0.72329545", "0.6538074", "0.64268786", "0.6327804", "0.631965", "0.61163247", "0.6058553", "0.6029414", "0.5943307", "0.5832349", "0.5720855", "0.5710602", "0.56563276", "0.5607008", "0.5506667", "0.54989445", "0.5481852", "0.5473348", "0.5465271", "0.5459152", "0.54170203", "0.5307933", "0.5284512", "0.52757454", "0.5271672", "0.52701414", "0.5259608", "0.5215875", "0.5201503", "0.5195242", "0.51695883", "0.5165961", "0.5165835", "0.5134465", "0.5131739", "0.51211256", "0.51084036", "0.5078392", "0.5072763", "0.50675786", "0.50664437", "0.5060806", "0.5054908", "0.5052502", "0.504479", "0.5044738", "0.5038955", "0.5037424", "0.5034453", "0.50313205", "0.5010613", "0.50049675", "0.50038135", "0.49967745", "0.4991948", "0.4990421", "0.49822366", "0.49645007", "0.495901", "0.49534488", "0.495082", "0.49501848", "0.4945674", "0.49439314", "0.49332628", "0.49329138", "0.49324515", "0.49294692", "0.49214336", "0.4919803", "0.49194276", "0.49023986", "0.48917946", "0.4887091", "0.4875843", "0.48750705", "0.48618403", "0.4861282", "0.48553252", "0.48509747", "0.48507422", "0.48475254", "0.48282793", "0.48244092", "0.48233515", "0.48216045", "0.48130357", "0.48056835", "0.48052204", "0.48036128", "0.4803529", "0.48019066", "0.48006654", "0.47990733", "0.47857767", "0.4775027", "0.47613323", "0.4758776", "0.47567767", "0.47561032" ]
0.80028486
0
send a signup email to the user, pass in the user object that contains the user's email address
def signup_email(user) @user = user mail(to: @user.email, subject: 'Thanks for signing up for our amazing app') end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def send_signup_email(user)\n @user = user\n mail( to: @user.email,\n subject: \"Thanks for signing up, #{@user.email}!\")\n end", "def send_signup_email(user)\n @user = user\n mail(to: @user.email, subject: 'Thanks for signing up!')\n end", "def signup_email(user)\n mail( :to => user.email, :subject => \"Thanks for signing up!\" )\n end", "def signup_email(user)\n mail( :to => user.email, :subject => \"Thanks for signing up\" )\n end", "def signup_email(user)\n mail( :to => user.email,\n :subject => 'Thanks for signing up' )\n end", "def signup_email(user)\n @user = user\n mail to: @user.email, subject: 'Welcome to Open Door'\n end", "def send_signup_email(user)\n @user = user\n mail( to: @user.email,\n subject: 'Thanks for signing up for our amazing app' )\n end", "def signed_up(user)\n @user = user\n \n mail to: @user.email, subject: 'Sign Up Confirmation.'\n end", "def signup_email(user)\n mail(\n to: \"[email protected]\",\n subject: 'Thanks for signing up'\n )\n end", "def send_signup_email(user)\n @user = user\n mail( :to => @user.email,\n :subject => 'Thanks for signing up to EY Time' )\n end", "def send_signup_email(user)\n @user = user\n mail( :to => @user.email,\n :subject => 'Thanks for signing up to EY Time' )\n end", "def send_signup_email(user)\n @user = user\n mail( :to => @user.email,\n :subject => 'Thanks for signing up for our amazing app' )\n end", "def signup(new_user)\n mail(to: new_user.email, subject: \"Congratulations on Signing up!\")\n end", "def signup_confirmation(user)\n @user = user\n\n mail to: user.email, subject: \"Sign Up Confirmation\"\n end", "def signup_confirmation(user)\n @user = user\n mail to: user.email, subject: \"Sign Up Confirmation\"\n end", "def send_signup_email(user)\n @user = user\n mail(\n to: @user.email,\n subject: 'Thanks for signing up for our amazing app'\n )\n end", "def signed_up(user)\n @user = user\n\n mail to: user.email\n end", "def signed_up(user)\n @user = user\n mail to: @user.email\n end", "def send_signup_email(user)\n @user = user\n mail( :to => @user.email,\n :subject => 'Thanks for signing up to SCUDERIA.COM!' )\n end", "def signup_email(user)\n mail( :to => user.email, :subject => \"Thanks for signing up for Sublets at Penn!\" )\n end", "def send_signup_email(user)\n @user = user\n mail(to: @user.email, subject: \"Welcome to blah blah blah #{@user.first_name}\")\n end", "def send_signup_email(user)\n @user = user\n mail( :to => @user.email,\n :subject => 'Thanks for signing up with Linchpin' )\n end", "def send_signup_email(user)\n @user = user\n mail( :to => @user.email,\n :subject => 'Thanks for signing up for Lend.io' )\n end", "def send_signup_email(user)\n @user = user\n mail( :to => @user.email,\n :subject => 'Muchas gracias por haberte registrado a Juego de Teorías!' )\n end", "def signup_successful(user)\n @user = user\n mail to: \"#{user.full_name} <#{user.email}>\"\n end", "def signup_confirmation(user)\n # create an instance variable so that the view has access\n # to the user.\n @user = user\n\n # send email to the user\n mail to: user.email, subject: \"Sign Up Confirmation\"\n end", "def user_sign_up_notification(user)\n @user = user\n\n mail(to: \"<#{user.email}>\", subject: \"Success! 
You did it.\")\n end", "def send_signup_email()\n\n\t# @user = user\n\tmail( :to => '[email protected]',\n\t\t :subject => '!Thanks for signing up')\n \nend", "def signup_confirmation(user)\n @user = user\n mail to: user.email, subject: \"Signup confirmation from FreeLance\"\n end", "def send_signup_email(user)\n @user = user\n mail( :to => @user.email,\n :subject => 'Welcome To WeHaul' )\n end", "def send_signup_verify_email(email, user)\n @user = user\n mail( :to => email,\n :subject => 'Welcome to WatchIoT!!')\n end", "def signup(user_that_just_signed_up)\n mail to: user_that_just_signed_up.email,\n subject: \"You signed up for YardSale\"\n end", "def send_signup_email(@user)\n @user= user\n mail( :to @user.email,\n subject: 'Thanks for signing up for our amazing app',\n\t:from => '[email protected]'\n )\n end", "def send_signup_email(user)\n @user = user\n mail( :to => '[email protected]',\n :subject => 'Thanks for signing up for our amazing app' )\n end", "def send_signup_email(user)\n @user = user\n mail(:from => self.moovi_email, :to => @user.email,:subject => 'Thanks for signing up to Moovi' )\n end", "def signup_confirmation(user)\n @user = user\n mail to: user.email, subject: \"Welcome\"\n end", "def signup_confirmation(user)\n @greeting = \"Hi\"\n @user = user\n\n mail to: user.email, subject: \"Sign up confirmation!\"\n end", "def signup_email(user)\n @user = user\n @url = 'http://localhost:3000/signup'\n mail( :to => @user.email,\n :subject => 'Thanks for signing up for piazza!' )\n end", "def new_signup(user, email)\n @greeting = \"Hi\"\n @user = user\n @email = email\n mail to: @email, subject: \"Your Account has been created on blogApp\"\n end", "def send_signup_email(user)\n\t @user = user\n\t mail( :to => @user.email,\n\t :subject => 'New Ticket' )\n\t end", "def send_signup_email(user)\n @user = user\n mail( :to => @user.email ,\n :subject => '[Todo Manager] Welcome!' )\n end", "def signup_activation(user)\n @user = user\n @url = 'http://localhost:3000/login'\n mail to: @user.email, subject: \"Match Point validate email\"\n end", "def send_signup_email(user)\n\t\tuser = user\n subject = \"Thank you for sign up on miniflix.\"\n merge_vars = {\n \"USER_NAME\" => user.name\n }\n body = mandrill_template(\"Paid-user-signup-mail\", merge_vars)\n\n send_mail(user.email, subject, body)\n\tend", "def finish_signup_later(user)\n @user = user\n mail to: \"#{user.full_name} <#{user.email}>\"\n end", "def signup_confirmation(user_name, user_email)\n @user_name = user_name\n @user_email = user_email\n mail to: user_email, subject: \"Dum dum de dum... 
get started with Bridled!\"\n end", "def signup_confirmation(user_id)\n # Will make the newly registered user available to the view that will generate the email content\n @user = User.find(user_id)\n\n mail to: @user.email, subject: \"Sign up confirmation\"\n end", "def signup_confirmation user\n @user = user\n mail to: user.email, subject: \"Welcome to the Clone\"\n end", "def signup_confirmation(user)\n @user= user\n debugger\n mail to: @user.email , subject: \"MovieBox Sign Up Confirmation\"\n \n end", "def registration(user)\n @user = user\n\n mail to: @user.email_id, subject: \"Successful Registration\"\n end", "def send_signup_email(user)\n @user = user\n @appointment = Appointment.where(id: @user.appointment_id).first\n mail(\n :to => @user.email,\n :bcc => \"[email protected]\",\n :subject => \"Ace CPR SD Sign-Up Confirmation ##{@user.id}\"\n )\n end", "def signup \n @user = params[:user]\n @link = params[:link]\n Merb.logger.info \"Sending Signup to #{@user.email} with code #{@user.activation_code}\"\n render_mail :text => :signup, :layout => :core\n end", "def user_sign_up(user)\n @user = user\n\n set_attachments\n\n case APP_CONFIG['app_country']\n when 'AR'\n mail(to: '[email protected]', subject: \"Notificaciones @ Social Target - Nuevo usuario registrado\")\n when 'CO'\n mail(to: '[email protected]', subject: \"Notificaciones @ Social Target - Nuevo usuario registrado\")\n when 'MX'\n mail(to: '[email protected]', subject: \"Notificaciones @ Social Target - Nuevo usuario registrado\")\n end\n\n end", "def sign_up(user)\n headers(:content_type => \"text/html\", :charset => \"UTF-8\")\n @user = user\n mail(to: user.email, subject: 'Credenciales del Usuario')\n end", "def signup\n\n\t\temail = params[:email] # Extract the email from the params of the signup form\n\t\ttimezone = params[:timezone] # Extract the timezone from the params of the signup form\n\n\t\t@url = uniqueUrlKeyGenerator # Generate a unique url key\n\t\told_user = User.find_by_email(email)\n\n\t\t# If user exists\n\t\tif !old_user.nil?\n\t\t # If user is not registered\n\t\t if !old_user.registered?\n\t\t # Send welcome email again and save him\n\t\t old_user.sendWelcomeEmail\n\t\t old_user.save\n\t\t end\n\t\tend\n\n\t\t# Find the user in the user db with the same email as extracted in the params\n\t\tcheck_users = User.find_by_email(email)\n\n\t\t#create a new PotentialUser object with the extarcted email, timezone and url key\n\t\tuser = User.new(email: email, url: @url, timezone: timezone, day: 1, registered: false)\n\n\t\t# If no such user exists\n\t\tif check_users.nil?\n\n\t\t#If the new user is valid and can be saved\n\t\t if user.save\n\t\t user.sendWelcomeEmail\n\t\t @title = \"Thank you for signing up\"\n\t\t @result = \"A confirmation email with instructions has been sent to you\"\n\t\t @result2 = \"Your unique access key is: \" + @url\n\n\t\t#If not valid\n\t\t else\n\t\t #Set @result as the error message\n\t\t @title = \"Looks like something went wrong ...\"\n\t\t @result = \"Email #{user.errors[:email][0]}.\".html_safe\n\t\t end\n\n\t\t#User by this email already exists\n\t\telse\n\n\t\t if !check_users.registered?\n\t\t\t # Result instance variable for the view\n\t\t\t @title = \"Looks like something went wrong ...\"\n\t\t\t @result = \"User by this email already exists, but we sent another confirmation email just in case\"\n\t\t\t else\n\t\t\t @title = \"Looks like something went wrong ...\"\n\t\t\t @result = \"User by this email already exists\"\n\t\t end\n\n\tend\n\n\t\t# Respond to only 
javascript, set for AJAX\n\t\trespond_to do |format|\n\t\t\tformat.js\n\t\tend\n\tend", "def register(user) \n @user = user \n mail(:to => user.email, :subject => \"Welcome to the site\") \n end", "def signup_confirmation_advisee(user)\n @user = user\n mail to: @user.email, subject: \"Advisee new registration\"\n end", "def signup\n return if generate_blank\n params[:user].delete('form')\n params[:user].delete('verified') # you CANNOT pass this as part of the request\n @user = User.new(params[:user])\n begin\n User.transaction(@user) do\n @user.new_password = true\n unless LoginEngine.config(:use_email_notification) and LoginEngine.config(:confirm_account)\n @user.verified = 1\n end\n if @user.save\n key = @user.generate_security_token\n url = url_for(:action => 'home', :user_id => @user.id, :key => key)\n flash[:notice] = 'Signup successful!'\n if LoginEngine.config(:use_email_notification) and LoginEngine.config(:confirm_account)\n UserNotify.deliver_signup(@user, params[:user][:password], url)\n flash[:notice] << ' Please check your registered email account to verify your account registration and continue with the login.'\n else\n flash[:notice] << ' Please log in.'\n end\n redirect_to :action => 'login'\n end\n end\n rescue Exception => e\n flash.now[:notice] = nil\n flash.now[:warning] = 'Error creating account: confirmation email not sent'\n logger.error \"Unable to send confirmation E-Mail:\"\n logger.error e\n end\n end", "def registration_succeed(user)\n @user = user\n\n mail to: user.email, :subject => 'Welcome to Real Thematics'\n end", "def registration_confirmation(user)\n @user = user\n mail(to: \"#{user.name} #{user.last_name} <#{user.email}>\",\n subject: '(IMI-Map) Registered')\n end", "def signup_confirmation(user)\n @user = user\n @greeting = \"Hi\"\n\n mail to: @user.email, subject: \"Hello new friend!\"\n end", "def registration_confirmation(user)\n @user = user\n mail :to => \"#{user.name} <#{user.email}>\", :subject => \"Thanks for Registering\"\n end", "def transaction_created(user)\n @user = user\n mail to: user.email, subject: \"Sign Up Confirmation\"\n end", "def registration_confirmation(user)\n \t@user = user\n \tmail(:to => \"#{user.name} #{user.last_name} <#{user.email}>\", :subject => \"(IMI-Map) Registered\")\n end", "def signup(email)\n \n @greeting = \"Hi\"\n\n mail to: email\n end", "def newsignup(email)\n @greeting = \"Hi\"\n @email = email['email']\n mail(to: @email, subject: \"You've signed up\")\n end", "def signup_with_email\n @status, @msg, @data = UserValidator.signup_with_email(params)\n @status, @msg, @data = UserManager.signup_with_email(params) if @status\n end", "def registration_confirmation(user)\n @account = user\n mail(:to => named_email(user), :subject => \"Outcircle - Welcome\")\n end", "def signup_confirmation(user, root_url)\n @user = user\n @url = root_url + 'login'\n mail(to: user.username, subject: 'Sign Up Confirmation')\n end", "def registration_confirmation(user)\n @user = user\n\n mail to: \"[email protected]\", subject: \"Success! 
You did it.\"\n end", "def sign_up_notification\n # @greeting = \"Hi\"\n\n # mail to: \"[email protected]\"\n # @admin = User.where(is_admin: true).last\n @user = params[:user]\n mail(to: @user.email, subject: 'Sign Up successfull')\n end", "def signup_email\n MnoEnterprise::SystemNotificationMailer.registration_instructions(params.require(:user).require(:email)).deliver_later\n\n head :no_content\n end", "def signup_email\n @greeting = \"Hi\"\n\n mail to: @user.email, subject: \"Welcome to CheeseBae! Please confim your email!\"\n end", "def register(user)\n if user\n @user = user\n mail(to: user.email, subject: \"Welcome to AccomMate!\")\n end\n end", "def signup(email)\n @email = email\n \n mail :to => \"[email protected]\", :cc => KMCD_EMAIL, \n :subject => \"New signup for RailsDojo.com\"\n end", "def signup_notification(user)\n provider = self.provider_account = user.account.provider_account\n\n subject = user.account.provider? ? \"3scale account confirmation\" : \"#{account_name(user)} API account confirmation\"\n\n headers(\n 'Return-Path' => from_address(user),\n 'X-SMTPAPI' => '{\"category\": \"Signup Notification\"}'\n )\n\n if user.activation_code\n activate_url = if user.account.provider?\n provider_activate_url(activation_code: user.activation_code, host: domain(user))\n else\n developer_portal.activate_url(activation_code: user.activation_code, host: domain(user))\n end\n end\n\n assign_drops user: Liquid::Drops::User.new(user),\n domain: Liquid::Drops::Deprecated.wrap(domain(user)),\n account_name: Liquid::Drops::Deprecated.wrap(account_name(user)),\n account: Liquid::Drops::Account.wrap(user.account),\n provider: Liquid::Drops::Provider.wrap(user.account.provider_account),\n url: activate_url,\n admin_url: Liquid::Drops::Deprecated.wrap(admin_url(user))\n\n mail(\n template_name: 'signup_notification_email',\n subject: subject,\n to: user.email,\n from: from_address(user)\n )\n end", "def create\n @user = User.new(user_params)\n if @user.save\n @user.send_activation_email\n flash[:info] = \"Please check your email to activate your account.\"\n redirect_to signin_path\n else\n render 'new'\n end\n end", "def signup_successful\n user = User.first\n UserMailer.signup_successful(user)\n end", "def admin_create_user_email(user)\n @user = user\n mail :to => \"#{user.name} <#{user.email}>\", :subject => \"An Account Has Been Created For You\"\n end", "def create\n @user = User.new(user_params)\n if @user.save\n @user.send_activation_email\n flash[:info] = \"Please check your email to activate your account.\"\n redirect_to root_url\n else\n render \"users/new\"\n end\n end", "def signup_confirmation(new_user)\n # @greeting = \"Hi\"\n @resource = new_user\n \n # mail to: new_user.email\n mail(to: @resource.email, subject: \"Welcome!\", :tag => 'password-reset', :content_type => \"text/html\") do |format|\n format.html { render \"user_mailer/signup_confirmation\" }\n end\n \n end", "def employee_signup_email(employee)\n @employee = employee\n mail( :to => @employee.email,\n :subject => 'Thanks for signing up for Shiift' )\n end", "def user_created(user)\n @user = user\n user_email_id = user.email\n mail to: \"#{user_email_id}\",subject: \"Account Has been created #{user.firstname + \" \" + user.lastname}\"\n end", "def signup_notification(user)\n\t\tsetup_email(user)\n\t\t subject self.site_name+\" : \"+I18n.t('mailer.signup_notification.subject')\n\t\t body :url => self.daurl+\"/admin/activate/#{user.activation_code}\",\n\t\t\t:site => self.site_name,\n\t\t\t:user_login => 
user.login,\n\t\t\t:user_password => user.password\n end", "def send_signup_email(user, token)\n @user = user\n @token = token\n mail(\n to: @user.email,\n subject: 'Thanks for signing up for SocialDeck!'\n )\n end", "def send_signup_email(user,command)\n @user = user\n @command= command\n subjectMail= 'Nouvelle commande de ' + @user.username\n mail( :to => @user.email,\n :subject => subjectMail )\n end", "def notify_about_sign_up(user, admin)\n @user = user\n mail(:to => admin.email, :subject => \"New User | #{user.name}\", :reply_to => \"[email protected]\")\n end", "def act_signup_confirmation(user, act)\n @user = user\n @act = act\n\n mail to: user.email, subject: 'Act Sign Up Confirmation'\n end", "def signup_notification(user)\n setup_email(user)\n @subject += I18n.t 'mailer.signup.subject'\n \n @body[:url] = \"http://www.dripplet.com/#{user.locale}/activate/#{user.activation_code}\"\n \n end", "def create\n @user = User.new(user_params)\n if @user.save\n @user.send_activation_email\n flash[:info] = \"Please check your email to activate your account.\"\n redirect_to root_url\n else\n render 'new'\n end\nend", "def create\n # @user holds values from submitted form\n @user = User.new(user_params)\n \n # attempt to save the new user, log the user in if successful and redirect\n # to user details page\n if @user.save\n @user.send_activation_email\n flash[:info] = \"Please check your email to activate your account.\"\n redirect_to root_url\n else\n # something went wrong, re-render the new user form with previously entered\n # information, errors will be displayed above form\n render 'new'\n end # if\n end", "def register_user(org_id, user_id)\n # @greeting = \"Hi\"\n @organisation = Organisation.find(org_id)\n @user = User.find(user_id)\n mail(to: @organisation.contact_email, subject: 'An new organisation host in MOBEEAS is added, waiting for your approval.')\n end", "def signup\n @greeting = \"Hi\"\n\n mail :to => \"[email protected]\"\n end", "def registration_confirmation(user) \n @user=user\n mail(:to => user.email, :subject => \"[给力百货]注册邮箱验证\")\n end", "def send_signup_email(taxpayer)\n @taxpayer = taxpayer\n mail( :to => @taxpayer.email,\n :subject => 'Thanks for signing up for our amazing app' )\n end", "def activation_needed_email(user)\n # TODO: Implement actual functionality -- Wed Jun 13 15:26:53 2012\n @greeting = \"Hi\"\n\n mail to: \"[email protected]\"\n end", "def user_created(user)\n @user = user\n\n mail to: @user.email, subject: \"Bienvenue sur l'application Hotel Tycoon !\"\n end", "def activate_account_email(user)\n @user = user\n @url = validate_account_url(@user.signup_token)\n mail(to: @user.email, subject: \"Welcome to Report It!\")\n end", "def paid_signup(user)\n DelayedKiss.alias(user.full_name, user.email)\n DelayedKiss.record(user.email, 'Sent Paid Signup Email')\n @user = user\n mail to: user.email, subject: \"You've Upgraded to our Business Pro plan!\"\n end", "def send_user\n UserProfileCertificationMailer.send_user\n end", "def sign_up(new_user)\n @user = new_user\n @notify_subject = strip_tags \"NEW SIGN UP AT #{ENV['APPLICATION_CONFIG_name']}\"\n mail( :to => ENV['APPLICATION_CONFIG_admin_notification_address'], :subject => @notify_subject)\n end" ]
[ "0.85147613", "0.84859043", "0.84804153", "0.8466327", "0.8461068", "0.84345907", "0.83614045", "0.83595186", "0.8347467", "0.83424085", "0.83424085", "0.833232", "0.83076817", "0.8304023", "0.830207", "0.82883614", "0.8278261", "0.8244641", "0.8227462", "0.8217185", "0.82115686", "0.8186127", "0.8184473", "0.8169037", "0.8164784", "0.8115332", "0.81034887", "0.8087642", "0.8073181", "0.80600786", "0.80537444", "0.80268663", "0.8018853", "0.8001688", "0.7996627", "0.7990092", "0.79893917", "0.7980662", "0.7908799", "0.783899", "0.7838258", "0.7806558", "0.77841765", "0.77764285", "0.7764272", "0.7753993", "0.7734519", "0.773258", "0.77097994", "0.76994973", "0.76886654", "0.7687104", "0.7638845", "0.76326376", "0.76273924", "0.7624016", "0.7595984", "0.75943875", "0.75938946", "0.7576436", "0.755074", "0.7504255", "0.7463151", "0.7459934", "0.74446577", "0.7440399", "0.74104816", "0.74057657", "0.7396637", "0.7393193", "0.7373357", "0.73685783", "0.7352618", "0.7348828", "0.73414797", "0.7331905", "0.7319844", "0.7315538", "0.7311486", "0.7301379", "0.72923684", "0.7289409", "0.7282561", "0.72773904", "0.72768456", "0.7271391", "0.72696483", "0.7265541", "0.7255632", "0.72355664", "0.7227985", "0.7224936", "0.7209405", "0.7208635", "0.720556", "0.7203871", "0.71825826", "0.71517205", "0.7136196", "0.7135684" ]
0.8546309
0
GET /jobs GET /jobs.xml
def index respond_to do |format| format.html { @groups = Group.get_groups(current_user, params) } format.xml { render :xml => Group.get_groups(params.merge({:show => 'all'})) } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def jobs\n doc = Nokogiri::XML open(@url)\n\n doc.search('//job').map { |node|\n Job.new(attributes_from(node))\n }\n end", "def index\n @jobs = Job.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @jobs }\n end\n end", "def getJobApi (options)\n uri = options[:job] + \"?depth=\" + options[:depth].to_s\n job_uri = URI.parse(uri)\n http = Net::HTTP.new(job_uri.host, job_uri.port)\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n request = Net::HTTP::Get.new(job_uri.request_uri)\n request.basic_auth @username, @password\n response = http.request(request)\n job_xml=XmlSimple.xml_in(response.body)\n return job_xml\n end", "def index\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @jobs }\n end\n end", "def index\n @jobtimes = current_company.jobtimes.find_all_by_job_id(params[:job_id])\n respond_to do |format|\n format.xml {render :xml => @jobtimes }\n format.json { render :json => @jobtimes }\n end\n end", "def index\n @jobs = Job.find(:all, :conditions => params_to_conditions(params), :order => \"id\")\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @jobs }\n end\n end", "def show\n @job = Job.find(params[:id])\n @tasks = @job.tasks\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job }\n end\n end", "def find_jobs(params:)\n response = HTTParty.get(\"#{@host}/api/jobs\", query: params)\n\n return response[\"jobs\"] \n end", "def get_jobs_sample(client)\n response = client['jobs'].get\n\n p ''\n p 'Get jobs'\n p response\nend", "def getCurrentJobs\n getJobs('0/')\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job }\n end\n end", "def index\n @nodes = @job.nodes.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @nodes }\n format.json { render :json => @nodes }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.rhtml\n format.xml { render :xml => @job.to_xml }\n end\n end", "def index\n @jobs = Job.paginate :page => params[:page], :order => 'created_at DESC', :per_page =>10\n @job = Job.find(:last)\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @jobs }\n format.json { render :json => @jobs }\n end\n end", "def show\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @jobs_queue }\n end\n end", "def job\n fetch('games.final_fantasy_xiv.jobs')\n end", "def index\n @jobs = Job.all\n end", 
"def index\n @jobs = Job.all\n end", "def index\n @jobs = Job.all\n end", "def index\n @jobs = Job.all\n end", "def index\n @jobs = Job.all\n end", "def index\n @jobs = Job.all\n end", "def index\n @jobs = Job.all\n end", "def index\n @job_requests = JobRequest.all\n end", "def show\n @job_status = JobStatus.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job_status }\n end\n end", "def index\n respond_to do |format|\n format.html { @jobs_queues = JobsQueue.get_jobs_queues(current_user, params) }\n format.xml { render :xml => JobsQueue.get_jobs_queues(current_user, params.merge({:show => 'all'})) }\n end\n end", "def listjobs(project=self.project)\n get('listjobs.json', project: project).reject{|k,v| k=='status'}\n end", "def index\n @job_items = @job.job_items.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @job_items }\n end\n end", "def jobs\n\t\t# ...\n\tend", "def show\n @job = Job.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job.to_xml(:include => { :job_parameters => { :include => :data_set } }) }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job }\n format.json { render :json => @job }\n end\n end", "def index\n respond_to do |format|\n format.html \n format.js { render :partial => 'list', :locals => { :jobs => Job.get_jobs(current_user, params) } }\n format.xml { render :xml => Job.get_jobs(current_user, params.merge({:show => 'all'})) }\n end\n end", "def index\n @jobs = Job.paginate(:page => params[:page])\n\n respond_to do |format|\n format.html # index.html.erb\n format.rss\n end\n end", "def show\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job_runner }\n end\n end", "def list_jobs(json_payload={})\n conn = @client.get do |req|\n req.url '/api/v2/job/list?'\n req.headers[\"Authorization\"] = @token\n req.params = json_payload\n end\n conn.body\n end", "def index\n @jobs = current_user.jobs\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @jobs }\n end\n end", "def index\n @jobs = Job.page(params[:page])\n end", "def index\n @job = Job.find(params[:jid]) if params[:jid] and Job.exists?(params[:jid])\n redirect_to jobs_path unless @job\n @job = Job.find(params[:jid]) if params[:jid] and Job.exists?(params[:jid])\n redirect_to jobs_path unless @job\n @job_scope_additions = JobScopeAddition.find_all_by_jobs_id(@job.id)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @job_scope_additions }\n end\n end", "def index\n @jobs = Job.all\n\n render json: @jobs\n end", "def build_jobs_url\n \"http://#{host_name}:#{port}/jobs\"\n end", "def index\n @jobs = Job.all\n render json: @jobs\n end", "def get_single_job_sample(client)\n response = client[\"jobs/#{$job_id}\"].get\n\n p ''\n p 'Get single job'\n p response\nend", "def status(*job_id)\n #take default job_id if not specified\n if job_id.empty?\n job_id = @job_id\n else\n job_id = job_id[0]\n end\n\n \n url=\"#{@base_url}/#{@tool}/status/#{URI.encode(job_id)}\"\n uri = URI.parse(url)\n\n resp = Net::HTTP.get_response(uri)\n #puts resp.body\n\n #params = XmlSimple.xml_in(resp.body)\n\n return resp.body\n\n\n end", "def jobs(opts = {})\n api(\n @client.list_jobs(\n @project_id,\n deep_symbolize_keys(opts)\n )\n )\n end", "def index\n @inventories = 
current_company.inventories.find_all_by_job_id(params[:job_id])\n respond_to do |format|\n format.xml{ render :xml => @inventories }\n format.json{ render :json => @inventories }\n end\n end", "def index\n\t@jobs = (@user.username == \"Admin\")? Job.all : @user.jobs\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @jobs }\n end\n end", "def get_job_info(page)\n job_api = \"#{BASE_URL}/v4/projects/#{PROJECT_ID}/jobs?page=#{page}&per_page=#{PER_PAGE}\"\n begin\n response = RestClient::Request.new(\n :method => :get,\n :url => job_api,\n :verify_ssl => false,\n :headers => {\"PRIVATE-TOKEN\" => API_TOKEN}\n ).execute\n\n response.headers\n\n rescue RestClient::ExceptionWithResponse => err\n puts \"jobs info error: #{err.response}\"\n return nil\n end\nend", "def fetch_job\n JSON.parse(RestClient.get(url).body)\n end", "def index\n @jobs = Job.paginate(page: params[:page])\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @jobs }\n end\n end", "def listJobsForProject(project_mame)\n uri = URI(RUNDECKSERVER + ':' + RUNDECKPORT + '/api/1/jobs')\n params = { 'project' => project_mame }\n headers = {\n 'Content-Type'=> 'application/json',\n 'X-RunDeck-Auth-Token'=> API_KEY \n}\n connection = Excon.new('http://build01:4440/api/1/jobs')\n return connection.get(:query => { 'project' => project_mame },:headers => {\n 'Content-Type'=> 'application/json',\n 'X-RunDeck-Auth-Token'=> API_KEY \n}).body.force_encoding(\"UTF-8\")\n\nend", "def index\n per_page = params[:per_page] ||= PER_PAGE\n @jobs = Job.paginate :page => params[:page],\n :per_page => params[:per_page],\n :order=>'created_at DESC'\n respond_to do |format|\n format.html # index.html.erb\n format.xml {render :xml => @jobs} \n format.json {render :json => @jobs}\n end\n end", "def get_jobs(url)\n result = JSON.parse(get_data(url))\n job_list = []\n result[\"jobs\"].each do |job|\n job = JenkinsJob.new job[\"name\"], job[\"color\"], job[\"url\"]\n job_list << job\n end\n job_list\nend", "def job(id, options = {})\n objectify get(\"/job/#{id}\", options)['joblist']['job']\n end", "def index\n @jobs = Job.all\n render :index\n end", "def get_job(id)\n conn = @client.get do |req|\n req.url \"/api/v2/job/#{id}\"\n req.headers[\"Authorization\"] = @token\n end\n conn.body\n end", "def list_jobs(username, password, uuid = nil)\n jobs = get_json('jobs.json', username, password)\n puts \"\"\n jobs[\"jobs\"].each do |job|\n next if uuid && job['uuid'] != uuid\n if job['jobURL']\n job.merge!(get_json(job['jobURL'], username, password, ''))\n end\n puts summarise_job(job, 2)\n puts \"\"\n end\n del = jobs['delivered']\n puts \"#{del['jobCount']} jobs, #{del['activityCount']} activities delivered since #{del['since']}\"\nend", "def find_job(job_id)\n response = HTTParty.get(\"#{@host}/api/jobs/#{job_id}\")\n\n return response['job']\n end", "def index\n @jobs = Job.all\n # @jobs = ScriptedClient::Job.all\n end", "def job_results(jobid)\r\n wait_on_status(jobid)\r\n puts \"Retrieving results for job [#{jobid}]\"\r\n uri = URI(\"http://api.idolondemand.com/1/job/result/\" + jobid)\r\n uri.query = URI.encode_www_form(:apikey => $api_key)\r\n res = Net::HTTP.get_response(uri, p_addr = $proxy_host, p_port = $proxy_port)\r\n return JSON.parse(res.body)['actions']\r\nend", "def index\n @jobs = Job.all\n @paginated_jobs = @jobs.paginate(:page => params[:page], :per_page => Settings.Pagination.NoOfEntriesPerPage)\n end", "def index\n @jobs = Job.with_hires(nil).all\n end", "def 
get_jobs_list(status = :all, page = 1, reload = false)\n Bitmovin::Job.list(status, page, reload)\n end", "def index \n @jobs = Job.all.find_all{ |job| job.user_id == current_user.user_id }\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @jobs }\n end\n end", "def index\n @training_active_jobs = Training::ActiveJob.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @training_active_jobs }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @job }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @job }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @job }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @job }\n end\n end", "def show\n @job = Job.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @job }\n end\n end", "def show\n @job = @user.jobs.find_by_id!(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @job }\n end\n end", "def index\n @completed_jobs = DialJob.where(:status => 'completed').paginate(\n\t\t:page => params[:page], \n\t\t:order => 'id DESC',\n\t\t:per_page => 30\n\n\t)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @dial_results }\n end\n end", "def getExecutionsForAJob(job_id)\n uri = URI(RUNDECKSERVER + ':' + RUNDECKPORT + '/api/1/job/' + job_id + '/executions')\n http = Net::HTTP.new(uri.host, uri.port)\n headers = {\n 'Content-Type'=> 'application/json',\n 'X-RunDeck-Auth-Token'=> API_KEY \n}\n r = http.get(uri.path, headers)\n return r.body.force_encoding(\"UTF-8\")\nend", "def get(jid)\n results = @client.call('get', jid)\n Job.new(@client, JSON.parse(results)) unless results.nil?\n end", "def show\n allow :get, :delete; vary_on :accept\n job = OAR::Job.expanded.includes(:job_types, :job_events, :gantt).find(\n params[:id]\n )\n job.links = links_for_item(job)\n\n render_opts = { methods: %i[resources_by_type assigned_nodes] }\n render_result(job, render_opts)\n end", "def jobs(opts = {})\n api(api_method: @bq.jobs.list,\n parameters: opts)\n end", "def getDeadJobs\n getJobs('1/')\n end", "def list_jobs\n jobs = if unsafe_params[:editable]\n Job.editable_by(@context).accessible_by_private\n else\n Job.accessible_by(@context)\n end\n\n if unsafe_params[:scopes].present?\n check_scope!\n jobs = jobs.where(scope: unsafe_params[:scopes])\n end\n\n if unsafe_params[:space_uid].present?\n jobs = jobs.terminal\n end\n\n result = jobs.eager_load(user: :org).order(id: :desc).map do |job|\n describe_for_api(job, unsafe_params[:describe])\n end\n\n render json: result\n end", "def show\n @items_print_job = ItemsPrintJob.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @items_print_job }\n end\n end", "def show\n @job = @user.jobs.find_by_id!(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render_for_api :checkins_with_job, json: @job, root: :job }\n end\n end", "def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @launched_job }\n end\n end", "def show\n @job = 
Job.find(params[:id])\n @page_title = \"Print job for #{@job.user.full_name}\"\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job }\n end\n end", "def index\n \t@jobs = Job.all\n \t# if i wanted to find all of my jobs\n \t# @jobs = current_user.jobs.all\n end", "def get_jobs(from, to)\n\n job_info = get_job_info(from)\n total_page = job_info[:x_total_pages].to_i\n new_to = (to == nil || to < total_page) ? to : total_page\n puts \">> total page : \" + total_page.to_s\n\n jobs = []\n (from..new_to).each do |page|\n job_api = \"#{BASE_URL}/v4/projects/#{PROJECT_ID}/jobs?page=#{page}&per_page=#{PER_PAGE}\"\n puts \">>start:page:\" + page.to_s\n\n begin\n response = RestClient::Request.new(\n :method => :get,\n :url => job_api,\n :verify_ssl => false,\n :headers => {\"PRIVATE-TOKEN\" => API_TOKEN}\n ).execute\n\n if response != nil && response.code == 200\n res = JSON.parse(response.to_str)\n jobs += res\n end\n\n rescue RestClient::ExceptionWithResponse => err\n puts \"jobs error: #{err.response}\"\n end\n end\n\n jobs\nend", "def index\n\t\t@page = 'browse'\n if params[:search].blank?\n\t\t case params[:type]\n\t\t\t\twhen \"featured\"\n\t\t\t\t\t@jobs = Job.featured\n when \"latest\"\n @jobs = Job.recently_submited(9)\n\t\t\t\twhen \"closed\"\n\t\t\t\t\t@jobs = Job.closed\n\t\t\t\telse\n\t\t\t\t\t@jobs = Job.open\n end\n else\n\t\t\t@page = 'search'\n @jobs = Job.active.find(:all, :conditions => ['role LIKE ? OR company LIKE ?', \"%#{params[:search]}%\", \"%#{params[:search]}%\"])\n\t\t\tif @jobs.empty?\n @page_header_line = \"Your search by <strong>#{h(params[:search])}</strong> did not return any job. Try a different search or <a href='#{jobs_url}'>go browse</a>.\"\n else\n @page_header_line = \"Your search by <strong>#{h(params[:search])}</strong> returned the following results: \"\n end\n end\n\n @jobs = @jobs.paginate :per_page => 10, :page => params[:page]\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @jobs }\n end\n end", "def index\n @jobs = Job.all\n\n # change layout for index\n #render layout: \"application\"\n end", "def search\n data = Job.search(params)\n\n # Respond with :json, :txt (tab delimited Blast results), or GFF3.\n respond_with data.flatten!(1) do |format|\n format.json {\n render :json => Quorum::JobSerializer.as_json(data)\n }\n format.gff {\n render :text => Quorum::JobSerializer.as_gff(data)\n }\n format.txt {\n render :text => Quorum::JobSerializer.as_txt(data)\n }\n end\n end", "def new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @job }\n end\n end", "def index\n self.limit\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @jobs }\n end\n end", "def get_job name\n\t\t\tputs \"==> Pulling config.xml for job: '#{name}'\"\n\t\t\tret = {}\n\t\t\tbegin\n\t\t\t\tresponse = RestClient.get \"#{@jenkins_host}:#{@jenkins_port}/job/#{name}/config.xml\"\n\t\t\t\tif response.code == 200\n\t\t\t\t\tret[:success] = true\n\t\t\t\t\tret[:job] = response.body\n\t\t\t\telse\n\t\t\t\t\traise '==> Job does not exist'\n\t\t\t\tend\n\t\t\trescue Exception => e\n\t\t\t\tret[:success] = false\n\t\t\tend\n\t\t\treturn ret\n\t\tend", "def index\n @jobs = PeriodicJob.list params[:page], current_user.row_limit\n end", "def show\n @print_job_status = PrintJobStatus.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @print_job_status }\n end\n end", "def show\n @user = 
User.find(params[:id])\n @jobs = @user.jobs\n end", "def index\n @job_results = JobResult.all\n end", "def jobs\r\n end", "def show\n\t@job = @user.jobs.find(params[:id])\n\t#if find fails, redirect to the controller\n\trescue\n\t\tflash[:error] = 'Record not found'\n\t\tredirect_to :controller => 'jobs'\n\t\treturn\n\t\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @job }\n end\n end" ]
[ "0.73205507", "0.72729135", "0.71821696", "0.7130482", "0.7030642", "0.7021925", "0.6835374", "0.68151474", "0.68074626", "0.673195", "0.6662918", "0.6662918", "0.6662918", "0.6662918", "0.6662918", "0.6662918", "0.6662918", "0.66599715", "0.6652008", "0.6643812", "0.6624499", "0.6596606", "0.65712285", "0.65649736", "0.65649736", "0.65649736", "0.65649736", "0.65649736", "0.65649736", "0.6543234", "0.6521023", "0.6516499", "0.64999545", "0.6498098", "0.64915633", "0.64886385", "0.6486725", "0.6479707", "0.6472146", "0.64497924", "0.64366245", "0.6417849", "0.6385498", "0.63833386", "0.6376101", "0.6364", "0.63552165", "0.63520086", "0.6351964", "0.6329086", "0.63226926", "0.63172597", "0.63074785", "0.63072234", "0.6305683", "0.63021815", "0.62947106", "0.6287044", "0.62604654", "0.6254771", "0.6249901", "0.6240829", "0.6232124", "0.6217892", "0.6216404", "0.6200019", "0.6198023", "0.6190957", "0.6189438", "0.6182502", "0.61785096", "0.61785096", "0.61785096", "0.61785096", "0.61785096", "0.617377", "0.6166016", "0.6165295", "0.6144718", "0.6123253", "0.6122617", "0.61206585", "0.6114656", "0.61031574", "0.6096844", "0.60870177", "0.60804796", "0.6077143", "0.606705", "0.6056486", "0.6054569", "0.6051018", "0.60487735", "0.60469127", "0.6044782", "0.60413784", "0.6037798", "0.60290736", "0.6016896", "0.6015269", "0.6012932" ]
0.0
-1
GET /group/1 GET /group/1.xml
def show @group = Group.find(params[:id]) respond_to do |format| format.html # show.html.erb format.xml { render :xml => @group.to_xml } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def show\n\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def index\n respond_to do |format|\n format.html { @groups = Group.get_groups(current_user, params) }\n format.xml { render :xml => Group.get_groups(params.merge({:show => 'all'})) }\n end\n end", "def index\n respond_to do |format|\n format.html { @groups = Group.get_groups(current_user, params) }\n format.xml { render :xml => Group.get_groups(params.merge({:show => 'all'})) }\n end\n end", "def index\n\n @groups = Group.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @group }\n format.xml { render xml: @group }\n end\n end", "def index\n @groups = Group.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def index\n @groups = Group.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def show\n @user_group = UserGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @user_group }\n end\n end", "def show\n @group = Group.find_by_param(params[:id])\n @title = \"#{@group.name}'s Group\"\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def index\n @groups = Group.all\n @title = \"Groups\"\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def show\n\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n format.json { render :json => @group }\n end\n end", "def show\n @group_list = GroupList.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group_list }\n end\n end", "def show\n\n @group_definition = GroupDefinition.find(params[:id])\n\n render :xml => @group_definition\n end", "def show\n @group = Group.find(params[:id]) #Sicherheitsrisiko ohne @user.groups.find ?\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n group = Group.find(params[:group_id])\n\n @subgroup = group.subgroups.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n\n format.xml { render :xml => @subgroup }\n end\n end", "def show\n @group = WorkGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", 
"def show\n @group_of_task = GroupOfTask.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group_of_task }\n end\n end", "def index\n\n @groups = Group.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n format.json { render :json => @groups }\n end\n end", "def show #:nodoc:\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.haml\n # format.xml { render :xml => @group }\n end\n end", "def show\n @attribute_group = AttributeGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @attribute_group }\n end\n end", "def index\n\n @user_groups = UserGroup.find(:all)\n\n respond_to do |format|\n format.html { redirect_to :controller => :groups, :action => :index }\n format.xml { render :xml => @user_groups }\n end\n end", "def show\n Group.rebuild! if nil.|Group.find(:first).rgt\n\t #this won't work - it won't find children groups\n\t @group = Group.find_by_id(params[:id])\n\t @group = nil unless current_user.can_access_group?(@group)\n respond_to do |format|\n if @group\n format.html # show.html.erb\n format.xml { render :xml => @group }\n else\n flash[:notice] = 'Group invalid or you do not have access to this group.'\n format.html { redirect_to groups_path}\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def view_group(group)\n @uri = URI.parse(\"#{@api_url}/group/#{group}\")\n body = make_get_request\n @doc = Nokogiri::HTML(body)\n {\n title: get_title,\n description: get_description,\n name: get_name,\n regid: get_regid,\n contact: get_contact\n }\n end", "def show\n @group = Group.find(params[:id])\n @members = @group.member\n @project = @group.project\n @events = @group.event\n \n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @add_to_group = AddToGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @add_to_group }\n end\n end", "def index\n\n @group_definitions = GroupDefinition.find(:all)\n\n render :xml => @group_definitions\n end", "def index\n @user_groups = UserGroup.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @user_groups }\n end\n end", "def show\n\n @user_group = UserGroup.find(params[:id])\n\n respond_to do |format|\n format.html do\n redirect_to(\n :controller => :groups,\n :action => :show,\n :id => @user_group.group_id)\n end\n format.xml { render :xml => @user_group }\n end\n end", "def show\n @targetgroup = Targetgroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @targetgroup }\n end\n end", "def show\n @giving_group = GivingGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @giving_group }\n end\n end", "def index\n @groups = WorkGroup.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def show\n @lab_group = LabGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @lab_group }\n end\n end", "def show\n @target_group = TargetGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @target_group }\n end\n end", "def 
show\n\t\tshow_\n\t\trespond_to do |format|\n\t\t\tformat.html # show.html.erb\n\t\t\tformat.xml { render :xml => @group }\n\t\t\tformat.json { render :json => @group }\n\t\tend\n\tend", "def show\n\t\tshow_\n\t\trespond_to do |format|\n\t\t\tformat.html # show.html.erb\n\t\t\tformat.xml { render :xml => @group }\n\t\t\tformat.json { render :json => @group }\n\t\tend\n\tend", "def show\n @record_group = RecordGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @record_group }\n end\n end", "def show\n @grupo = Grupo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @grupo }\n end\n end", "def show\n @user = User.get!(params[:id])\n @groups = Group.all(:order => [:name.desc])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @user }\n end\n end", "def index\n @user_groups = UserGroup.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @user_groups }\n end\n end", "def show\n @fgroup = Fgroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @fgroup }\n end\n end", "def show\n @contact_group = ContactGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @contact_group }\n end\n end", "def index\n @giving_groups = GivingGroup.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @giving_groups }\n end\n end", "def show\n @routinetoobjectgroup = Routinetoobjectgroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @routinetoobjectgroup }\n end\n end", "def get_group\n send_request(FUNCTION_GET_GROUP, [], '', 4, 'k4')\n end", "def show\n @group_report = GroupReport.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group_report }\n end\n end", "def show\n @objectgrouptoobjectgroup = Objectgrouptoobjectgroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @objectgrouptoobjectgroup }\n end\n end", "def index\n #@groupings = Grouping.all\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groupings }\n end\n end", "def show\n @recruitment_group = RecruitmentGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @recruitment_group }\n end\n end", "def show\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group_retention }\n end\n end", "def index\n @people = Person.all\n @group = Group.find(params[:group_id]) \n # render :layout => 'admin'\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @people }\n end\n end", "def show\n @group_permission = GroupPermission.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group_permission }\n end\n end", "def show\n @personnel_group = PersonnelGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @personnel_group }\n end\n end", "def show\n @expensegroup = Expensegroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @expensegroup }\n end\n end", "def index\n @group = Group.find(params[:group_id])\n @surveys = @group.surveys\n 
respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @surveys }\n end\n end", "def show\n @provider_group = ProviderGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @provider_group }\n end\n end", "def show\n @competence_group = CompetenceGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @competence_group }\n end\n end", "def show\n @group_activity = GroupActivity.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group_activity }\n end\n rescue ActiveRecord::RecordNotFound => e\n prevent_access(e)\n end", "def index\n @fgroups = Fgroup.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @fgroups }\n end\n end", "def show\n @jido_grp_rel = JidoGrpRel.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @jido_grp_rel }\n end\n end", "def show\n @my_group_comment = My::GroupComment.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @my_group_comment }\n end\n end", "def show\n @group = SuperSimpleCms::Group.find(params[:id])\n\n respond_to do |format|\n format.html { render :template=>'admin/groups/show' }\n format.js { render :template=>'admin/groups/show', :layout=>false}\n format.xml { render :xml => @group }\n end\n end", "def index\n @groups = SuperSimpleCms::Group.find(:all, :order=>:position)\n\n respond_to do |format|\n format.html { render :template => 'admin/groups/index' }\n format.xml { render :xml => @groups }\n end\n end", "def new\n\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @group }\n end\n end", "def show\n @groups_happening = GroupsHappening.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @groups_happening }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n @list_group_subgroups = @group.group_subgroups.paginate :page => params[:subgroups_page], :per_page => 10\n @list_group_members = @group.group_members.paginate :page => params[:members_page], :per_page => 10\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @slicegroup = Slicegroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @slicegroup }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @group }\n end\n end", "def new\n @group = Group.new\n \n respond_to do |format|\n format.html\n format.xml { render :xml => @group.to_xml }\n end\n end", "def new\n @group = Group.new\n \n respond_to do |format|\n format.html\n format.xml { render :xml => @group.to_xml }\n end\n end", "def index\n @group_users = @parent.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @group_users }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # 
new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @server_group = ServerGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @server_group.to_json(:include => {:servers => {:include => :vpn_network_interfaces}, :clients => {:include => :vpn_network_interfaces}}) }\n format.xml { render :xml => @server_group.to_xml(:include => {:servers => {:include => :vpn_network_interfaces}, :clients => {:include => :vpn_network_interfaces}}) }\n end\n end", "def index\n @rebalancegroup = Rebalancegroup.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @rebalancegroups }\n format.xml {rendder xml: @rebalancegroups}\n end\n end", "def index\n @contact_groups = ContactGroup.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @contact_groups }\n end\n end", "def getGroup( group_id)\n params = Hash.new\n params['group_id'] = group_id\n return doCurl(\"get\",\"/group\",params)\n end", "def index\n @forum_groups = ForumGroup.find(:all)\n\n respond_to do |format|\n format.html # index.rhtml\n format.xml { render :xml => @forum_groups.to_xml }\n end\n end", "def show\n @group_user = @parent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group_user }\n end\n end", "def show\n @group = @authorized_group\n @user = User.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @user }\n end\n end", "def show\n @group = Group.find(params[:id])\n respond_to do |format|\n # format.html # show.html.erb\n format.json { render :json => @group}\n end\n end", "def show\n @group_problem = GroupProblem.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group_problem }\n end\n end", "def edit\n respond_to do |format|\n format.html\n format.xml { render :xml => @group.to_xml }\n end\n end", "def edit\n respond_to do |format|\n format.html\n format.xml { render :xml => @group.to_xml }\n end\n end", "def show\n @group = active_group \n @events = @group.get_all_events\n @title = \"#{@group.name} group |\"\n \n @actions << 'delete_group' if @group.is_owned_by? 
current_user\n \n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n return render 'index'\n end" ]
[ "0.7056438", "0.7051215", "0.7051215", "0.7051215", "0.7051215", "0.7051215", "0.69633293", "0.69633293", "0.6911578", "0.6901699", "0.6867055", "0.6867055", "0.682145", "0.6771966", "0.67499894", "0.6715324", "0.6703261", "0.6700914", "0.6695819", "0.66550297", "0.6605144", "0.6598196", "0.6594404", "0.65851367", "0.65807694", "0.6577735", "0.65348697", "0.6522248", "0.6503694", "0.64866453", "0.64802146", "0.6467303", "0.6442841", "0.6433277", "0.6427655", "0.6424756", "0.64220726", "0.6410526", "0.640261", "0.640261", "0.6397554", "0.63957703", "0.6383462", "0.63709766", "0.6328747", "0.63281417", "0.6312786", "0.6289894", "0.62711024", "0.62585795", "0.6229387", "0.62155193", "0.6212136", "0.62005043", "0.619395", "0.6185411", "0.6178005", "0.61686635", "0.6155773", "0.61529267", "0.6152842", "0.6151898", "0.61461693", "0.6141757", "0.61247593", "0.61223745", "0.6100041", "0.6085927", "0.6080038", "0.60662425", "0.6063272", "0.6059857", "0.6058303", "0.6058303", "0.60579985", "0.60579985", "0.60430986", "0.6032834", "0.6032834", "0.6032834", "0.6032834", "0.6032834", "0.6032834", "0.6032834", "0.6032834", "0.6032834", "0.6018376", "0.6013506", "0.6010883", "0.60012454", "0.5991665", "0.5988253", "0.5985684", "0.5981191", "0.5978287", "0.59740525", "0.59740525", "0.5971282", "0.5969692" ]
0.7224538
0
GET /group/new GET /group/new.xml
def new @group = Group.new respond_to do |format| format.html format.xml { render :xml => @group.to_xml } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new\n @group = Group.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n format.xml { render xml: @group }\n end\n end", "def new\n @group = Group.new\n @title = 'Create Group'\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @add_to_group = AddToGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @add_to_group }\n end\n end", "def new #:nodoc:\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.haml\n # format.xml { render :xml => @group }\n end\n end", "def new\n Group.rebuild! 
if nil.|Group.find(:first).rgt\n\t @group = Group.new\n\t @groups = current_user.get_unique_group_branches.map {|g| g.get_self_and_children?}.flatten\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n format.json { render :json => @group }\n end\n end", "def new\n @group = GROUP.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n add_breadcrumb \"Social\", social_path()\n add_breadcrumb \"Create group\"\n \n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @group = Group.new(:owner => current_user)\n authorize @group, :new?\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n\t\t@group = Group.new\n\t\trespond_to do |format|\n\t\t\tformat.html # new.html.erb\n\t\t\tformat.xml { render :xml => @group }\n\t\t\tformat.json { render :json => @group }\n\t\tend\n\tend", "def new\n\t\t@group = Group.new\n\t\trespond_to do |format|\n\t\t\tformat.html # new.html.erb\n\t\t\tformat.xml { render :xml => @group }\n\t\t\tformat.json { render :json => @group }\n\t\tend\n\tend", "def new\n @user_group = UserGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @user_group }\n end\n end", "def new\n @group = WorkGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @record_group = RecordGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @record_group }\n end\n end", "def new\n @attribute_group = AttributeGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @attribute_group }\n end\n end", "def new\n @groups = Group.all\n respond_to do |format|\n format.html # new.html.haml\n format.xml { render :xml => @person }\n format.json { render :json => @person }\n end\n end", "def new\n @giving_group = GivingGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @giving_group }\n end\n end", "def new\n @group = Group.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.haml\n format.js # new.js.rjs\n format.xml { render :xml => @group }\n format.json { render :json => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end", "def new\n @group = 
Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end", "def new\n @group = Group.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @group }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @group }\n end\n end", "def new\n @lab_group = LabGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @lab_group }\n end\n end", "def new\n @group = SuperSimpleCms::Group.new\n\n respond_to do |format|\n format.html {render :template=>'admin/groups/new'}\n format.js {render :template=>'admin/groups/new', :layout=>false}\n format.xml { render :xml => @group }\n end\n end", "def new\n @targetgroup = Targetgroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @targetgroup }\n end\n end", "def new\n @group = Group.new\n\n respond_to do |format|\n #format.html # new.html.erb\n #format.xml { render :xml => @group }\n format.js { render :action => 'new' }\n end\n end", "def new\n @routinetoobjectgroup = Routinetoobjectgroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @routinetoobjectgroup }\n end\n end", "def new\n @fgroup = Fgroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @fgroup }\n end\n end", "def new\n @group_list = GroupList.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group_list }\n end\n end", "def new\n @group_of_task = GroupOfTask.new\n @projects = Project.find(:all)\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group_of_task }\n end\n end", "def new\n @ail_group = AilGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @ail_group }\n end\n end", "def new\n @grupo = Grupo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @grupo }\n end\n end", "def new\n @target_group = TargetGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @target_group }\n end\n end", "def new\n @provider_group = ProviderGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @provider_group }\n end\n end", "def new\n @contact_group = ContactGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @contact_group }\n end\n end", "def new\n @title = \"Добавление группы характеристик\"\n @group = Group.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group }\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => 
:unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to(view_group_path(@group.label), :notice => 'Group was successfully created.') }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def new\n @objectgrouptoobjectgroup = Objectgrouptoobjectgroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @objectgrouptoobjectgroup }\n end\n end", "def new\n @jido_grp_rel = JidoGrpRel.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @jido_grp_rel }\n end\n end", "def create\n\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def new\n @polco_group = PolcoGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @polco_group }\n end\n end", "def create\n \n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def new\n @personnel_group = PersonnelGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @personnel_group }\n end\n end", "def new\n @groups_happening = GroupsHappening.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @groups_happening }\n end\n end", "def new\n @transaction_group = TransactionGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => 
@transaction_group }\n end\n end", "def new\n @group = Group.new\n @membership = Membership.new\n @group_permission = GroupPermission.new\n @metro_areas = MetroArea.find(:all)\n @states = State.find(:all)\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group }\n end\n end", "def new\n @user = User.new\n @groups = Group.all(:order => [:name.desc])\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @user }\n end\n end", "def new\n @group = Group.new\n respond_to do |format|\n #format.html # new.html.erb\n format.json { render json: @group }\n end\n end", "def new\n @slicegroup = Slicegroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @slicegroup }\n end\n end", "def new\n @group_permission = GroupPermission.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group_permission }\n end\n end", "def create #:nodoc:\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = I18n.t(\"{{value}} was successfully created.\", :default => \"{{value}} was successfully created.\", :value => I18n.t(\"Group\", :default => \"Group\"))\n if params[:create_and_new_button]\n format.html { redirect_to new_group_url }\n else\n format.html { redirect_to groups_url }\n # format.xml { render :xml => @group, :status => :created, :location => @group }\n end\n else\n format.html { render :action => \"new\" }\n # format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def new\n @group = Group.new\n render json: @group\n end", "def new\n @server_group = ServerGroup.new\n @account = User.find(session[:user_id]).account\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @server_group }\n end\n end", "def new\n @group = Group.new\n\n render json: @group\n end", "def new\n @group_request = GroupRequest.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @group_request }\n end\n end", "def new\n @volunteer = Volunteer.new\n @groups = Group.find(:all)\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @volunteer }\n end\n end", "def new\n @group_outing = GroupOuting.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group_outing }\n end\n end", "def new\n \n @group = Group.find(params[:group_id])\n @title=\"Подать заявку на обучение в группе: \"[email protected] \n @person = Person.new(:group=>@group)\n #@groups=Group.all(:conditions=>['open=?',true])\n # render :layout => 'admin'\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @person }\n end\n end", "def new\n @expensegroup = Expensegroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @expensegroup }\n end\n end", "def new\n @recruitment_group = RecruitmentGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @recruitment_group }\n end\n end", "def new\n @image_gallery_group = ImageGalleryGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @image_gallery_group }\n end\n end", "def new\n @my_group_comment = My::GroupComment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @my_group_comment }\n end\n end", "def new\n @groupaddrobj = Groupaddrobj.new\n\n respond_to do |format|\n 
format.html # new.html.erb\n format.json { render json: @groupaddrobj }\n end\n end", "def new\n @resource = Resource.new\n @resource_groups = ResourceGroup.alphabetical.all\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @resource }\n end\n end", "def new\n @pgroup = Pgroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pgroup }\n end\n end", "def new\n @pgroup = Pgroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pgroup }\n end\n end", "def new\n @laboratory_test_group = LaboratoryTestGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @laboratory_test_group }\n end\n end", "def new\n @group.parent_id = params[:group_id]\n \n add_breadcrumb 'Your hubs', :hubs_path\n add_breadcrumb @hub.name, hub_path(@hub)\n unless params[:group_id]\n add_breadcrumb 'New group', new_hub_group_path(@hub)\n else\n add_breadcrumb 'New sub group', hub_group_subgroup_path(@hub, @group.parent)\n end\n \n append_title 'New group'\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @group }\n end\n end", "def create\n @group = Group.new(params[:group])\n @group.user = current_user\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def new\n @group_activity = GroupActivity.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @group_activity }\n end\n rescue ActiveRecord::RecordNotFound => e\n prevent_access(e)\n end", "def new\n @group_user = GroupUser.new(params[:group_user])\n\n respond_to do |format|\n format.html\n format.xml { render :xml => @group_user.to_xml }\n end\n end", "def new\n @group = Group.find(params[:group_id])\n @match = Match.clone_match_from_last_one(@group)\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @match }\n end\n end", "def new\n @groupon = Groupon.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @groupon }\n end\n end", "def create\n group = params[:group] || {}\n group.delete(:locales)\n group.delete(:domains)\n @group = GROUP.new(group)\n @group.current_user = current_user\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(group_url(@group.id)) }\n format.xml { render :xml => @group, :status => :created, :location => group_url(@group.id) + \".xml\" }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def newAluGroup\n @competence_group = CompetenceGroup.new\n\t\t@competence_group.competence_id = params[:competence_id]\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @competence_group }\n end\n end" ]
[ "0.8140932", "0.81277347", "0.81277347", "0.81277347", "0.81277347", "0.81277347", "0.81277347", "0.81277347", "0.81277347", "0.81277347", "0.80326277", "0.80315775", "0.802249", "0.7899247", "0.7870291", "0.7855039", "0.7848567", "0.782753", "0.779655", "0.7669039", "0.7666556", "0.7666556", "0.75986224", "0.75758636", "0.7539097", "0.75285995", "0.7515892", "0.7499183", "0.7475186", "0.7468895", "0.7464599", "0.7464599", "0.7464599", "0.7464599", "0.7464599", "0.7464599", "0.7464599", "0.7464599", "0.7461118", "0.7454922", "0.7453669", "0.7453669", "0.74489003", "0.7437012", "0.7424491", "0.74003476", "0.73922366", "0.738045", "0.7361784", "0.7353186", "0.7345198", "0.7336693", "0.73306286", "0.73048466", "0.72692966", "0.7268671", "0.72374773", "0.72374773", "0.72374773", "0.7225862", "0.7225415", "0.72203714", "0.7219843", "0.721945", "0.7186919", "0.7183844", "0.7170758", "0.7162096", "0.715485", "0.7126282", "0.7119855", "0.7111789", "0.70905125", "0.70578206", "0.70490414", "0.70464545", "0.7035948", "0.7034225", "0.7032716", "0.7018325", "0.7009851", "0.699471", "0.6992327", "0.69677186", "0.6965621", "0.6944156", "0.6941948", "0.69392204", "0.69232583", "0.69232583", "0.6912423", "0.68882823", "0.6888152", "0.68871737", "0.6874584", "0.6863314", "0.68554467", "0.6855383", "0.68462324" ]
0.8025474
12
POST /groups POST /groups.xml
def create @group = Group.new(params[:group]) respond_to do |format| if @group.save group_user = GroupUser.create(:group_id => @group.id, :user_id => current_user.id, :role_id => Role.find_by_name('Owner').id) group_user.approve! flash[:notice] = "Group has been created!" format.html { redirect_to groups_path } format.xml { render :xml => @group.to_xml } else format.html { render :action => :new } format.xml { render :xml => @group.errors, :status => :unprocessable_entity } end end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate(groups)\n groups_params = groups.inject({}) do |params, (k, v)|\n params[\"groups[#{k}]\"] = 1\n params\n end\n\n response = RouteNGN.put self.class.base_url, {:id => self.id}.merge!(groups_params)\n response.success?\n end", "def create\n #logger.info \"Post parameters: #{params}\"\n @group = Group.new(name: params[:group][:name], expiration: params[:group][:expiration])\n if @group.save\n params[:group][:users].each do |u|\n Membership.create(group: @group, user: User.where(\"id = ? OR email = ?\", u[:id], u[:email]).first, admin:u[:admin])\n end\n render json: @group, status: :created, location: @group\n else\n render json: @group.errors, status: :unprocessable_entity\n end\n end", "def createGroup(groupName, gid)\r\n uri = sprintf(\"/api/v1/group_categories/%d/groups\", gid) \r\n \r\n dbg(\"POST #{uri}\")\r\n dbg(\"name=#{groupName}\")\r\n newGroup = $canvas.post(uri, {'name' => groupName})\r\n dbg(newGroup)\r\n return newGroup\r\nend", "def createGroup(groupName, gid)\r\n uri = sprintf(\"/api/v1/group_categories/%d/groups\", gid) \r\n \r\n dbg(\"POST #{uri}\")\r\n dbg(\"name=#{groupName}\")\r\n newGroup = $canvas.post(uri, {'name' => groupName})\r\n dbg(newGroup)\r\n return newGroup\r\nend", "def create\n #logger.info \"Post parameters: #{params}\"\n @group = Group.new(name: params[:group][:name], expiration: params[:group][:expiration], owner: current_user)\n if @group.save\n @group.memberships.create!(user: current_user, admin: true)\n if params[:group][:users]\n params[:group][:users].each do |u|\n @group.memberships.create!(user: User.where(\"id = ? OR email = ?\", u[:id], u[:email]).first, admin:u[:admin])\n end\n end\n render json: @group, status: :created, location: @group\n else\n render json: @group.errors, status: :unprocessable_entity\n end\n end", "def create\n ip = request.location\n @user = current_user\n @group = @user.groups_as_owner.new(params[:group])\n params[:group][:member_ids] = (params[:group][:member_ids] << @group.member_ids).flatten\n @group.school_id = @user.school_id\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' 
}\n format.json { render json: @group, status: :created, location: @group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def CreateGroup params = {}\n \n APICall(path: 'groups.json',method: 'POST',payload: params.to_json)\n \n end", "def create\n \n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def add_new_groups(params = {})\n post(\"/groups\", params)\n end", "def create\n\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def postEntityGroup( entity_id, group_id)\n params = Hash.new\n params['entity_id'] = entity_id\n params['group_id'] = group_id\n return doCurl(\"post\",\"/entity/group\",params)\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to(view_group_path(@group.label), :notice => 'Group was successfully created.') }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n group = 
params[:group] || {}\n group.delete(:locales)\n group.delete(:domains)\n @group = GROUP.new(group)\n @group.current_user = current_user\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(group_url(@group.id)) }\n format.xml { render :xml => @group, :status => :created, :location => group_url(@group.id) + \".xml\" }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create_group(path, name)\n puts \"creating #{name} on path #{path}\"\n \n ret = RestClient.post \"#{@url}/groups\", \n { path: path, name: name }, \n { \"Private-Token\": @token } \n json = JSON.parse(ret.body)\n\n json['id']\n end", "def add_ce_group\n self.refresh_access_token!\n\n haml_template = File.read(File.join(TEMPLATES_DIR, 'group.xml.haml'))\n request_body = Haml::Engine.new(haml_template, remove_whitespace: true).render(Object.new)\n\n @response = @oauth_access_token.post(\n 'https://www.google.com/m8/feeds/groups/default/full',\n {\n body: request_body,\n headers: {\n 'Content-type' => 'application/atom+xml',\n 'GData-Version' => '3.0'\n }\n }\n )\n\n group_id = GROUP_REGEX.match(@response.body)[1]\n\n @response.status == 201 ? group_id : nil\n end", "def atest_ID_25862_new_post_in_group_you_manage()\n login_as_user1\n read_all_updates\n groupName = create_any_new_group(\"Open Group\", \"Family\")\n logout_common\n login_as_user2\n post_to_any_group(\"Family\",groupName)\n logout_common\n login_as_user1\n verify_updates\n end", "def create\n @user_group = UserGroup.new(params[:user_group])\n\n respond_to do |format|\n if @user_group.save\n flash[:notice] = \"User group <em>#{@user_group.name}</em> created.\"\n format.html { redirect_to(@user_group) }\n format.xml { render :xml => @user_group, :status => :created, :location => @user_group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @user_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n respond_to do |format|\n if @group.save\n @group.users.push(current_user)\n UserGroup.set_is_admin(@group.id, current_user.id, true)\n invite_members\n format.html { redirect_to @group, notice: t('flash.notice.groups.successfully_created') }\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @add_to_group = AddToGroup.new(params[:add_to_group])\n\n respond_to do |format|\n if @add_to_group.save\n format.html { redirect_to(@add_to_group, :notice => 'Add to group was successfully created.') }\n format.xml { render :xml => @add_to_group, :status => :created, :location => @add_to_group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @add_to_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Le groupe a été créé.' 
}\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def to_groups\n message = AssignTasksToGroups.new(params).perform\n json_response({message: message}, :created)\n end", "def create\n @group = Group.new(params[:group])\n @group.user = current_user\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create_group(attributes)\n post(\"/v1/groups\", attributes)\n end", "def create\n @group = Group.new(params[:group])\n\n if @group.save\n redirect_to(list_groups_path(:page => params[:page]), :notice => t(\"group.created\"))\n else\n render :action => :new\n end\n end", "def create #:nodoc:\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = I18n.t(\"{{value}} was successfully created.\", :default => \"{{value}} was successfully created.\", :value => I18n.t(\"Group\", :default => \"Group\"))\n if params[:create_and_new_button]\n format.html { redirect_to new_group_url }\n else\n format.html { redirect_to groups_url }\n # format.xml { render :xml => @group, :status => :created, :location => @group }\n end\n else\n format.html { render :action => \"new\" }\n # format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @attribute_group = AttributeGroup.new(params[:attribute_group])\n\n respond_to do |format|\n if @attribute_group.save\n format.html { redirect_to(@attribute_group, :notice => 'Attribute group was successfully created.') }\n format.xml { render :xml => @attribute_group, :status => :created, :location => @attribute_group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @attribute_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, :notice => 'Group was successfully created.' 
}\n format.json { render :json => @group, :status => :created, :location => @group}\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def new_nodegroup(nodegroup_json)\n nodemgr_rest_call(\"POST\", \"classifier\", \"groups\", $credentials, id=\"\", nodegroup_json)\nend", "def create\n\n @user_group = UserGroup.new(params[:user_group])\n\n respond_to do |format|\n\n if @user_group.save\n\n #flash[:notice] = 'UserGroup was successfully created.'\n format.html do\n if request.env['HTTP_REFERER']\n redirect_to :back\n else\n redirect_to(\n :controller => :groups,\n :action => :show,\n :id => @user_group.group_id)\n end\n end\n format.xml do\n render(\n :xml => @user_group,\n :status => :created,\n :location => @user_group)\n end\n\n else\n\n format.html {\n render :controller => :groups, :action => :index }\n format.xml {\n render :xml => @user_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to groups_path, notice: 'Group was successfully created.' }\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' 
}\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n if params[:member_id].nil?\n @group = Group.new(params[:group])\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n else\n # Add member to an group.\n # POST /members/:member_id/groups\n RAILS_DEFAULT_LOGGER.debug(\"Add member #{params[:member_id]} to group #{params[:post][:group_id]}.\")\n @member = Member.find(params[:member_id])\n @member.group << Group.find(params[:post][:group_id])\n\n respond_to do |format|\n flash[:notice] = 'Group was successfully added.'\n format.html { redirect_to(@member) }\n format.xml { render :xml => @member, :status => :created, :location => @member }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n respond_to do |format|\n if @group.save\n @membership = Membership.create!(group_id: @group.id, user_id: current_user.id, admin: true)\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render json: @group, status: :created, location: @group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render json: @group, status: :created, location: @group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render json: @group, status: :created, location: @group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render json: @group, status: :created, location: @group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' 
}\n format.json { render json: @group, status: :created, location: @group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n @group.user = current_user\n\n respond_to do |format|\n if @group.save\n @group.add_member current_user\n flash[:notice] = '{object} was successfully {action}.'[:object_action_notice, \"Group\"[], \"created\"[]]\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def groups\r\n save_default_group(params)\r\n delete_group(params)\r\n end", "def group_create(element)\n name = element[\"group\"]\n auth = element[\"auth\"]\n body = {\n \"jsonrpc\" => \"2.0\",\n \"method\" => \"hostgroup.create\",\n \"params\" => {\n \"name\" => name\n },\n \"auth\" => auth,\n \"id\" => rand(9999)\n }\n result = json_body(body)\n # puts result\n result = result[\"groupids\"]\n result = result[0]\n # puts result\n # return will be group id\n return result\nend", "def create\n group = Group.new(group_params)\n if group.save\n render json: group\n else\n render json: group.errors.full_messages, status: :unprocessable_entity\n end\n end", "def create\n @group = Group.new(group_params)\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render action: 'show', status: :created, location: @group }\n else\n format.html { render action: 'new' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n @group.create_robotsurvey()\n @group.create_signupsurvey()\n @group.create_poststudysurvey()\n respond_to do |format|\n if @group.save\n \n format.html { redirect_to @group, notice: 'Group was successfully created.' 
}\n format.json { render action: 'show', status: :created, location: @group }\n else\n format.html { render action: 'new' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n if @group.save\n flash[:notice] = t('flash_msg46')\n @groups = Group.all\n else\n @error = true\n end\n end", "def create\n #should expire groups page cache\n \n # expire the cache of the grouplist of this user\n Rails.cache.delete(Person.groups_cache_key(@current_user.id, session[:cookie]))\n \n @group = Group.new\n begin\n @group = Group.create(params[\"group\"], session[:cookie])\n flash[:notice] = :group_created_successfully\n redirect_to group_path(@group) and return\n rescue RestClient::RequestFailed => e\n @group.add_errors_from(e)\n @group.form_title = params[:group][:title]\n @group.form_description = params[:group][:description]\n render :action => :new and return\n rescue RestClient::Unauthorized => e\n @group.add_errors_from(e)\n @group.form_title = params[:group][:title]\n @group.form_description = params[:group][:description]\n render :action => :new and return \n end\n end", "def addGroupToServer(group)\n path = \"/user/\" + @@conf[\"username\"] + \"/group/\" + group.strip\n res = HttpRequest.new(:put, path).send(@@host)\n puts res\n puts \"CODE: \" + res.code\n\nend", "def create\n @group = Group.new(group_params)\n\n if @group.save\n render_json_message({:success => t('.success')}, 201, {id: @group.id})\n else\n render_json_message({:errors => @group.errors.messages}, 422)\n end\n\n end", "def group_call(params)\n path = @version + '/Call/Group/'\n method = 'POST'\n return request(path, method, params)\n end", "def create\n @group = WorkGroup.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\t\[email protected]_id = session[:user_id]\n\n respond_to do |format|\n if @group.save\n\n\t\t\t\t@page = Page.create(:owner => @group.id, :category => 'group')\n\t\t\t\t@group_member = GroupMember.create(:group_id => @group.id, :user_id => session[:user_id], :moderator => true)\n format.html { redirect_to groups_url, :notice => 'Group was successfully created.' }\n format.json { render :json => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n #redirect_to new_grouping_path if params[:grouping][:name] == ''\n #@grouping = Grouping.new(params[:grouping])\n @grouping.company_id = current_user.company_id\n\n if @grouping.save\n gflash :success => 'Group created.' 
\n else\n @users = []\n @root = false\n end\n \n #redirect_to edit_grouping_path(@grouping)\n respond_with(@grouping)\n end", "def create\n @user.create_group!(new_group_params[:group_user_ids], {name: new_group_params[:name]})\n end", "def create\n @group = Group.new(group_params)\n #On vérifie que la liste de droits d'un groupe est effacé\n @group.rights.clear\n\n #On ajoute les droits choisis par un utilisateur\n params[:group][:right_ids] ||= []\n params[:group][:right_ids].each do |right|\n if !(right.blank?)\n @group.add_right(Right.find_by_id(right))\n end\n end\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: t('group.created_msg') }\n format.json { render action: 'show', status: :created, location: @group }\n else\n format.html { render action: 'new' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n\n respond_to do |format|\n if @group.save\n format.json { render json: @group, status: :created }\n else\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Новая группа создана!' }\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n @group.users << current_user\n \n respond_to do |format|\n if @group.save\n @group.groups_users.first.update_attribute :level, 2\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to(@group) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def move_users_to_group(group_id, set_of_user_ids)\n set_of_user_ids.each do |id|\n @url = \"http://#{$canvas_host}/api/v1/groups/#{group_id}/memberships\"\n puts \"@url is #{@url}\"\n \n @payload={'user_id': id}\n puts(\"@payload is #{@payload}\")\n \n @postResponse = HTTParty.post(@url, :body => @payload.to_json, :headers => $header )\n puts(\" POST to move user to group has Response.code #{@postResponse.code} and postResponse is #{@postResponse}\")\n end\nend", "def create\n @group = Group.new(group_params)\n @group.user = @user\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' 
}\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @user = current_user\n @group = Group.new(group_params)\n @group.save\n respond_with(@group)\n end", "def create\n @target_group = TargetGroup.new(params[:target_group])\n\n respond_to do |format|\n if @target_group.save\n flash[:notice] = 'TargetGroup was successfully created.'\n format.html { redirect_to([:admin, @target_group]) }\n format.xml { render :xml => @target_group, :status => :created, :location => @target_group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @target_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n new_sort = Configurations.get_sort('group')\n @group.sort = new_sort\n @group.deleted = 0\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to :groups, notice: 'Group was successfully created.' }\n format.json { render json: @group, status: :created, location: @group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @request_group = RequestGroup.new(request_group_params)\n\n respond_to do |format|\n if @request_group.save\n format.html { redirect_to @request_group, \n\t\t\t\t\t\t\t\t\t\t\tnotice: 'Request group was successfully created.' }\n format.json { render action: 'show', status: :created, \n\t\t\t\t\t\t\t\t\t\t\tlocation: @request_group }\n else\n format.html { render action: 'new' }\n format.json { render json: @request_group.errors, \n\t\t\t\t\t\t\t\t\t\t\tstatus: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:success] = \"Группа успешно добавлена.\"\n format.html { redirect_to @group }\n format.json { render json: @group, status: :created, location: @group }\n else\n flash.now[:error] = \"Группа с таким названием не может быть добавлена!\"\n format.html { render action: \"new\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @resource = Group.new(group_params)\n\n respond_to do |format|\n if @resource.save\n index\n\n flash[:success] = t('notices.saved_successfully')\n format.html { redirect_to @resource, notice: 'Group was successfully created.' }\n format.json { render :show, status: :created, location: @resource }\n else\n format.html { render :new }\n format.json { render json: @resource.errors, status: :unprocessable_entity }\n end\n format.js\n end\n end", "def create\n new_group = Group.new(name: params[:name])\n\n if new_group.save\n render json: { \"notice\"=>\"new group #{params[:name]} successfully created\" }\n else\n render json: { \"alert\"=>\"group creation failed. 
check params.\" }\n end\n end", "def create\n @targetgroup = Targetgroup.new(params[:targetgroup])\n\n respond_to do |format|\n if @targetgroup.save\n flash[:notice] = 'Targetgroup was successfully created.'\n format.html { redirect_to(@targetgroup) }\n format.xml { render :xml => @targetgroup, :status => :created, :location => @targetgroup }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @targetgroup.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n group = Group.find(params[:group_id])\n\n @subgroup = group.subgroups.create(subgroup_params)\n\n respond_to do |format|\n if @subgroup.save\n format.html { redirect_to([@subgroup.group, @subgroup], :notice => 'Comment was successfully created.') }\n format.xml { render :xml => @subgroup, :status => :created, :location => [@subgroup.group, @subgroup] }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @subgroup.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(permitted_params)\n @group.owner ||= current_user\n authorize @group, :create?\n respond_to do |format|\n if @group.save\n format.html { redirect_to(@group, :notice => 'Group was successfully created.') }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(params[:group])\n\n respond_to do |format|\n if !current_user || (!current_user.is_admin)\n format.html { redirect_to(@group, :notice => 'No permissions to create groups.')}\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n elsif @group.save\n format.html { redirect_to(@group, :notice => 'Group was successfully created.') }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n token = params[:token]\n\n # use the user login instance and match emails to find current user\n @user_login = UserLogin.where(token: token).take\n @current_user = User.where(email: @user_login.email).take\n\n respond_to do |format|\n if @group.save\n\n # create a new group membership for new group w/ current user as admin\n @new_membership = GroupMembership.create(group_id: @group.id, user_id: @current_user.id, is_admin: true)\n\n # associate new membership with the group and the user\n @group.group_memberships << @new_membership\n @current_user.group_memberships << @new_membership\n\n format.html { redirect_to group_path(:id => @group.id), notice: 'Group was successfully created.' }\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n respond_to do |format|\n if @group.save\n @group.memberships.create(user_id: current_user.id, state: \"owner\")\n format.html { redirect_to @group, notice: 'Group was successfully created.' 
}\n format.json { render action: 'show', status: :created, location: @group }\n else\n format.html { render action: 'new' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = current_user.groups.new(group_params)\n\n if (@group.save)\n flash[:success] = \"Found a new group!\"\n else\n flash[:warning] = \"Could not create group\"\n end\n\n redirect_to @group\n end", "def create\n @group = @current_user.create_group(params[:group])\n # @group = @current_user.groups.build(params[:group])\n # @group = Group.new(params[:group])\n\n respond_to do |format|\n if @group.valid?\n format.html { redirect_to circle_groups_path, notice: 'Group was successfully created.' }\n format.json { render json: @group, status: :created, location: @group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n @group.owner = current_user\n @group.users << current_user\n\n respond_to do |format|\n if @group.save\n membership = Membership.find_by_group_id_and_user_id(@group.id, current_user)\n membership.update_attributes :acceptance_status => true\n\n format.html { redirect_to group_path(@group), alert: 'Group was successfully created.' }\n format.json { render action: 'show', status: :created, location: @group }\n else\n format.html { render action: 'new' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(new_group_params)\n if @group.save\n redirect_to groups_url\n else\n render :template => \"groups/new\"\n end\n end", "def create\n @group = Group.new(group_params)\n @group.admin_id = current_user.id\n respond_to do |format|\n if @group.save\n\tUserGroup.create(admin: true, user_id: current_user.id, group_id: @group.id)\n\tflash[:success] = \"Group was successfully created!\"\n format.html { redirect_to @group }\n format.json { render action: 'show', status: :created, location: @group }\n else\n format.html { render action: 'new' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = Group.new(group_params)\n unless @group.save\n render :new and return\n end\n msg = [\"Created group.\", rebuild_configs].join(\" \")\n redirect_to groups_url, notice: msg\n end", "def create\n\t\t@group = current_user.groups.create(group_params)\n\t\tif params[:users].present?\n\t\t\tparams[:users].each do |user_id|\n\t\t\t\[email protected]_groups.create(user_id: user_id)\n\t\t\tend\n\t\tend\n\t\tredirect_to :groups, notice: 'Group created successfully'\n\tend", "def create\n @group = @group_model.new(group_params)\n respond_to do |format|\n if @group.save\n @course.groups<< @group\n format.html { redirect_to course_group_path(@course,@group), notice: \"#{@group_model.to_s} was successfully created.\" }\n format.json { render :show, status: :created, location: @group }\n else\n format.html { render :new }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @s_group = SGroup.new(s_group_params)\n\n respond_to do |format|\n if @s_group.save\n format.html { redirect_to @s_group, notice: 'S group was successfully created.' 
}\n format.json { render :show, status: :created, location: @s_group }\n else\n format.html { render :new }\n format.json { render json: @s_group.errors, status: :unprocessable_entity }\n end\n end\n end", "def add_to_group\n ids = params[:ids]\n group = Group.find(params[:group_id])\n if ids.present?\n users = User.where(id: ids)\n users.each do |u|\n GroupUser.create(user: u, group: group, created_by: current_user)\n if current_user != u\n u.notify(\n \"#{current_user.to_s} has added you to user group: #{group.name}\",\n group, \n \"group\"\n )\n else\n group.admin.each do |admin|\n admin.notify(\n \"#{current_user.to_s} has requested to be added to the user group: #{group.name}\",\n group,\n \"group\"\n )\n end\n end\n end\n if users.count === 1 && users.first === current_user\n flash[:notice] = \"Request sent!\"\n else\n flash[:notice] = \"#{\"Invitation\".pluralize(users)} sent!\"\n end\n end\n respond_to do |format|\n format.json { render :json => {}, :status => :ok }\n format.html {\n if users\n flash.now[:success] = \"Group membership pending for #{users.map(&:username).join(', ')}.\"\n end \n redirect_to group_path(group) \n }\n end\n end", "def create\n @group = Group.new(params[:group])\n\n @group.group_locales = []\n\n params[:group_locales].each do |locale, values|\n @group.group_locales << GroupLocale.new(:locale => Locale.new(values), :group => @group)\n end\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render json: @group, status: :created, location: @group }\n format.xml { render xml: @group, status: :created, location: @group }\n else\n format.html { render action: \"new\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n format.xml { render xml: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @group = SuperSimpleCms::Group.new(params[:group])\n\n respond_to do |format|\n if @group.save\n flash[:notice] = 'Group was successfully created.'\n format.html { redirect_to super_simple_group_path(@group) } \n format.js { redirect_to formatted_super_simple_group_path(@group, :js) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.js { render :action => \"new\", :layout=>false}\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n params[:group].delete(:domain) unless current_group.shapado_version.has_custom_domain?\n @group = Group.new\n if params[:group][:languages]\n params[:group][:languages].reject! { |lang| lang.blank? 
}\n end\n @group.safe_update(%w[languages name legend description default_tags subdomain logo forum enable_mathjax enable_latex custom_favicon language theme signup_type custom_css wysiwyg_editor], params[:group])\n\n @group.safe_update(%w[isolate domain private], params[:group]) if current_user.admin?\n\n @group.owner = current_user\n @group.state = \"active\"\n\n respond_to do |format|\n if @group.save\n @group.create_default_widgets\n\n Jobs::Images.async.generate_group_thumbnails(@group.id)\n @group.add_member(current_user, \"owner\")\n flash[:notice] = I18n.t(\"groups.create.flash_notice\")\n format.html { redirect_to(domain_url(:custom => @group.domain, :controller => \"admin/manage\", :action => \"properties\")) }\n format.json { render :json => @group.to_json, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create_group( group_name )\n check_user_pass\n # First we need to clean the group_name since jangosmtp only allows alphanumeric characters\n group_name.tr!('^A-Za-z0-9 ', '')\n options = {\n 'Username' => @username,\n 'Password' => @password,\n 'GroupName' => group_name\n }\n\n response = post_with_attempts( 'AddTransactionalGroup', options )\n if response != false\n new_group_id = Nokogiri::XML.parse(response.body).xpath(\"*\").first.content.split(\"\\n\")[2]\n end\n return new_group_id\n end", "def create\n @group = current_user.groups.build(:name => params[:group][:name])\n @group.set_members(params[:member])\n\n if @group.save\n redirect_to root_path\n else\n render 'new'\n end\n\n end", "def create\n if(params[:group][:name].nil?) or (params[:group][:name] == \"\")\n flash[:notice] = \"Group must have a name and description\"\n redirect_to new_group_path\n else\n \n #create a new group\n @group = Group.new(group_params)\n user = User.find(session[:user_id]) \n respond_to do |format|\n if @group.save\n #generate a code for the group\n o = [('a'..'z'), ('A'..'Z')].map(&:to_a).flatten\n new_code = (0...8).map { o[rand(o.length)] }.join\n @group.update(code: new_code)\n #after group is created add creator to group as leader\n Membership.create!(user_id: session[:user_id], group_id: @group.id, member_type: 'leader', username: user.username)\n format.html {redirect_to @group, notice: \"Group was successfully created.\"}\n format.json {render :show, status: :created, location: @group}\n else\n format.html {render :new, status: :unprocessable_entity}\n format.json {render json: @group.errors, status: :unprocessable_entity}\n end\n end\n end\n end", "def create\n @group = Group.new(group_params)\n member = Member.create\n @group.members << member\n current_user.members << member\n\n @group.owner = member\n\n respond_to do |format|\n if @group.save\n format.html { redirect_to @group, notice: 'Group was successfully created.' }\n format.json { render action: 'show', status: :created, location: @group }\n else\n format.html { render action: 'new' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @pages_group = PagesGroup.new(pages_group_params)\n respond_to do |format|\n if @pages_group.save\n format.html { redirect_to @pages_group, notice: 'Pages group was successfully created.' 
}\n format.json { render :show, status: :created, location: @pages_group }\n else\n format.html { render :new }\n format.json { render json: @pages_group.errors, status: :unprocessable_entity }\n end\n end\n end" ]
[ "0.660965", "0.6602641", "0.6588023", "0.6588023", "0.6567241", "0.6546185", "0.6518592", "0.63975435", "0.6392506", "0.6383147", "0.63828105", "0.6363887", "0.6356866", "0.6355277", "0.6355277", "0.6355277", "0.6323264", "0.6296784", "0.6214893", "0.61911434", "0.6170261", "0.6165762", "0.61538535", "0.6148982", "0.61486745", "0.6147544", "0.61419165", "0.6129121", "0.6107307", "0.6095698", "0.60951096", "0.60940045", "0.60731614", "0.60720384", "0.6055251", "0.6055251", "0.6055251", "0.6055251", "0.6055251", "0.6055251", "0.6024726", "0.6022986", "0.60017073", "0.60017073", "0.60017073", "0.60017073", "0.60017073", "0.5999224", "0.5999193", "0.5992597", "0.5989645", "0.59895074", "0.59861255", "0.59847", "0.5975781", "0.5970683", "0.5962466", "0.5957395", "0.5943825", "0.59435093", "0.593904", "0.5937637", "0.5936947", "0.59266716", "0.59266263", "0.5921032", "0.5913539", "0.59114826", "0.59107834", "0.5910229", "0.5906941", "0.5906329", "0.5888946", "0.5884174", "0.58675593", "0.5851238", "0.584762", "0.58442557", "0.5831421", "0.5829952", "0.5829241", "0.5826764", "0.5825881", "0.58211225", "0.58188015", "0.58141994", "0.5804252", "0.57967293", "0.57964134", "0.5795734", "0.5791287", "0.57875603", "0.5786038", "0.57811576", "0.5769494", "0.57692444", "0.5760862", "0.5756153", "0.57552373" ]
0.60212445
42
GET /groups/1/edit GET /groups/1/edit.xml
def edit
  respond_to do |format|
    format.html
    format.xml { render :xml => @group.to_xml }
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def edit\n respond_to do |format|\n format.html\n format.xml { render :xml => @group_user.to_xml }\n end\n end", "def edit\n @group = Group.find(params[:id])\n end", "def edit\n @group = Group.find(params[:id])\n end", "def edit\n @group = Group.find_by_id params[:id]\n end", "def update\n \n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to(@group, :notice => 'Group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully 
updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n \n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n \n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(admin_groups_url) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find_by_param(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to(@group, :notice => 'Group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n Group.rebuild! if nil.|Group.find(:first).rgt\n\t @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def edit\n @group = current_user.created_groups.find(params[:id])\n end", "def update\n @group = Group.find(params[:id])\n\n if @group.update_attributes(params[:group])\n flash[:notice] = t('flash_msg47')\n @groups = Group.all\n # format.html { redirect_to(@group) }\n # format.xml { head :ok }\n else\n # format.html { render :action => \"edit\" }\n # format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end", "def update\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = '{object} was successfully {action}.'[:object_action_notice, \"Group\"[], \"updated\"[]]\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to groups_path }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def edit\n @iogroup = Iogroup.find(params[:id])\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to( view_group_path(@group.label), :notice => 'Group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def edit\n @user = 
User.find_by_id(params[:id])\n @user_groups = UserGroup.get_all_user_groups_in_org(params[:organization_id])\n render :edit\n end", "def edit\n @item_group= Vger::Resources::Suitability::ItemGroup.find(params[:id])\n respond_to do |format|\n format.html\n end\n end", "def edit\n @grupo = Grupo.find(params[:id])\n end", "def edit\n @group = LunchGroup.find_by_id params[:id]\n end", "def edit\n @user = User.find_by_id(params[:id])\n @user_groups = UserGroup.get_all_user_groups_in_org(params[:organization_id])\n render :edit_temp\n end", "def update\n if @group.update_attributes(params[:group])\n flash[:notice] = t(\"group.updated\")\n redirect_to list_groups_path(:page => params[:page])\n return\n end\n\n render :action => :edit\n end", "def edit\n @patients_groups = PatientsGroup.find(params[:id])\n @group = Group.find(@patients_groups.group_id)\n @patient = Patient.find(@patients_groups.patient_id)\n session[:return_to] = request.referer\n @title = \"Edit Patient to Group Relationship\"\n end", "def update\n @show_element=\"admin\"\n (l1id,id) = params[:org_id].split(',')\n @org = Org.find([l1id,id])\n @rolesgroups = @org.roles_groups.all(:order=>:blue_groups_name)\n \n respond_to do |format|\n if @org.update_attributes(params[:org])\n flash[:notice] = 'Roles Groups was successfully updated.'\n format.html{redirect_to(:action=>\"edit\",:id=>@org )}\n else\n flash[:notice] =\"There was a problem updating this record.\"\n format.html{ render :action => \"edit\" }\n end\n end\n \n end", "def edit_group(id, options)\n params = get_group(id)\n params.update(options)\n post(EDIT_GROUP_URI, params)\n end", "def edit_group(id, options)\n params = get_group(id)\n params.update(options)\n post(EDIT_GROUP_URI, params)\n end", "def update\n @grupo = Grupo.find(params[:id])\n\n respond_to do |format|\n if @grupo.update_attributes(params[:grupo])\n flash[:notice] = \"Los datos del Grupo #{@grupo.nombre} se han actualizado.\"\n format.html { redirect_to(@grupo) }\n format.xml { head :ok }\n else\n flash[:error] = \"Hubo un error actualizando el grupo.\"\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @grupo.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @add_to_group = AddToGroup.find(params[:id])\n\n respond_to do |format|\n if @add_to_group.update_attributes(params[:add_to_group])\n format.html { redirect_to(@add_to_group, :notice => 'Add to group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @add_to_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group.to_xml }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group.to_xml }\n end\n end", "def edit\n @show_element=\"admin\"\n (l1id, id) = params[:id].split(',')\n @org = Org.find(params[:id])\n @rolesgroups = @org.roles_groups.all(:order=>:blue_groups_name)\n \n @rolesgroups.each do |group|\n @org.roles_groups.build\n end\n #@rolesgroup = RolesGroup.new\n end", "def update #:nodoc:\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = I18n.t(\"{{value}} was successfully updated.\", :default => \"{{value}} was successfully updated.\", :value => I18n.t(\"Group\", :default => \"Group\"))\n 
format.html { redirect_to groups_url }\n # format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n # format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @attribute_group = AttributeGroup.find(params[:id])\n\n respond_to do |format|\n if @attribute_group.update_attributes(params[:attribute_group])\n format.html { redirect_to(@attribute_group, :notice => 'Attribute group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @attribute_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def edit\n\t\trespond_to do |format|\n\t\t\tformat.html # edit.html.erb\n\t\t\tformat.xml { render :xml => @permission }\n\t\tend\n end", "def update\n @group = WorkGroup.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def show\n @add_to_group = AddToGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @add_to_group }\n end\n end", "def update\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to((current_user and current_user.is_site_admin? and current_user != @group.users.owners.first) ? by_user_groups_path(:user_id => @group.users.owners.first.id) : groups_path) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n if @group.update_attributes(params[:group])\n respond_with(@group, only: [:id, :name, :creator_id, :admin_id])\n else\n render_error(404, request.path, 20103, \"Failed to update group info\")\n end\n end", "def update\n @esol_group = EsolGroup.find(params[:id])\n\n respond_to do |format|\n if @esol_group.update_attributes(params[:esol_group])\n format.html { redirect_to @esol_group, notice: 'Esol group was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @esol_group.errors, status: :unprocessable_entity }\n end\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def update\n @group = Group.find(params[:id])\n authorize @group, :update?\n respond_to do |format|\n if @group.update_attributes(permitted_params)\n format.html { redirect_to(@group, :notice => 'Group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def show\n\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def update\n @user_group = UserGroup.find(params[:id])\n\n respond_to do |format|\n if @user_group.update_attributes(params[:user_group])\n flash[:notice] = \"User group <em>#{@user_group.name}</em> updated.\"\n format.html { redirect_to(@user_group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @user_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def edit2\n @show_element=\"admin\"\n @action=\"update2\"\n @rolesgroup = RolesGroup.find(params[:id])\n @role = Role.find_by_role_name(@rolesgroup.role_name)\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, :notice => 'Group was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, :notice => 'Group was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def edit #$$~used\n puts \"xxxxxxx NodesC a:edit xxxxxxx\"\n @node = Node.find(params[:id])\n end", "def edit\n\n @student = Student.find(params[:id])\n\n respond_to do |format|\n\n format.html\n format.xml {render :xml => @student}\n\n end #end do\n\n end", "def show\n @group = Group.find(params[:id]) #Sicherheitsrisiko ohne @user.groups.find ?\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:success] = \"Группа успешно отредактирована.\"\n format.html { redirect_to @group }\n format.json { head :no_content }\n else\n flash.now[:error] = \"Введены некорректные данные!\"\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @ail_group = AilGroup.find(params[:id])\n\n respond_to do |format|\n if @ail_group.update_attributes(params[:ail_group])\n flash[:notice] = 'AilGroup was successfully updated.'\n format.html { redirect_to(@ail_group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @ail_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @expensegroup = Expensegroup.find(params[:id])\n\n respond_to do |format|\n if @expensegroup.update_attributes(params[:expensegroup])\n flash[:notice] = 'Expensegroup was successfully updated.'\n format.html { redirect_to(@expensegroup) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @expensegroup.errors, :status => :unprocessable_entity }\n end\n end\n end", "def show\n @user_group = UserGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @user_group }\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, notice: 'Group was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = SuperSimpleCms::Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to super_simple_group_path(@group) } \n format.js { redirect_to formatted_super_simple_group_path(@group, :js) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.js { render :action => \"new\", :layout=>false}\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def edit\n render partial: \"user_groups/form\",\n locals: { user_group: @user_group }\n end", "def edit\n\n end", "def update\n @contact_group = ContactGroup.find(params[:id])\n\n respond_to do |format|\n if @contact_group.update_attributes(params[:contact_group])\n format.html { redirect_to(@contact_group, :notice => 'Contact group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @contact_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if !current_user || (!current_user.is_admin && [email protected]?(current_user))\n format.html { redirect_to(@group, :notice => 'No permissions to edit group.')}\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n\n elsif @group.update_attributes(params[:group])\n format.html { redirect_to(@group, :notice => 'Group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group_of_task = GroupOfTask.find(params[:id])\n\n respond_to do |format|\n if @group_of_task.update_attributes(params[:group_of_task])\n flash[:notice] = 'GroupOfTask was successfully updated.'\n format.html { redirect_to(@group_of_task) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group_of_task.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @lab_group = LabGroup.find(params[:id])\n\n respond_to do |format|\n if @lab_group.update_attributes(params[:lab_group])\n flash[:notice] = 'LabGroup was successfully updated.'\n format.html { redirect_to(@lab_group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @lab_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n\t\t@group = Group.find(params[:id])\n\t\[email protected]_accessor(current_user)\n\t\trespond_to do |format|\n\t\t\tif @group.update_attributes(params[:group])\n\t\t\t\tflash[:notice] = t(:ctrl_object_updated, :typeobj => t(:ctrl_group), :ident => @group.name)\n\t\t\t\tformat.html { redirect_to(@group) 
}\n\t\t\t\tformat.xml { head :ok }\n\t\t\telse\n\t\t\t\tflash[:error] = t(:ctrl_object_not_updated, :typeobj => t(:ctrl_group), :ident => @group.name)\n\t\t\t\tformat.html { render :action => \"edit\" }\n\t\t\t\tformat.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend", "def update\n @group = GROUP.first_or_get!(params[:id])\n @group.current_user = current_user\n\n @group.update_children((params[:group] || {}).delete(:locales), :locale)\n\n respond_to do |format|\n if @group.update(params[:group]) or not @group.dirty?\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(group_url(@group.id)) }\n format.xml { render :xml => @group }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to params[:back_to], notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def edit\r\n end", "def update\n redirect_to :action => :index and return unless is_owner?\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @group }\n format.xml { render xml: @group }\n end\n end", "def update\n @provider_group = ProviderGroup.find(params[:id])\n\n respond_to do |format|\n if @provider_group.update_attributes(params[:provider_group])\n flash[:notice] = 'ProviderGroup was successfully updated.'\n format.html { redirect_to(@provider_group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @provider_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n if @group.update_attributes(params[:group])\n flash[:notice] = \"Grupo actualizado.\"\n redirect_to groups_path\n else\n render :action => 'edit'\n end\n end", "def update\n @routinetoobjectgroup = Routinetoobjectgroup.find(params[:id])\n\n respond_to do |format|\n if @routinetoobjectgroup.update_attributes(params[:routinetoobjectgroup])\n flash[:notice] = 'Routinetoobjectgroup was successfully updated.'\n format.html { redirect_to(@routinetoobjectgroup) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @routinetoobjectgroup.errors, :status => :unprocessable_entity }\n end\n end\n end", "def edit \n\n @review_group = ReviewGroup.find(params[:id])\n\n end", "def update\n @record_group = RecordGroup.find(params[:id])\n @record_group.accessible = :all if admin?\n respond_to do |format|\n if @record_group.update_attributes(params[:record_group])\n flash[:notice] = 'RecordGroup was successfully updated.'\n format.html { redirect_to(@record_group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @record_group.errors, :status => :unprocessable_entity }\n end\n end\n 
end", "def edit\r\n \r\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Le groupe a été modifié.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def edit_groups=(groups)\n set_edit_groups(groups, edit_groups)\n end", "def update\n @group = Group.find(by_id)\n if @group.update_attributes(group_params)\n flash[:success] = \"Group updated\"\n redirect_to @group\n else\n render 'edit'\n end\n end", "def show\n Group.rebuild! if nil.|Group.find(:first).rgt\n\t #this won't work - it won't find children groups\n\t @group = Group.find_by_id(params[:id])\n\t @group = nil unless current_user.can_access_group?(@group)\n respond_to do |format|\n if @group\n format.html # show.html.erb\n format.xml { render :xml => @group }\n else\n flash[:notice] = 'Group invalid or you do not have access to this group.'\n format.html { redirect_to groups_path}\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group_user = @parent.find(params[:id]) #get the record\n\n respond_to do |format|\n if @group_user.update_attributes(params[:group_user])\n format.html { redirect_to(@group, :notice => 'Group user was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group_user.errors, :status => :unprocessable_entity }\n end\n end\n end", "def edit\n\n end", "def update\n\t\t@group = Group.find(params[:id])\n\t\[email protected]_accessor(current_user)\n\t\trespond_to do |format|\n\t\t\tif @group.update_attributes(params[:group])\n\t\t\t\tflash[:notice] = t(:ctrl_object_updated, :typeobj => t(:ctrl_group), :ident => @group.name)\n\t\t\t\tshow_\n\t\t\t\tformat.html { render :action => \"show\" }\n\t\t\t\tformat.xml { head :ok }\n\t\t\telse\n\t\t\t\tflash[:error] = t(:ctrl_object_not_updated, :typeobj => t(:ctrl_group), :ident => @group.name, :error => @group.errors.full_messages)\n\t\t\t\tformat.html { render :action => \"edit\" }\n\t\t\t\tformat.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend", "def edit\n end", "def edit\n end", "def edit\n end", "def edit\n end" ]
[ "0.7694557", "0.7006799", "0.69988525", "0.6979855", "0.6634475", "0.6613017", "0.6613017", "0.66038346", "0.66038346", "0.66038346", "0.66038346", "0.66038346", "0.66038346", "0.6591789", "0.65787894", "0.6555638", "0.65346366", "0.6505652", "0.6492212", "0.6462138", "0.6460182", "0.64474607", "0.64373493", "0.6412601", "0.6359052", "0.6348409", "0.6332467", "0.6311929", "0.6294865", "0.6267722", "0.62664235", "0.6263281", "0.6263281", "0.62620294", "0.6232334", "0.6224042", "0.6224042", "0.6186554", "0.6180548", "0.61734974", "0.6169087", "0.6165977", "0.61631733", "0.61589026", "0.6156488", "0.6139902", "0.6132752", "0.6132752", "0.6132752", "0.6132752", "0.6132752", "0.6131728", "0.612046", "0.6076097", "0.6069929", "0.60693145", "0.6067571", "0.6067456", "0.6063346", "0.60563153", "0.6052862", "0.6030646", "0.60292053", "0.60292053", "0.6028419", "0.60221046", "0.6021374", "0.6021374", "0.6021374", "0.60093665", "0.599167", "0.59808064", "0.59806204", "0.59793293", "0.5975338", "0.59567", "0.59556776", "0.5953781", "0.59478277", "0.5942651", "0.5924986", "0.5922401", "0.59223104", "0.59222436", "0.5917976", "0.59012955", "0.5897806", "0.5887822", "0.58862823", "0.58855957", "0.5881063", "0.5876749", "0.58763033", "0.5870339", "0.58643353", "0.58543044", "0.58543044", "0.58543044", "0.58543044" ]
0.7953539
0
PUT /groups/1
PUT /groups/1.xml
def update
  respond_to do |format|
    if @group.update_attributes(params[:group])
      flash[:notice] = 'Group was successfully updated.'
      format.html { redirect_to groups_path }
      format.xml  { head :ok }
    else
      format.html { render :action => "edit" }
      format.xml  { render :xml => @group.errors, :status => :unprocessable_entity }
    end
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update\n logger.info \"Put parameters: #{params.to_json}\"\n @group = Group.find(params[:id])\n\n if @group.update_attributes(params[:group])\n head :no_content\n else\n render json: @group.errors, status: :unprocessable_entity\n end\n end", "def update\n \n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to(@group, :notice => 'Group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n \n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { 
redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def UpdateGroup params = {}\n \n APICall(path: 'groups.json',method: 'PUT',payload: params.to_json)\n \n end", "def update\n @group = Group.find(params[:id])\n \n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(admin_groups_url) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = '{object} was successfully {action}.'[:object_action_notice, \"Group\"[], \"updated\"[]]\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def put(obj, which=:groups)\n path = \"/#{which}\"\n path += \"/#{obj['ID']}\" unless obj['ID'].nil? \n resp = self.class.post(path, { :body => obj })\n check_errors resp\n res = resp.parsed_response['Response']['Entry']\n rebuild_groups! res\n res\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to( view_group_path(@group.label), :notice => 'Group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find_by_param(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to(@group, :notice => 'Group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n if @group.update_attributes(params[:group])\n flash[:notice] = t('flash_msg47')\n @groups = Group.all\n # format.html { redirect_to(@group) }\n # format.xml { head :ok }\n else\n # format.html { render :action => \"edit\" }\n # format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end", "def update\n Group.rebuild! 
if nil.|Group.find(:first).rgt\n\t @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def addGroupToServer(group)\n path = \"/user/\" + @@conf[\"username\"] + \"/group/\" + group.strip\n res = HttpRequest.new(:put, path).send(@@host)\n puts res\n puts \"CODE: \" + res.code\n\nend", "def update\n @group.update(group_params)\n respond_with(@group)\n end", "def update\n if @group.update_attributes(params[:group])\n respond_with(@group, only: [:id, :name, :creator_id, :admin_id])\n else\n render_error(404, request.path, 20103, \"Failed to update group info\")\n end\n end", "def update\n @group.name = params['name']\n @group.save\n respond_to do |format|\n format.json { render :show, status: :ok, location: @group }\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, :notice => 'Group was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @user_group = UserGroup.find(params[:id])\n\n respond_to do |format|\n if @user_group.update_attributes(params[:user_group])\n flash[:notice] = \"User group <em>#{@user_group.name}</em> updated.\"\n format.html { redirect_to(@user_group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @user_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, :notice => 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = SuperSimpleCms::Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to super_simple_group_path(@group) } \n format.js { redirect_to formatted_super_simple_group_path(@group, :js) }\n format.xml { render :xml => @group, :status => :created, :location => @group }\n else\n format.html { render :action => \"new\" }\n format.js { render :action => \"new\", :layout=>false}\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @add_to_group = AddToGroup.find(params[:id])\n\n respond_to do |format|\n if @add_to_group.update_attributes(params[:add_to_group])\n format.html { redirect_to(@add_to_group, :notice => 'Add to group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @add_to_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, notice: 'Group was successfully updated.' 
}\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n group = Group.find(params[:id])\n if group.update(group_params)\n render json: group\n else\n render json: group.errors.full_messages, status: :unprocessable_entity\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update #:nodoc:\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = I18n.t(\"{{value}} was successfully updated.\", :default => \"{{value}} was successfully updated.\", :value => I18n.t(\"Group\", :default => \"Group\"))\n format.html { redirect_to groups_url }\n # format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n # format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def test_should_update_group_user_via_API_JSON\r\n # lookup user's membership\r\n get \"memberships/find.json?api_key=testapikey&user_id=4&group_id=1\"\r\n membership = JSON.parse(response.body)\r\n membership_id = membership['id']\r\n assert membership_id == 3, 'Incorrect membership id'\r\n assert membership['role_id'] == Role.find_by_rolename('user').id, 'Incorrect role id'\r\n \r\n # promote user to group admin\r\n put \"/memberships/#{membership_id}.xml\", :api_key => 'testapikey',\r\n :membership => {:user_id => 4,\r\n :group_id => 1,\r\n :role_id => Role.find_by_rolename('group_admin') }\r\n assert_response :success\r\n end", "def update\n respond_to do |format|\n if @api_v1_group_update.update(api_v1_group_update_params)\n format.html { redirect_to @api_v1_group_update, notice: 'Group update was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @api_v1_group_update }\n else\n format.html { render :edit }\n format.json { render json: @api_v1_group_update.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Le groupe a été modifié.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\n if @group.update(group_params)\n head :no_content\n else\n render json: @group.errors, status: :unprocessable_entity\n end\n end", "def update\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to((current_user and current_user.is_site_admin? and current_user != @group.users.owners.first) ? by_user_groups_path(:user_id => @group.users.owners.first.id) : groups_path) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n if request.post?\n if @group.update_attributes(params[:group])\n redirect_to :action => 'list'\n else\n render :action => 'rename'\n end\n end\n end", "def update\n @attribute_group = AttributeGroup.find(params[:id])\n\n respond_to do |format|\n if @attribute_group.update_attributes(params[:attribute_group])\n format.html { redirect_to(@attribute_group, :notice => 'Attribute group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @attribute_group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Группа обновлена!' 
}\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_group(id, params = {})\n put(\"/groups/#{id}\", params)\n end", "def update\n @group = WorkGroup.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n invite_members\n format.html { redirect_to @group, notice: t('flash.notice.groups.successfully_updated') }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @grupo = Grupo.find(params[:id])\n\n respond_to do |format|\n if @grupo.update_attributes(params[:grupo])\n flash[:notice] = \"Los datos del Grupo #{@grupo.nombre} se han actualizado.\"\n format.html { redirect_to(@grupo) }\n format.xml { head :ok }\n else\n flash[:error] = \"Hubo un error actualizando el grupo.\"\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @grupo.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n authorize @group, :update?\n respond_to do |format|\n if @group.update_attributes(permitted_params)\n format.html { redirect_to(@group, :notice => 'Group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n @group.name = params[:group][:name]\n @group.set_members(params[:member])\n\n if @group.save\n redirect_to root_path\n else\n render 'edit'\n end\n end", "def update_group(group_id, attributes)\n put(\"/v1/groups/#{group_id}\", attributes)\n end", "def generate(groups)\n groups_params = groups.inject({}) do |params, (k, v)|\n params[\"groups[#{k}]\"] = 1\n params\n end\n\n response = RouteNGN.put self.class.base_url, {:id => self.id}.merge!(groups_params)\n response.success?\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to params[:back_to], notice: 'Group was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @objectgrouptoobjectgroup = Objectgrouptoobjectgroup.find(params[:id])\n\n respond_to do |format|\n if @objectgrouptoobjectgroup.update_attributes(params[:objectgrouptoobjectgroup])\n flash[:notice] = 'Objectgrouptoobjectgroup was successfully updated.'\n format.html { redirect_to(@objectgrouptoobjectgroup) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @objectgrouptoobjectgroup.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.json { head :no_content }\n else\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_group(id, params)\n put(\"groups/#{id}\", group: params)\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:success] = \"Группа успешно отредактирована.\"\n format.html { redirect_to @group }\n format.json { head :no_content }\n else\n flash.now[:error] = \"Введены некорректные данные!\"\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(params[:group])\n format.html { redirect_to [@hub, @group], :notice => 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update_nodegroup(nodegroup_json, nodegroup_id)\n nodemgr_rest_call(\"POST\", \"classifier\", \"groups\", $credentials, id=nodegroup_id, nodegroup_json)\nend", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @esol_group = EsolGroup.find(params[:id])\n\n respond_to do |format|\n if @esol_group.update_attributes(params[:esol_group])\n format.html { redirect_to @esol_group, notice: 'Esol group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @esol_group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n @users = @group.users\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to edit_user_registration_path, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n audit(@group, \"update\", @group.name)\n format.html { redirect_to group_path(@group), notice: 'Group was successfully updated.' }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\n respond_to do |format|\n if @group.update_attributes(group_params)\n format.html { redirect_to @group.becomes(Group), notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n if @group.update_attributes(params[:group])\n flash[:notice] = \"Grupo actualizado.\"\n redirect_to groups_path\n else\n render :action => 'edit'\n end\n end", "def update\n authorize! :update, @group\n @group.creator = current_user\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n flash[:notice] = t('flash.notice.group_update') # 'Group was successfully updated.'\n format.any(:html,:iphone) { redirect_to(@group) }\n format.xml { head :ok }\n else\n format.any(:html,:iphone) { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to [@group], notice: 'group was successfully updated.' 
}\n format.json { render :show, status: :ok, location: [@group] }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find_by_guid(params[:id])\n respond_to do |format|\n if @group.update_attributes(update_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { render json: @group }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: I18n.t(:group_update) }\n format.json { render :show, status: :ok, location: @group }\n else\n flash[:alert] = @group.errors.full_messages.to_sentence\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\t\t@group = Group.find(params[:id])\n\t\[email protected]_accessor(current_user)\n\t\trespond_to do |format|\n\t\t\tif @group.update_attributes(params[:group])\n\t\t\t\tflash[:notice] = t(:ctrl_object_updated, :typeobj => t(:ctrl_group), :ident => @group.name)\n\t\t\t\tformat.html { redirect_to(@group) }\n\t\t\t\tformat.xml { head :ok }\n\t\t\telse\n\t\t\t\tflash[:error] = t(:ctrl_object_not_updated, :typeobj => t(:ctrl_group), :ident => @group.name)\n\t\t\t\tformat.html { render :action => \"edit\" }\n\t\t\t\tformat.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend", "def update\n authorize @group\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to group_path(@group), notice: \"Group was successfully updated.\" }\n format.json { render :show, status: :ok, location: @group }\n else\n format.html { render :edit }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @routinetoobjectgroup = Routinetoobjectgroup.find(params[:id])\n\n respond_to do |format|\n if @routinetoobjectgroup.update_attributes(params[:routinetoobjectgroup])\n flash[:notice] = 'Routinetoobjectgroup was successfully updated.'\n format.html { redirect_to(@routinetoobjectgroup) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @routinetoobjectgroup.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n\t\tparams[:group][:member_ids] = (params[:group][:member_ids] << @group.member_ids).flatten\n\t\t#special method update_attribute only updates explicitly stated attribute\n\t\tif @group.update_attributes(params[:group])\n\t\t\tredirect_to @group\n\t\t\tflash[:success] = \"group updated\"\n\t\tend\n\tend", "def edit\n respond_to do |format|\n format.html\n format.xml { render :xml => @group.to_xml }\n end\n end", "def edit\n respond_to do |format|\n format.html\n format.xml { render :xml => @group.to_xml }\n end\n end", "def update\n @group_of_task = GroupOfTask.find(params[:id])\n\n respond_to do |format|\n if @group_of_task.update_attributes(params[:group_of_task])\n flash[:notice] = 'GroupOfTask was successfully updated.'\n format.html { redirect_to(@group_of_task) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group_of_task.errors, :status => :unprocessable_entity }\n end\n end\n end", "def set_api_v1_group_update\n @api_v1_group_update = 
Api::V1::GroupUpdate.find(params[:id])\n end", "def update\n @group = GROUP.first_or_get!(params[:id])\n @group.current_user = current_user\n\n @group.update_children((params[:group] || {}).delete(:locales), :locale)\n\n respond_to do |format|\n if @group.update(params[:group]) or not @group.dirty?\n flash[:notice] = 'Group was successfully updated.'\n format.html { redirect_to(group_url(@group.id)) }\n format.xml { render :xml => @group }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @group.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n params[:grouping].delete :parent_id if (@grouping.root? or (params[:grouping][:parent_id].to_i == params[:id].to_i))\n \n respond_to do |format|\n if @grouping.update_attributes(params[:grouping])\n gflash :success => \"Group updated.\"\n format.html { redirect_to edit_grouping_path(@grouping) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @grouping.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n redirect_to :action => :index and return unless is_owner?\n respond_to do |format|\n if @group.update(group_params)\n format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @group = Group.find(params[:id])\n params[:group][:member_ids] = (params[:group][:member_ids] << @group.member_ids).flatten\n\n respond_to do |format|\n if @group.update_attributes(params[:group])\n format.html { redirect_to root_path }\n format.js\n # format.html { redirect_to @group, notice: 'Group was successfully updated.' }\n # format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @user_group = UserGroup.find(params[:id])\n\n respond_to do |format|\n if @user_group.update_attributes(params[:user_group])\n format.html { redirect_to @user_group, notice: 'User group was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user_group.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @user_group = UserGroup.find(params[:id])\n\n respond_to do |format|\n if @user_group.update_attributes(params[:user_group])\n format.html { redirect_to @user_group, notice: 'User group was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user_group.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @api_v1_group_update = Api::V1::GroupUpdate.new(api_v1_group_update_params)\n\n respond_to do |format|\n if @api_v1_group_update.save\n format.html { redirect_to @api_v1_group_update, notice: 'Group update was successfully created.' 
}\n format.json { render :show, status: :created, location: @api_v1_group_update }\n else\n format.html { render :new }\n format.json { render json: @api_v1_group_update.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @user = User.get!(params[:id])\n set_groups(@user, ids_to_groups(params[:groups]))\n \n respond_to do |format|\n if @user.update_attributes(params[:user]) or not @user.dirty?\n flash[:notice] = 'User was successfully updated.'\n format.html { redirect_to(user_url(@user.id)) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @user.errors, :status => :unprocessable_entity }\n end\n end\n end" ]
[ "0.6685341", "0.66725194", "0.66642857", "0.66642857", "0.664555", "0.6640031", "0.6640031", "0.6640031", "0.6640031", "0.6640031", "0.6640031", "0.6574576", "0.65679026", "0.6565279", "0.6562684", "0.654085", "0.6539202", "0.65080595", "0.6467505", "0.6426014", "0.6406329", "0.63854784", "0.63630456", "0.6327648", "0.6317496", "0.63127583", "0.630189", "0.6292167", "0.62910557", "0.62910557", "0.6288207", "0.6280016", "0.6280016", "0.6280016", "0.6277831", "0.62667733", "0.62587786", "0.62565166", "0.6216031", "0.6215447", "0.62073094", "0.62034154", "0.61934716", "0.61810184", "0.6180011", "0.6162974", "0.6145865", "0.6131345", "0.61169547", "0.6116627", "0.6109276", "0.61077535", "0.61057776", "0.61008286", "0.6093779", "0.6085668", "0.60707325", "0.60612494", "0.6056241", "0.6056241", "0.6056241", "0.6056241", "0.6056241", "0.6056241", "0.6056241", "0.6056241", "0.6056241", "0.6056241", "0.6056241", "0.6056241", "0.60561574", "0.60542125", "0.60542125", "0.60542125", "0.60542125", "0.6048727", "0.6035825", "0.6032815", "0.6031189", "0.6027062", "0.60254246", "0.60234284", "0.601099", "0.5996366", "0.59760416", "0.5968964", "0.5968598", "0.596485", "0.5964043", "0.5964043", "0.59621656", "0.5960418", "0.5960405", "0.59485", "0.5945214", "0.59318787", "0.59292614", "0.5924251", "0.5917732", "0.5914038" ]
0.6516348
17
DELETE /groups/1
DELETE /groups/1.xml
def destroy
  @group.destroy unless @group.default
  respond_to do |format|
    format.html { redirect_to(groups_url) }
    format.xml  { head :ok }
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def destroy\n Group.rebuild! if nil.|Group.find(:first).rgt\n\t @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n Group.destroy(params[:id])\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n \n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def test_set3_04b_delete_group()\n group = \"test_group\"\n user = \"test_user\"\n \n @test_acl.create_principal(user)\n @test_acl.create_group(group, [\"ALL\"], [user])\n \n query = \"doc(\\\"#{@col_path}Principals.xml\\\")//node()[@id=\\\"#{user}\\\"]/membership/mgroup[@idref=\\\"#{group}\\\"]\"\n #puts query\n handle = @db.execute_query(query)\n hits = @db.get_hits(handle)\n assert_equal(1, hits)\n \n @test_acl.delete_principal(group)\n \n query = \"doc(\\\"#{@col_path}Principals.xml\\\")//node()[@id=\\\"#{user}\\\"]/membership/mgroup[@idref=\\\"#{group}\\\"]\"\n handle = @db.execute_query(query)\n hits = @db.get_hits(handle)\n assert_equal(0, hits)\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @add_to_group = AddToGroup.find(params[:id])\n @add_to_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(add_to_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @attribute_group = AttributeGroup.find(params[:id])\n @attribute_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(attribute_groups_url) }\n format.xml { head :ok }\n end\n 
end", "def destroy\n @group = Group.find(params[:id])\n authorize @group, :destroy?\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(browse_groups_url) }\n format.xml { head :ok }\n end\n end", "def deleteGroup( group_id)\n params = Hash.new\n params['group_id'] = group_id\n return doCurl(\"delete\",\"/group\",params)\n end", "def destroy\n @ail_group = AilGroup.find(params[:id])\n @ail_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(ail_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy #:nodoc:\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n # format.xml { head :ok }\n end\n end", "def destroy\n @user_group = UserGroup.find(params[:id])\n @user_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(user_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n\n\t\t@group = Group.find(params[:id])\n\t\[email protected]\n\n\t\trespond_to do |format|\n\t\t\tformat.html { redirect_to(groups_url) }\n\t\t\tformat.xml { head :ok }\n\t\tend\n\tend", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :ok }\n format.xml { head :ok }\n end\n end", "def DeleteGroup id\n \n APICall(path: \"groups/#{id}.json\",method: 'DELETE')\n \n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n head :no_content\n end", "def destroy\n group = Group.find(params[:group_id])\n\n @subgroup = group.subgroups.find(params[:id])\n @subgroup.destroy\n\n respond_to do |format|\n format.html { redirect_to(group_subgroups_url) }\n format.xml { head :ok }\n end\n end", "def delete_group(group)\n\t\t\tend", "def delete_group(group)\n\t\t\tend", "def del(id, which=:groups)\n resp = self.class.delete(\"/#{which}/#{id}\")\n check_errors resp\n end", "def destroy\n @group_of_task = GroupOfTask.find(params[:id])\n @group_of_task.destroy\n\n respond_to do |format|\n format.html { redirect_to(group_of_tasks_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @grupo = Grupo.find(params[:id])\n @grupo.destroy\n\n respond_to do |format|\n format.html { redirect_to(grupos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = GROUP.first_or_get(params[:id])\n @group.current_user = current_user\n @group.destroy if @group\n\n respond_to do |format|\n flash[:notice] = 'Group was successfully deleted.'\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @grouping.destroy\n\n respond_to do |format|\n format.html { redirect_to(groupings_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group.destroy\n\n head :no_content\n end", "def destroy\n @record_group = RecordGroup.find(params[:id])\n @record_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(record_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n\n mbs = Membership.where \"group_id = ?\", @group.id\n\n mbs.each do |m|\n m.destroy\n end\n\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :ok }\n end\n end", "def destroy\n\n @user_group = UserGroup.find(params[:id])\n @user_group.destroy\n\n respond_to do |format|\n format.html do\n if request.env['HTTP_REFERER']\n redirect_to :back\n else\n redirect_to :controller => :user_groups, :action => :index\n end\n end\n format.xml do\n head :ok\n end\n 
end\n end", "def destroy\n GroupPermission.destroy(params[:id])\n\n respond_to do |format|\n format.html { redirect_to(group_permissions_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @contact_group = ContactGroup.find(params[:id])\n @contact_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_contact_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = WorkGroup.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @contact_group = ContactGroup.find(params[:id])\n @contact_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(contact_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @lab_group = LabGroup.find(params[:id])\n @lab_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(lab_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group = Group.find_by_guid(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def deleteEntityGroup( entity_id, gen_id)\n params = Hash.new\n params['entity_id'] = entity_id\n params['gen_id'] = gen_id\n return doCurl(\"delete\",\"/entity/group\",params)\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :ok }\n end\n end", "def destroy\n @fgroup = Fgroup.find(params[:id])\n @fgroup.destroy\n\n respond_to do |format|\n format.html { redirect_to(fgroups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @targetgroup = Targetgroup.find(params[:id])\n @targetgroup.destroy\n\n respond_to do |format|\n format.html { redirect_to(targetgroups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n redirect_to groups_path\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n redirect_to groups_path\n end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.any(:html,:iphone) { redirect_to(groups_url) }\n format.xml { head :ok }\n end\n end", "def 
delete_group(client, options)\n if options[:directory].nil? or options[:group].nil?\n puts \"Missing arguments\"\n return\n end\n\n groups = client.groups\n group = groups.get options[:group]\n group.delete\n puts \"Group deleted.\"\n return\nend", "def destroy\n @jido_grp_rel = JidoGrpRel.find(params[:id])\n @jido_grp_rel.destroy\n\n respond_to do |format|\n format.html { redirect_to(jido_grp_rels_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group_activity = GroupActivity.find(params[:id])\n @group_activity.destroy\n\n respond_to do |format|\n format.html { redirect_to(group_activities_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @expensegroup = Expensegroup.find(params[:id])\n @expensegroup.destroy\n\n respond_to do |format|\n format.html { redirect_to(expensegroups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @groups = Group.find(params[:id])\n @groups.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n\n @group_definition = GroupDefinition.find(params[:id])\n @group_definition.destroy\n\n respond_to do |format|\n format.html do\n if request.env['HTTP_REFERER']\n redirect_to(:back)\n else\n redirect_to(group_definitions_url)\n end\n end\n format.xml do\n head :ok\n end\n end\n end", "def destroy\n #@s_group.destroy\n @s_group = SGroup.find(params[:id].to_i)\n @s_group.delete_flg = 1\n @s_group.save!\n\n respond_to do |format|\n format.html { redirect_to s_groups_url, notice: 'S group was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n # @group = @hub.groups.get(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to hub_groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group.destroy\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group.destroy\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group.destroy\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group.destroy\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group.destroy\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.xml { head :ok }\n format.js { render :nothing => true }\n end\n end", "def destroy\n @routinetoobjectgroup = Routinetoobjectgroup.find(params[:id])\n @routinetoobjectgroup.destroy\n\n respond_to do |format|\n format.html { redirect_to(routinetoobjectgroups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n group = @event.group\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to group }\n format.xml { head :ok }\n end\n end", "def destroy\n @objectgrouptoobjectgroup = Objectgrouptoobjectgroup.find(params[:id])\n @objectgrouptoobjectgroup.destroy\n\n respond_to do |format|\n format.html { redirect_to(objectgrouptoobjectgroups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @esol_group = EsolGroup.find(params[:id])\n @esol_group.destroy\n\n respond_to do |format|\n format.html { redirect_to esol_groups_url }\n format.json { head :no_content }\n end\n end", "def delete_group \n Group.destroy(params[:id])\n\n respond_to do |format|\n format.html {redirect_to dashboard_path}\n end\n end", "def delete_group\r\n if request.post?\r\n @group=Group.find_by_id(params[:id], :conditions=>['account_id = ?',session[:account_id]])\r\n if @group.nil?\r\n flash[:error] = \"Invalid action.\"\r\n else\r\n flash[:success]= \"Group \" + @group.name + \" was deleted successfully \"\r\n @group.destroy\r\n @group_devices = Device.find(:all, :conditions=>['group_id=?',@group.id])\r\n for device in @group_devices\r\n device.icon_id =\"1\"\r\n device.group_id = nil\r\n device.save\r\n end\r\n end\r\n end\r\n redirect_to :action=>\"groups\"\r\n end", "def destroy\n @target_group = TargetGroup.find(params[:id])\n @target_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_target_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @transaction_group = TransactionGroup.find(params[:id])\n @transaction_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(transaction_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find_by_slug_or_id(params[:id])\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to(groups_url) }\n format.json { head :ok }\n end\n end", "def destroy\n @image_gallery_group = ImageGalleryGroup.find(params[:id])\n @image_gallery_group.destroy\n\n respond_to do |format|\n format.html { redirect_to( admin_image_gallery_groups_url ) }\n format.xml { head :ok }\n end\n 
end", "def destroy\n @group = Group.find(params[:id])\n @group.destroy\n\n respond_to do |format|\n flash[:success] = \"Группа успешно удалена.\"\n format.html { redirect_to groups_url }\n format.json { head :no_content }\n end\n end", "def destroy\n Group.delete_groups_and_acls([id])\n end", "def delete(group)\n url = build_url(group)\n response = rest_delete(url)\n response.return!\n end", "def destroy\n group = Group.find(params[:id])\n group.destroy\n render json: {}\n end", "def group_delete(attribs, dir_info)\n attribs = group_record_name_alternatives(attribs)\n\n check_critical_attribute( attribs, :record_name )\n\n command = {action: 'delete', scope: 'Groups', attribute: nil, value: nil}\n user_attrs = attribs.merge(command)\n\n dscl( user_attrs, dir_info )\n end", "def destroy\n @group = SuperSimpleCms::Group.find(params[:id])\n respond_to do |format| \n if SuperSimpleCms::Group.find(:all).length > 1\n @group.destroy \n format.html { redirect_to(super_simple_groups_url) }\n format.js { head :ok }\n format.xml { head :ok }\n else\n format.html { redirect_to(super_simple_groups_url) }\n format.js { head :failure }\n format.xml { head :failure }\n end\n end\n end", "def destroy\n @outgoing_sms_content_group = OutgoingSmsContentGroup.find(params[:id])\n @outgoing_sms_content_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(outgoing_sms_content_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group.destroy\n respond_to do |format|\n format.html { redirect_to root_path, notice: 'Le groupe a été supprimé.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @giving_group = GivingGroup.find(params[:id])\n @giving_group.destroy\n\n respond_to do |format|\n format.html { redirect_to(giving_groups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = @authorized_group\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to group_users_path(@group) }\n format.xml { head :ok }\n end\n end", "def destroy\n @group = Group.find(params[:id])\n\t\t@pages = Page.where(:category => 'group', :owner => @group.id)\n\t\[email protected] do |p|\n\t\t\tp.destroy\n\t\tend\n @group.destroy\n\n respond_to do |format|\n format.html { redirect_to groups_url }\n format.json { head :ok }\n end\n end", "def service_group_delete(a10, name)\n a10.send(:axapi, 'slb.service_group.delete', 'post', {name: name, format: 'json'})\nend", "def destroy\n @forum_group = ForumGroup.find(params[:id])\n @forum_group.destroy\n\n respond_to do |format|\n format.html { redirect_to forum_groups_url }\n format.xml { head :ok }\n end\n end", "def destroy\n @api_v1_group_update.destroy\n respond_to do |format|\n format.html { redirect_to api_v1_group_updates_url, notice: 'Group update was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @group.destroy\n respond_to do |format|\n format.html { redirect_to admin_groups_url, notice: t('activerecord.models.group') +'删除成功!' }\n format.json { head :no_content }\n end\n end", "def destroy\n begin\n group = Group.find(params[:id])\n group.destroy\n render json: { \"notice\"=>\"group deleted successfully\" }\n rescue ActiveRecord::RecordNotFound\n render json: { \"alert\"=>\"did not specify a valid id. no record deleted.\" }\n end\n end" ]
[ "0.7521902", "0.74687797", "0.74159586", "0.74159586", "0.7407865", "0.73870426", "0.738377", "0.738377", "0.738377", "0.738377", "0.738377", "0.738377", "0.738377", "0.738377", "0.73704463", "0.72693354", "0.7252211", "0.72371393", "0.71676433", "0.7165356", "0.7137911", "0.7119386", "0.7109941", "0.70823926", "0.7052093", "0.70249873", "0.70184875", "0.69859457", "0.69859457", "0.69661504", "0.69652516", "0.6954662", "0.694713", "0.6946572", "0.69286007", "0.69218946", "0.69023937", "0.6896861", "0.6879919", "0.686702", "0.68474114", "0.68417305", "0.68280894", "0.68171406", "0.68171406", "0.68171406", "0.68171406", "0.68171406", "0.68171406", "0.68135226", "0.680875", "0.68085915", "0.68085915", "0.6804624", "0.6802455", "0.68015444", "0.68015444", "0.6800033", "0.6797567", "0.6780067", "0.6777921", "0.67771596", "0.6773811", "0.67720604", "0.67720604", "0.6769902", "0.6760096", "0.67595655", "0.6758469", "0.6758469", "0.6758469", "0.6758469", "0.6758469", "0.67562836", "0.67419356", "0.67418295", "0.6739622", "0.6731823", "0.6730053", "0.67291164", "0.6728051", "0.67249596", "0.6722271", "0.67183", "0.6715347", "0.67143536", "0.67143154", "0.66992515", "0.66988796", "0.6697257", "0.66952074", "0.6693825", "0.6690623", "0.66862327", "0.6685305", "0.66814846", "0.66787124", "0.66668814", "0.66628736", "0.6635879" ]
0.7185538
18
GET /groups/1/join
GET /groups/1/join.xml
def join
  @group = Group.find(params[:id])
  if group_user = GroupUser.first(:conditions => ['group_id = ? AND user_id = ?', @group.id, current_user.id])
    if ['cancelled', 'invite_declined'].include?(group_user.status)
      group_user.request!
      flash[:notice] = 'Your membership request was sent to group owner.'
    elsif group_user.status == 'approved'
      flash[:error] = 'You are already member of this group.'
    else
      flash[:error] = 'You cannot join this group.'
    end
  else
    group_user = GroupUser.create(:group_id => @group.id, :user_id => current_user.id, :role_id => Role.find_by_name('User').id)
    group_user.request!
    flash[:notice] = 'Your membership request was sent to group owner.'
  end
  respond_to do |format|
    format.html { redirect_to(groups_url) }
    format.xml  { head :ok }
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def join\n @group = Group.find_by_id(params[:id])\n if @group\n @user.join_group(@group)\n flash[:success] = \"Join It Successed\"\n redirect_to :action => \"reports\", :id => @group\n else\n flash[:warning] = \"Not Exist Group to Join\"\n end\n end", "def join\n @person = Person.find(params[:person_id])\n @group = Group.find(params[:id])\n begin\n @person.join_group(@group.id, session[:cookie])\n flash[:notice] = [ :you_have_joined_to_group, @group.title(session[:cookie]) ]\n rescue RestClient::RequestFailed => e\n flash[:error] = message_from_error(e)\n end\n redirect_to group_path(params[:id])\n end", "def joinGroup\n id = params[:id]\n line = \"https://gtcollab.herokuapp.com/api/groups/\" + id + \"/join/\"\n \n require \"net/http\"\n require \"uri\"\n\n parsed_url = URI.parse(line)\n\n http = Net::HTTP.new(parsed_url.host, parsed_url.port)\n http.use_ssl = true\n\n req = Net::HTTP::Post.new(parsed_url.request_uri)\n\n req.add_field(\"authorization\", $token)\n\n response = http.request(req)\n response.inspect\n\n redirect_to course_path(params[:course_id], :name => params[:name], :joined => params[:joined])\n end", "def join_group(id)\n\t\t\t#id: ID of the group to join\n\t\t\toptions = {\"id\" => id}\n\t\t\tdata = oauth_request(\"/group/join\", options, \"post\")\n\t\tend", "def join_group\n @assignment = Assignment.find(params[:id])\n @grouping = Grouping.find(params[:grouping_id])\n @user = Student.find(session[:uid])\n @user.join(@grouping.id)\n m_logger = MarkusLogger.instance\n m_logger.log(\"Student '#{@user.user_name}' joined group '#{@grouping.group.group_name}'\" +\n '(accepted invitation).')\n redirect_to action: 'student_interface', id: params[:id]\n end", "def join\n @group = Group.find(params[:id])\n\n if !current_user.is_member_of?(@group)\n current_user.join!(@group)\n flash[:notice] = \"Successfully joined the group!\"\n else\n flash[:warning] = \"You are a member of this group!\"\n end\n\n redirect_to group_path(@group)\n end", "def join\n if request.post?\n if [email protected]?(current_user)\n @group.users << current_user\n flash[:success] = \"Thanks! Your membership must now be approved by an admin of #{@group.name}.\"\n\n #SEND OUT NOTIFICATION EMAILS\n @group.admins.each do |admin|\n Notifier.pending_user(admin, @group, current_user).deliver\n end\n\n redirect_to @group\n else\n flash[:error] = \"Error requesting to join #{@group.name}. 
Please try again.\"\n redirect_to @group\n end\n else #it fell back to GET (no js)\n flash[:error] = \"Please enable javascript to join.\"\n redirect_to @group\n end\n end", "def student_join_grp(course, group)\n load_course_grps course\n logger.info \"Joining group '#{group.title}'\"\n wait_for_update_and_click link_element(xpath: \"//a[contains(@aria-label,'Join group #{group.title}')]\")\n list_item_element(xpath: '//li[contains(.,\"Joined Group\")]').when_present Utils.short_wait\n end", "def join_request\n \n end", "def group_join!(group)\n affiliations.create!( :group_id => group.id )\n end", "def index\n @groups = WorkGroup.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def index\n\n @groups = Group.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def join_group\n @disc_group = DiscussionGroup.find(params[:id])\n @join = DiscussionGroupUser.new(:discussion_group_id => @disc_group.id, :user_id => @login_user.id, :is_member => 1)\n if @join.save\n respond_to do |format|\n format.js\n end\n else\n render :text=>\"Fail\"\n end\n end", "def index\n @jido_grp_rels = JidoGrpRel.find(:all)\n if params[:sort]==\"grp\"\n @jido_grp_rels = JidoGrpRel.find(:all, :order => :group_id )\n end\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @jido_grp_rels }\n end\n end", "def index\n groups = get_collection(assignment.groups) || return\n\n group_data = include_memberships(groups)\n\n respond_to do |format|\n format.xml do\n render xml: group_data.to_xml(root: 'groups', skip_types: 'true')\n end\n format.json { render json: group_data }\n end\n end", "def join\n network_invite = current_organization.network_invitations.find_by_network_id(params[:id], :include => :network)\n if network_invite.nil? 
then\n flash[:notice] = \"You must get an invite before joining that network.\"\n redirect_to networks_path\n else\n network_invite.accept!\n flash[:notice] = \"You have joined the network!\"\n redirect_to network_path(network_invite.network)\n end\n end", "def index\n\n @user_groups = UserGroup.find(:all)\n\n respond_to do |format|\n format.html { redirect_to :controller => :groups, :action => :index }\n format.xml { render :xml => @user_groups }\n end\n end", "def show\n @jido_grp_rel = JidoGrpRel.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @jido_grp_rel }\n end\n end", "def index\n respond_to do |format|\n format.html { @groups = Group.get_groups(current_user, params) }\n format.xml { render :xml => Group.get_groups(params.merge({:show => 'all'})) }\n end\n end", "def index\n respond_to do |format|\n format.html { @groups = Group.get_groups(current_user, params) }\n format.xml { render :xml => Group.get_groups(params.merge({:show => 'all'})) }\n end\n end", "def join_old\n setup_semester_filter\n setup_campuses_filter\n setup_groups\n @join = true\n\n respond_to do |format|\n format.html\n end\n end", "def index\n @groups = Group.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def index\n @groups = Group.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def index\n @giving_groups = GivingGroup.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @giving_groups }\n end\n end", "def show\n @targetgroup = Targetgroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @targetgroup }\n end\n end", "def index\n @group = Group.find(params[:id])\n @members = GroupUser.where(:group_id => params[:id])\n end", "def index\n @forum_groups = ForumGroup.find(:all)\n\n respond_to do |format|\n format.html # index.rhtml\n format.xml { render :xml => @forum_groups.to_xml }\n end\n end", "def join\n circle_id = params[:circle][:circle_id]\n circle_url = File.join(SITE_URL, \"circles/#{circle_id}\")\n\n joined = CircleUser.where(circle_id: circle_id, user_id: current_user.id).first_or_initialize\n\n respond_to do |format|\n if not joined.persisted?\n joined.save\n Circle.update_counters(params[:circle][:circle_id], people_count: 1)\n User.update_counters(current_user.id, circle_count: 1)\n mod = 1\n else\n joined.delete\n Circle.update_counters(params[:circle][:circle_id], people_count: -1)\n User.update_counters(current_user.id, circle_count: -1)\n mod = -1\n end\n format.json { render json: {'status' => mod, 'circle_url' => circle_url}, status: :created }\n end\n end", "def index\n\n @groups = Group.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n format.json { render :json => @groups }\n end\n end", "def relation_method\n :join\n end", "def join\n unless @user && @kroog && @kroog.open? 
&& current_actor.can_follow?\n flash[:warning] = \"Error: The specified circle does not exist or cannot be joined\".t\n respond_to do |format|\n format.html do\n redirect_to(:controller => 'kroogi', :action => 'join_circles', :id => params[:id])\n end\n format.js {render :update do |page|; page << 'document.location.reload(false);'; end}\n end\n return\n end\n\n # Add the relationship directly -- no invite intermediary\n Relationship.create_kroogi_relationship(:followed => @user.id, :follower => current_actor.id, :relationshiptype_id => @kroog.relationshiptype_id, :expires_at => Time.end, :skip_activity_message => true)\n\n flash[:success] = \"You have joined the %s circle of %s\" / [@user.circle_name(params[:circle].to_i), @user.login]\n PublicQuestionHelper::set_question_artist_id(@user, self, :force_show => true)\n respond_to do |wants|\n wants.html do\n redirect_to user_url_for(@user)\n end\n wants.js {}\n end\n end", "def show\n @add_to_group = AddToGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @add_to_group }\n end\n end", "def group(xml)\n Nokogiri::XSLT(File.read('assets/xsl/group.xsl')).transform(\n Nokogiri::XSLT(File.read('assets/xsl/join.xsl')).transform(xml)\n )\n end", "def get_membership(group)\n @uri = URI.parse(\"#{@api_url}/group/#{group}/member\")\n body = make_get_request\n doc = Nokogiri::HTML(body)\n members = []\n doc.xpath('//a[@class=\"member\"]').each do |m|\n members << m.text\n end\n members\n end", "def index\n @group_users = @parent.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @group_users }\n end\n end", "def show\n group = Group.find(params[:group_id])\n\n @subgroup = group.subgroups.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n\n format.xml { render :xml => @subgroup }\n end\n end", "def index\n @links = {}\n @link_group_opts.each do |lo|\n links = Link.find_all_by_group(lo)\n @links[lo] = links\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @links }\n end\n end", "def index\n @groups = Group.all\n @title = \"Groups\"\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def test_should_join_a_group_as_user_via_API_JSON\r\n get \"/logout\"\r\n post \"/memberships.json\", :api_key => 'testapikey',\r\n :group_id => 1,\r\n :user_id => 3\r\n assert_response :created\r\n membership = JSON.parse(response.body)\r\n assert membership['user_id'] == 3, 'Incorrect user id'\r\n assert membership['group_id'] == 1, 'Incorrect group id'\r\n assert membership['role_id'].to_i == Role.find_by_rolename('user').id, 'Incorrect role id' \r\n end", "def new\n @group = Group.new\n # @group_user = @current_user.join_group(@group)\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @group_user }\n end\n end", "def show\n @target_group = TargetGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @target_group }\n end\n end", "def join(host, port, only_join=false)\n puts \"Join request is received from (#{host}, #{port})\"\n tmp_remote_nodes = @calendar_network.remote_nodes.dup\n \n # add given node to the list\n @calendar_network.remote_nodes |= [Node.new(host, port)]\n\n # change the calendar_network to online mode if it is not\n @calendar_network.offline_mode = false\n\n if only_join\n # this means requester does not need list of (host, port)\n # it 
just asks remote node to add given (host, port) pair to their list\n puts \"Joining is done. \" + @calendar_network.remote_nodes.inspect\n return true\n else\n puts \"Informing other remote nodes...\"\n # inform other online nodes\n tmp_remote_nodes.each do |remote_node|\n\tputs \"Informing #{remote_node.to_s}\"\n remote_server = xml_rpc_client(remote_node.address, @calendar_network.config[\"path\"], remote_node.port)\n remote_server.call(\"calendar_network.join\", host, port, true)\n end\n\n puts \"Joining is done. \" + @calendar_network.remote_nodes.inspect\n return tmp_remote_nodes.collect{|node| [node.address, node.port]}\n end\n rescue Exception => e\n puts e.message\n return false\n end", "def index\n @groups = Group.where('owner_id' => current_user.id)\n @memberships = Membership.all\n respond_with(@groups, @memberships)\n end", "def fetch_group\n @group = Group.find(params[:group_id])\n end", "def get_associations(unit_id, group_id)\n debug_msg \"Getting association list for group #{group_id} on node #{unit_id}\"\n self.send_cmd [\n Constants::Framing::PKT_START,\n Constants::FunctionClass::SEND_DATA,\n unit_id,\n 3, # length of command (class, command, one argument)\n Constants::CommandClass::ASSOCIATION,\n Constants::Command::Association::GET,\n group_id,\n 0x25, # from reversing, no idea what this is\n next_callback_id\n ]\n p read_response(true) # this one is somehow tied to the orginal request, it has the callback ID in it\n p read_response(true)\n end", "def members\n @group = Group.find(params[:group_id])\n @members = @group.users\n end", "def show\n @group_list = GroupList.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group_list }\n end\n end", "def group_info\n group_id_param = params[:group_id]\n\n if group_id_param.nil? || group_id_param.blank?\n render json: { error: 'group_id not specified.' }, status: :bad_request\n return\n end\n\n group = CanvasSpaces.GroupCategory\n .groups\n .where('groups.id = ?', group_id_param)\n .eager_load(:users)\n .first\n if group.nil?\n render json: { error: 'No such group found.' }, status: :not_found\n else\n maillist = get_maillist_for_space(group.id)\n render json: { id: group.id,\n name: group.name,\n description: group.description,\n maillist: maillist,\n leader_id: group.leader_id,\n created_at: group.created_at,\n join_type: display_join_type(group.join_level),\n size: group.users.count\n },\n status: :ok\n end\n end", "def show\n @group = Group.find(params[:id])\n @members = @group.member\n @project = @group.project\n @events = @group.event\n \n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group.to_xml }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group.to_xml }\n end\n end", "def link\n @title = \"Group Connections\"\n if request.get?\n render :link\n elsif request.xhr? 
#REQUEST LINK\n response_text = {:flash => {}, :text => {}}\n @group2 = Group.find(params[:group][:id])\n if @group.connect(@group2)\n response_text[:flash] = \"Pending\"\n\n #SEND OUT NOTIFICATION EMAILS\n @group2.admins.each do |admin|\n Notifier.pending_group(admin, @group2, @group).deliver\n end\n else\n response_text[:flash] = \"Error.\"\n end\n render :json => response_text\n return\n elsif request.post?\n @group2 = Group.find(params[:group][:id])\n if @group.connect(@group2)\n flash[:success] = \"Thanks. #{@group2.name} must now approve the connection.\"\n\n #SEND OUT NOTIFICATION EMAILS\n @group2.admins.each do |admin|\n Notifier.pending_group(admin, @group2, @group).deliver\n end\n else\n flash[:error] = \"Error connecting with #{@group2.name}. Please try again.\"\n end\n redirect_to @group\n else\n flash[:error] = \"Error connecting with #{@group2.name}. Please try again.\"\n redirect_to @group\n end\n end", "def join\n if logged_in?\n @group_wanted = Group.find(params[:id]) # later change to params[:name]\n @country_wanted = Country.find(@group_wanted.country_id, :limit => 1)\n #match = current_user.countries.find(@country_wanted.id)\n #unless current_user.has_country?(@country_wanted.id) #countries.find(@country_wanted.id)\n #@matches = current_user.countries\n @match = Country.find_by_sql([\"SELECT * FROM countries_users WHERE user_id = ? AND country_id = ?\", current_user.id, @country_wanted.id])\n if @match.empty? #current_user.countries.find(@country_wanted.id).nil? #current_user.has_country?(@country_wanted.id) \n flash[:notice] = \"Joined!\"\n current_user.groups << @group_wanted # add group\n current_user.countries << @country_wanted # add country\n redirect_to '/perfil'\n else\n flash[:error] = \"You're either already subscribed to a group in the same country or in the same group.\"\n redirect_to :action => 'index'\n end \n end \n end", "def show\n @group = WorkGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def index\n @groups = Group.paginate :page => (params[:page]||1), :order => 'name ASC', :per_page => 10\n \n @self_member_groups = Group.find(User.find(current_user).group_members.map(&:group_id)) \n @self_created_groups = User.find(current_user).groups\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def new\n id = params[:format]\n line = \"https://gtcollab.herokuapp.com/api/groups/\" + id + \"/join/\"\n \n #puts line\n #puts $token\n\n require \"net/http\"\n require \"uri\"\n\n parsed_url = URI.parse(line)\n\n http = Net::HTTP.new(parsed_url.host, parsed_url.port)\n http.use_ssl = true\n\n req = Net::HTTP::Post.new(parsed_url.request_uri)\n\n req.add_field(\"authorization\", $token)\n\n response = http.request(req)\n response.inspect\n\n #puts response.body\n redirect_to courses_path\n end", "def show\n @group = Group.find(params[:id])\n @meetups = Meetup.where(\"group_id = ?\", @group.id)\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @group }\n end\n end", "def index\n authorize Group\n render :json => @group.group_memberships\n end", "def index\n @group = Group.find params[:group_id]\n \n @concierts = @group.concierts\n end", "def index\n @group = Group.find(params[:group_id])\n @surveys = @group.surveys\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @surveys }\n end\n end", "def index\n if params[:group_id]\n @members = 
Group.find(params[:group_id]).members\n else\n @members = Member.all\n end\n end", "def index\n @user_groups = UserGroup.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @user_groups }\n end\n end", "def join_url(username, role, key=nil, options={})\n server = BigbluebuttonRails.configuration.select_server.call(self, :join_meeting_url)\n\n pass = case role\n when :moderator\n self.moderator_api_password\n when :attendee\n self.attendee_api_password\n when :guest\n if BigbluebuttonRails.configuration.guest_support\n options = { guest: true }.merge(options)\n end\n self.attendee_api_password\n else\n map_key_to_internal_password(key)\n end\n\n r = server.api.join_meeting_url(self.meetingid, username, pass, options)\n r.strip! unless r.nil?\n r\n end", "def is_in_group_url\n returnGroup = []\n group = query_root_node(\"fao:isInGroup/@rdf:resource\", @@NAMESPACES)\n if !group.nil?\n group.each do |entry|\n returnGroup << entry.to_s\n end\n end\n returnGroup\n end", "def test_should_join_a_group_as_admin_via_API_JSON\r\n get \"/logout\"\r\n post \"/memberships.json\", :api_key => 'testapikey',\r\n :group_id => 1,\r\n :user_id => 1\r\n assert_response :created\r\n membership = JSON.parse(response.body)\r\n assert membership['user_id'] == 1, 'Incorrect user id'\r\n assert membership['group_id'] == 1, 'Incorrect group id'\r\n assert membership['role_id'].to_i == Role.find_by_rolename('group_admin').id, 'Incorrect role id' \r\n end", "def show\n get_target_groups\n end", "def group_ids_by_name\n reversed = assignment.groups.pluck(:group_name, :id).to_h\n respond_to do |format|\n format.xml do\n render xml: reversed.to_xml(root: 'groups', skip_types: 'true')\n end\n format.json do\n render json: reversed.to_json\n end\n end\n end", "def index\n\t#Once sessions are implemented, return all groups where the user has a priveledge\n\t#A table including all subgroups will be generated.\n\tGroup.rebuild! 
if nil.|Group.find(:first).rgt\n\n\t@groups = current_user.get_unique_group_branches\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groups }\n end\n end", "def index\r\n return jump_to(\"/groups/all\") unless has_login?\r\n \r\n account_id = session[:account_id]\r\n \r\n account_setting = AccountSetting.get_account_setting(account_id)\r\n module_group_index = account_setting.get_setting_value(:module_group_index)\r\n \r\n url = case module_group_index\r\n when \"all\"\r\n \"all\"\r\n when \"recent\"\r\n \"recent/#{account_id}\"\r\n when \"join\"\r\n \"list/#{account_id}\"\r\n when \"admin\"\r\n \"list_admin/#{account_id}\"\r\n else\r\n \"recent/#{account_id}\"\r\n end\r\n jump_to(\"/groups/#{url}\")\r\n end", "def groups\n \n \n @groups = @current_user.groups\n render 'groups.json.jbuilder', status: :ok\n end", "def show\n @group = Group.find(params[:id]) #Sicherheitsrisiko ohne @user.groups.find ?\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def do_join\n flash.keep\n begin\n # Use group invitation\n group = Group.find_by(invitation_code: params[:group][:invitation_code])\n # Fall back to normal user invitation\n if group.blank?\n invitation = GroupInvitation.get_by_invitation_code(\n params[:group][:invitation_code]\n )\n invitation.use_invitation\n group = invitation.group\n end\n group.join_group(current_user, 'member')\n flash[:notice] = 'Group joined!'\n redirect_to(action: 'show', id: group.id) && return\n rescue\n flash[:error] = 'The provided invitation code has already been used.'\n redirect_to(action: 'join') && return\n end\n end", "def invite\n @group = Group.find(params[:id]) || not_found\n @group_invitation = GroupInvitation.new\n @invite_link = \"#{request.protocol}#{request.host_with_port}\"\\\n \"#{new_user_registration_path}/group/#{@group.invitation_code}\"\n end", "def index\n @group_id = params[:group_id]\n logger.info \"the group id is : #{@group_id}\"\n if @group_id \n @topics = Topic.where(:group_id => params[:group_id])\n @group = Group.find(@group_id)\n else\n @topics = Topic.all\n end\n #logger.info \"the topics has #{@topics}\"\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @topics }\n end\n end", "def index\n groupname = params[:groupname]\n @groups = Group.all\n @users = User.all\n end", "def join\n end", "def show\n @group = Group.includes(:users).find(params[:id])\n @group_member = GroupMember.new\n @group_member.group = @group\n\n if params[:from_reading_list]\n @reading_list = ReadingList.find(params[:from_reading_list])\n else\n @reading_list = nil\n end\n\n respond_to do |format|\n if @group.owner == current_user\n format.html\n format.json { render json: @group }\n else\n msg = \"You are not authorized to access group #{params[:id]}.\"\n format.html { redirect_to groups_url, alert: msg }\n format.json { render json: msg }\n end\n end\n end", "def list_group\n RequestListGroup.where(:request_id => id).last\n end", "def join\n payload = { \"id\" => id }.to_json\n data = client.post \"#{api_prefix}/user/#{client.user.id}/rooms\", payload\n\n self\n end", "def show\n @group = Group.find(params[:id])\n\n @list_group_subgroups = @group.group_subgroups.paginate :page => params[:subgroups_page], :per_page => 10\n @list_group_members = @group.group_members.paginate :page => params[:members_page], :per_page => 10\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", 
"def li__my_groups(access_keys)\n access_token = OAuth::AccessToken.new(get_linkedin_consumer(3), access_keys[:access_token], access_keys[:access_secret])\n \n \n # -- X. WITH 'posts' -> to be used as part of automatically determining the location of the group (for targeted groups)\n # json_groups = _get_linkedin(access_token, \"people/~/group-memberships:(group:(id,name,num-members,short-description,small-logo-url,large-logo-url,website-url,site-group-url,posts,counts-by-category,locale,location),membership-state)?format=json&start=0&count=1000\")\n \n \n # -- X. WITHOUT 'posts'\n json_groups = _get_linkedin(access_token, \"people/~/group-memberships:(group:(id,name,num-members,short-description,small-logo-url,large-logo-url,website-url,site-group-url,counts-by-category,locale,location),membership-state)?format=json&start=0&count=1000\")\n \n \n # response = access_token.get(\"http://api.linkedin.com/v1/people/~/group-memberships:(group:(id,name,num-members,small-logo-url),membership-state,show-group-logo-in-profile,allow-messages-from-members,email-digest-frequency,email-announcements-from-managers,email-for-every-new-post)?format=json&start=0&count=1000\")\n # response = access_token.get(\"http://api.linkedin.com/v1/groups::(5049608,5112233,5161898):(id,name,site-group-url,posts)?format=json\")\n \n json_groups\n end", "def show\n\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def index\n @people = Person.all\n @group = Group.find(params[:group_id]) \n # render :layout => 'admin'\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @people }\n end\n end", "def index\n #@groupings = Grouping.all\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @groupings }\n end\n end", "def join\n @room = OnlineMeetingRoom.find_by_id(params[:id])\n role = @room.user_role(current_user)\n unless role == :denied\n join_internal(current_user.full_name, role, :join)\n else\n flash[:notice] = \"#{t('access_denied')}\"\n redirect_to :index and return\n end\n end", "def show\n @join_section = JoinSection.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @join_section }\n end\n end", "def index\n id = -1 \n if params[:id] \n id = params[:id]\n else \n id = current_user.id \n end \n\n @user = User.find(id)\n @group = UserGroup.find_by(user_id: id, name: params[:group])\n\n if [email protected]?\n @group_members = get_members(@user, @group)\n end \n\n end", "def new\n @jido_grp_rel = JidoGrpRel.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @jido_grp_rel }\n end\n end", "def get_group\n send_request(FUNCTION_GET_GROUP, [], '', 4, 'k4')\n end", "def get_groups(params)\n send_get \"get_groups\", params\n end", "def show_activities\n @group = Group.find(params[:id])\n end", "def index\n @group = @authorized_group\n @candidates = @group.candidates\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @candidates }\n end\n end", "def index\n @all_groups = Group.joins(:users).where(\"memberships.user_id = ? 
and memberships.acceptance_status = ?\" , current_user, true)\n end", "def resumes_joining\n\t @resumes = ReqMatch.where(:status => \"JOINING\")\n\t\trender \"resumes/joining\"\n\tend", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end", "def show\n @group = Group.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @group }\n end\n end" ]
[ "0.6410519", "0.63028866", "0.6271348", "0.61650723", "0.61116976", "0.5949017", "0.5712206", "0.5645062", "0.56180453", "0.5455591", "0.54521614", "0.5450662", "0.5440575", "0.5428224", "0.5402525", "0.53564525", "0.5349451", "0.5332109", "0.53220916", "0.53220916", "0.53058", "0.5286577", "0.5286577", "0.5246094", "0.5242165", "0.5235075", "0.5210438", "0.52093965", "0.5199436", "0.5191961", "0.5187102", "0.5185614", "0.517942", "0.51758885", "0.5171015", "0.5161332", "0.51594603", "0.5158954", "0.515693", "0.51517856", "0.5149573", "0.5145024", "0.51414657", "0.5132222", "0.5130216", "0.51255894", "0.51251924", "0.51198053", "0.5109618", "0.51065624", "0.51065624", "0.5101311", "0.509489", "0.5073047", "0.506968", "0.5065457", "0.505948", "0.5058316", "0.50427973", "0.5039191", "0.5029215", "0.50282675", "0.50202686", "0.50127727", "0.5012012", "0.50104743", "0.50078857", "0.5005405", "0.5000785", "0.49979085", "0.49978602", "0.49930024", "0.49690014", "0.49664593", "0.49646077", "0.4964041", "0.49582273", "0.49578986", "0.49562332", "0.49523297", "0.4949906", "0.494939", "0.49472153", "0.49471983", "0.49458078", "0.49430245", "0.49380314", "0.493751", "0.49350536", "0.49319804", "0.49298412", "0.49260303", "0.49223852", "0.49192423", "0.49186763", "0.49186763", "0.49186763", "0.49186763", "0.49186763" ]
0.63111734
1
The view contains a more detailed account_setup form (step 2 signup process).
def account_setup # GET 'admins#account_setup'
  authenticate_admin
  if @current_admin == nil
    redirect_to root_path
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def account_complete # POST '/admin/account_setup'\n @admin = Admin.find(session[:admin_id])\n\n # This section of the site is built for new admin accounts.\n # Update the admin attributes with more detailed information from the\n # account_setup (step 2) form, && update the their first store which was\n # instantiated at the account_create action.\n if @admin.update(admin_params) && @admin.stores.first.update(store_params)\n ## Sign-up is successful and admin dashboard is loaded.\n redirect_to admin_path\n else\n render :account_setup\n end\n end", "def account_for\n wizard_step(STEPS) { { setup_step: :setup_step, next_step: :account_for_next_step } }\n end", "def account_details\n wizard_step(STEPS) { { next_step: :account_details_next_step } }\n end", "def user_details\n wizard_step(STEPS) { { after_merge: :save_account } }\n end", "def account_details_next_step\n return STEPS if AccountType.individual?(@account.account_type)\n\n accounts_registration_user_details_path\n end", "def setup_step\n @post_path = wizard_post_path\n @account = wizard_load || Account.new\n end", "def sign_up\n self.sign_up_link\n CreateNewAccount.new @browser\n end", "def submit_signup_details_without_password\n user = Users.signup_user_without_password\n enter_signup_details(user)\n # @pages.page_home.signup_register_button\n end", "def index\n @user = current_user\n render 'signup/step2'\n end", "def signup\n end", "def signup\n end", "def signup_info\n end", "def second_step\n\t\trender :layout => 'signup' \n\t\tend", "def new\n session[:signup_params] ||= {}\n @user = User.new(session[:signup_params])\n @user.signup_current_step = session[:signup_step]\t\n# @company = @user.build_company\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user }\n end\n end", "def signup\n @teacher = Teacher.new\n render 'signup', :layout => false\n end", "def setup \n \t@user = User.create(email: '[email protected]', password: '123456789', \n \t\tpassword_confirmation: '123456789')\n \[email protected][\"devise.mapping\"] = Devise.mappings[:user]\n \t@user1 = users(:one)\n \[email protected] = accounts(:one)\n end", "def show\n set_account\n end", "def new\n if current_user\n flash[:notice] = 'you are already signed up'\n redirect_to home_path\n else\n @user = User.new\n render 'signup/step1'\n end\n end", "def signup_step1\n unless defined? params[:signup_email] or params[:signup_password]\n flash[:error] = \"Fields can't be blank !\"\n render :js => form_err_js(:signup_email, \"Please put your email and password. Fields can't be blank !\")\n else\n\n @email = params[:signup_email] if defined? params[:signup_email]\n @password = params[:signup_password] if defined? params[:signup_password]\n\n if defined? 
params[:type]\n case params[:type]\n when 'Student'\n @user = Student.create(:email => @email, :password => @password, :password_confirmation => @password)\n when 'Teacher'\n @user = Teacher.create(:email => @email, :password => @password, :password_confirmation => @password)\n when 'Coach'\n @user = Coach.create(:email => @email, :password => @password, :password_confirmation => @password)\n end\n redirect_to root_path, notice: \"Please check your Inbox for Account Confirmation Mail!\" if @user\n end\n end\n end", "def signup\r\n\t@title = \"Sign Up\"\r\n end", "def show\n @user = current_user\n @organization = Organization.find(session[:organization_id])\n render_wizard\n end", "def new\n @title = \"Signup Page\"\n @user = User.new\n end", "def show\n @user = current_user\n render_wizard\n end", "def show\n @user = current_user\n render_wizard\n end", "def show\n @user = current_user\n render_wizard\n end", "def new\n @sign_up = SignUp.new\n end", "def signup\n @user = User.new\n # render :layout => 'framed'\n end", "def account\n # It is just for rendering the view\n end", "def prep_signup_view(tenant=nil, user=nil, coupon={coupon:''})\n @user = klass_option_obj( User, user )\n @tenant = klass_option_obj( Tenant, tenant )\n @coupon = coupon # if ::Milia.use_coupon\n end", "def show_account_configuration\n bla = Account.new @base\n puts \"done #{bla}\"\n end", "def show\n\t\t@user = current_user\n\t\trender_wizard\n\tend", "def do_signup\n hobo_do_signup do\n if this.errors.blank?\n #flash[:notice] << \"You must activate your account before you can log in. Please check your email.\"\n flash[:notice] << \" Your account has been created.\"\n\n # FIXME: remove these two lines after you get email working reliably\n # and before your application leaves its sandbox...\n #secret_path = user_activate_path :id=>this.id, :key => this.lifecycle.key\n #flash[:notice] << \"The 'secret' link that was just emailed was: <a id='activation-link' href='#{secret_path}'>#{secret_path}</a>.\"\n else\n flash[:notice] = @this.errors.full_messages.join(\"<br/>\")\n logger.info \"error is \" + flash[:notice]\n end\n\n end\n end", "def profile\n account_id = params[:account_id].to_i if (params[:account_id] != nil)\n account = SystemTrack::AccountsProxy.new.account(session, account_id)\n form = AccountEntryPage.new(session, account)\n render text: form.render\n end", "def new\n super\n @title = \"Sign up\"\n logger.debug \"\\n\\t RegistrationsController#new \\n\\n\"\n end", "def company_registered\n wizard_company_step(accounts_registration_address_url)\n end", "def sign_up\n request_params = {\n host_url_with_protocol: host_url_with_protocol,\n host_url: host_url,\n entity_type: GlobalConstant::TemplateType.registration_template_type\n }\n service_response = GlobalConstant::StTokenSale.get_client_details(request_params)\n\n # Check if error present or not?\n unless service_response.success?\n render_error_response(service_response)\n return\n end\n\n @presenter_obj = ::Web::Client::Setup.new(service_response, params)\n\n redirect_to '/token-sale-blocked-region', status: GlobalConstant::ErrorCode.temporary_redirect and return if @presenter_obj.is_blacklisted_ip?(get_ip_to_aml_countries)\n redirect_to \"/login\", status: GlobalConstant::ErrorCode.temporary_redirect and return if @presenter_obj.has_registration_ended?\n set_page_meta_info(@presenter_obj.custom_meta_tags)\n end", "def show\n case step\n when :verify_mobile\n skip_step if current_user.mobile_verified?\n # TODO: Remove this once SMS provider has been 
added\n @otp = Redis.current&.get(current_user.id)\n when :enter_address\n skip_step if current_user.address_entered?\n @address = Address.find_or_initialize_by(user: current_user, address_type: :permanent)\n when :upload_docs\n skip_step if current_user.docs_uploaded?\n @user_document = UserDocument.find_or_initialize_by(user: current_user)\n end\n render_wizard\n end", "def sign_up\n link(:id=>\"navigation_anon_signup_link\").click\n CreateNewAccount.new @browser\n end", "def new\n\t@user = User.new\n\t@title = \"Sign up\"\n end", "def new\n @user_signup = User.new\n @is_signup = true\n end", "def new\n #showing the registration page\n end", "def registration\n @saas = SETTINGS['saas_registration_mode']\n @user = User.new\n initialize_registration_form\n @errors = {\n :general => [],\n :subjects => [],\n :policies => [],\n :purchase => []\n }\n end", "def set_sign_up\n @sign_up = SignUp.find(params[:id])\n end", "def set_sign_up\n @sign_up = SignUp.find(params[:id])\n end", "def set_signup\n @signup = Signup.find(params[:id])\n end", "def set_signup\n @signup = Signup.find(params[:id])\n end", "def set_signup\n @signup = Signup.find(params[:id])\n end", "def setup\n # If account setup, redirect to Studies page\n # if @me.setup\n # flash.alert = \"Error: Page not accessible\"\n # redirect_to studies_path\n # end\n end", "def mera25\n\t # combined signup for DiEMers and non-DiEMers\n\t\t# through multi-step\n\t\t# this shows the start page of the process\n\t I18n.locale = :el\n\t\t@user = current_user || User.new\n\t\tif current_user and current_user.is_mera25_member?\n\t\t # no signup needed\n\t\t\tredirect_to mera25_welcome_path, layout: 'picture_background' and return\n\t\telse\n\t\t @user.full_legal_name = @user.name\n\t\t @user.language = LANGUAGE_NAME[I18n.locale]\n\t\t @user.refer = params[:refer].gsub(/[^A-z0-9]/, '') if params[:refer]\n\t\t render layout: 'mera25'\n\t\tend\n\tend", "def show\n success_response(message: 'Account', data: { user_account: @user_account })\n end", "def form_setup\n\tend", "def create_accounts\n end", "def new\n @title = \"Sign up\"\n @user = User.new\n end", "def create\n @site_id = Account::Site.id_from_subdomain(current_subdomain) # mark as belonging to *Vault\n @account.affiliate = SubscriptionAffiliate.find_by_token(cookies[:affiliate]) unless cookies[:affiliate].blank?\n @account.site_id = @user.site_id = @site_id\n \n # Taken from users controller to support email activation\n cookies.delete :auth_token\n # protects against session fixation attacks, wreaks havoc with \n # request forgery protection.\n # uncomment at your own risk\n # reset_session\n\n # Using email registration?\n @user.registration_required = false\n \n # Some subscriptions use Captcha in signup form\n @hide_errors = params[:hide_errors]\n\n @account.user ||= @user\n @account.name = @user.full_name\n \n # Do custom validation of account fields\n \n if @account.company_name.blank?\n @account.errors.add(:company_name, \"Please enter your company name\")\n end\n if @account.phone_number.blank?\n @account.errors.add(:phone_number, \"Please enter a contact phone number\")\n end\n \n Rails.logger.debug \"Creating account #{@account.inspect}\"\n Rails.logger.debug \"Account user #{@account.user.inspect}\"\n Rails.logger.debug \"Profile: #{@user.profile.inspect}\"\n \n @success = false\n begin\n Account.transaction do\n @user.profile ||= Profile.new\n @account.save!\n @success = true\n end\n rescue ActiveRecord::RecordInvalid => e\n flash[:error] = \"There are errors in your input. 
Please correct them and try again\"\n Rails.logger.error \"#{e.class} #{e.message}\"\n rescue Exception => e\n Rails.logger.error \"#{e.class} #{e.message}\" \n end\n \n if @success\n @user.register!\n @user.activate!\n \n # Set session for choose_plan\n session[:account_id] = @account.id\n redirect_to url_for(:action => :plans)\n else # @account not saved\n # Need to reload it otherwise form action will be update!?\n # Happens on transaction ROLLBACK\n unless @account.new_record?\n @account = Account.new(params[:account])\n if @account.users.any?\n @user = @account.users.first\n else\n @account.user = @user = @account.users.build\n end\n end\n @terms_of_service = @user.terms_of_service == \"1\"\n @invitation_token = params[:invitation_token] rescue nil\n render :action => :new\n end\n end", "def account\n @user = current_user\n add_breadcrumb @user.name, :backend_account_path\n\n render :show\n end", "def call\n add_fields(confirm_registration: true)\n end", "def account\n\n end", "def eventcodesignup_step2\n @user = current_user\nend", "def new\r\n jump_to(\"/profiles/#{session[:account_id]}/basic\")\r\n end", "def signup\n # save the google ad tracking to a marketing hash in the session -- delete a session if exist\n session.delete(:marketing) unless session[:marketing].nil?\n # check for google marketing info to store for the member\n if params[:utm_source]\n marketing = { :campaign_source => params[:utm_source], :campaign_medium => params[:utm_medium], \n :campaign_name => params[:utm_campaign] }\n session[:marketing] = marketing\n end\n @signup_form = SignupForm.new\n end", "def new\n if logged_in?\n # User who has logged in has no need to access create action\n redirect_to current_user\n else\n # Get ready to display signup page by declaring instance var\n \t @user = User.new\n end\n end", "def set_sign_up_user\n @sign_up_user = SignUpUser.find(params[:id])\n end", "def new\n @account = current_user.accounts.new\n end", "def create\r\n @account = Account.new(params[:account])\r\n @account.updating_password = true\r\n\r\n\r\n @family_type = FamilyType.find(params[:family_type])\r\n @account.family_type = @family_type\r\n\r\n\r\n if @account.save\r\n sign_in @account\r\n flash[:success] = \"Account created successfully!\"\r\n redirect_to @account\r\n else\r\n @account.creating_account = true\r\n @family_types = FamilyType.all\r\n @family_type = FamilyType.find(params[:family_type])\r\n render 'new'\r\n end\r\n end", "def set_user_details\n api_response = AccountsApi::Users.user(\n account_id: current_user.account_id,\n account_user_id: current_user.user_id\n )\n @user = AccountDetails::User.new(api_response)\n end", "def new\n @signup = Signup.new\n\n respond_to do |format|\n format.html # new.html.erb\n end\n end", "def new\n @user = User.new\n @is_signup = true\n end", "def get_signup(req)\n render template_path()\n end", "def new\n @title = \"Sign up\"\n @user= User.new\n end", "def create_account\n\n end", "def create\n @analytic_account = AnalyticAccount.new(analytic_account_params)\n\n respond_to do |format|\n if @analytic_account.save\n format.html { redirect_to wizard_index_path, notice: 'Analytic account was successfully created.' 
}\n\n else\n format.html { render :new }\n format.json { render json: @analytic_account.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @user = User.where(email: sign_up_params[:email]).first || User.new(sign_up_params)\n @account = Account.new(subdomain: sign_up_params[:subdomain])\n\n if @account.valid?\n\n # Don't send the typical confirmation email. We need to relate to the\n # account first as its subdomain is required in the email link.\n @user.skip_confirmation_notification!\n @user.save if @user.new_record?\n\n if resource.persisted?\n if @account.save\n\n # relate the user to the new account\n @account.account_users.create(user: @user, is_owner: true)\n\n Hanumi::OnboardingMailer.welcome_account(@account, @user).deliver_now\n\n if resource.active_for_authentication?\n set_flash_message! :notice, :signed_up\n sign_up(resource_name, resource)\n respond_with resource, location: after_sign_up_path_for(resource, @account)\n else\n set_flash_message! :notice, :\"signed_up_but_#{resource.inactive_message}\"\n expire_data_after_sign_in!\n respond_with resource, location: after_inactive_sign_up_path_for(resource, @account)\n end\n else\n @account.errors.each { |k, v| resource.errors.add(k, v) }\n flash[:alert] = resource.errors\n respond_with resource, location: new_user_registration_path\n end\n\n else\n clean_up_passwords resource\n set_minimum_password_length\n flash[:alert] = resource.errors\n respond_with resource, location: new_user_registration_path\n end\n else\n\n @account.errors.each { |k, v| resource.errors.add(k, v) }\n flash[:alert] = resource.errors\n respond_with resource, location: new_user_registration_path\n end\n end", "def signup \n @page_title = 'Sign Up'\n return if generate_blank\n params[:user][:email].downcase!\n @user = User.new(params[:user]) \n if params[:account_type].nil?\n flash[:warning] = \"Please select a user type (student/renter/instructor).\"\n return\n end\n User.transaction(@user) do\n @user.new_password = true\n\n unless LoginEngine.config(:use_email_notification) and LoginEngine.config(:confirm_account)\n @user.email_verified = true\n end\n \n if @user.save\t\n @group = Group.find_by_group_name(params[:account_type]) \n @user.add_group @group\n key = @user.generate_security_token\n url = url_for(:action => 'home', :user_id => @user.id, :key => key)\n UserNotify.deliver_signup(@user, params[:user][:password], url)\n\n flash[:notice] = 'Signup successful! 
Please check your email at '\n flash[:notice] << @user.email + ' and confirm your membership before using the system.'\n @session[:user] = nil\n redirect_to '/'\n end\n end\n end", "def create\n @signup = Signup.new(params[:signup])\n \n respond_to do |format|\n if @signup.save\n format.html { redirect_to(root_url, :notice => 'Signup was successfully created.') }\n format.xml { render :xml => @signup, :status => :created, :location => @signup }\n else\n format.html { render :action => \"index\" }\n format.xml { render :xml => @signup.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n\t\t@path = [link_to_signup]\n\t\t@subnavigation = []\n\n cookies.delete :auth_token\n # protects against session fixation attacks, wreaks havoc with \n # request forgery protection.\n # uncomment at your own risk\n # reset_session\n\n @user = User.new(params[:user])\n\t\n\t\t# for the user details\n\t\t@user_detail = UserDetail.new(params[:user_detail])\n\t\t\n\t\[email protected]_detail = @user_detail\n\t\t\n\t\t# what will be the login name be\n\t\tif params[:last_name_check] == \"1\"\n\t\t\[email protected] = @user_detail.last_name\n\t\telse\n\t\t\[email protected] = @user_detail.first_name + \" \" + @user_detail.last_name\n\t\tend\n\n\n\t\tvalid1 = @user.valid?\n\t\tvalid2 = @user_detail.valid?\n\n\t\tif valid1 && valid2\n\t\t\[email protected]!\n\t\n\t\t\t@user_detail.save!\n\t\t\n self.current_user = @user\n redirect_back_or_default('/')\n flash[:notice] = \"Thanks for signing up!\"\n else\n render :action => 'new'\n end\n end", "def new\n\t@title = \"Sign Up\"\n\t@user = User.new\n end", "def new\n respond_with(account)\n end", "def create\n @account = current_user.accounts.new(account_params)\n if @account.save\n redirect_to @account, notice: 'Account was successfully created.'\n else\n render :new\n end\n end", "def client_sign_up\n\n end", "def show\n @user = current_user || User.new\n case step\n when :select_language\n skip_step if I18n.locale.present?\n end\n\n case step\n when :select_country\n session[:locale] = params[:locale]\n end\n\n case step\n when :accept_terms\n session[:country] = params[:user][:country]\n end\n\n case step\n when :refuse\n end\n render_wizard\n end", "def show\n render_wizard\n end", "def signup_complete\n\n if params[:signup_complete_form]\n \n @signup_complete_form = SignupCompleteForm.new(params[:signup_complete_form])\n\n if @signup_complete_form.valid?\n \n # try and create the member in sfdc\n new_member_create_results = CsApi::Account.create(SfdcConnection.admin_dbdc_client.oauth_token, params[:signup_complete_form]).symbolize_keys!\n logger.info \"[SessionsController]==== creating a new third party user with email address (#{@signup_complete_form.email}): #{new_member_create_results.to_yaml}\"\n \n # if the user was created successfully in sfdc\n if new_member_create_results[:success].eql?('true')\n \n # delete the user if they already exists\n User.delete(User.find_by_username(new_member_create_results[:username]))\n \n user = User.new(:username => new_member_create_results[:username], \n :sfdc_username => new_member_create_results[:sfdc_username], \n :password => Encryptinator.encrypt_string(ENV['THIRD_PARTY_PASSWORD']))\n \n logger.info \"[SessionsController]==== try to save #{@signup_complete_form.email} to the database\"\n\n if user.save\n logger.info \"[SessionsController]==== saving #{@signup_complete_form.email} to the database\"\n # sign the user in\n sign_in user\n logger.info \"[SessionsController]==== 
#{@signup_complete_form.email} successfully signed in\"\n # send the 'welcome' email -- taken care of by the API now\n # Resque.enqueue(WelcomeEmailSender, current_access_token, new_member_create_results[:username]) unless ENV['MAILER_ENABLED'].eql?('false')\n # add the user to badgeville\n Resque.enqueue(NewBadgeVilleUser, current_access_token, new_member_create_results[:username], new_member_create_results[:sfdc_username]) unless ENV['BADGEVILLE_ENABLED'].eql?('false')\n unless session[:marketing].nil?\n # update their info in sfdc with the marketing data\n Resque.enqueue(MarketingUpdateNewMember, current_access_token, new_member_create_results[:username], session[:marketing]) \n # delete the marketing session hash\n session.delete(:marketing)\n end\n # check for any referral & update the record with the newly created member\n unless session[:referral].nil?\n Resque.enqueue(ProcessReferral, session[:referral], new_member_create_results[:username]) \n # delete the referral_id session hash\n session.delete(:referral)\n end\n redirect_to welcome2cloudspokes_path\n else\n logger.error \"[SessionsController]==== error creating a new third party member after manually entering their email address. Could not save to database.\"\n render :inline => \"Whoops! An error occured during the authorization process. Please hit the back button and try again.\"\n end \n \n # display the error to them in the flash\n else \n logger.info \"[SessionsController]==== error creating new member: #{new_member_create_results[:message]}\"\n flash.now[:error] = new_member_create_results[:message]\n end\n end\n else\n # first time through -- prepopulate the form from the session\n @signup_complete_form = SignupCompleteForm.new(session[:auth])\n if ['github','twitter'].include?(@signup_complete_form.provider) \n @signup_complete_form.provider_username = @signup_complete_form.username\n else\n @signup_complete_form.provider_username = @signup_complete_form.email \n end\n \n logger.info \"[SessionsController]==== starting the signup process for #{session[:auth][:provider]}\"\n end\n end", "def step2\n user = User.new\n user.first_name = params[:first_name]\n user.last_name = params[:last_name]\n user.email = params[:username]\n user.email_confirmation = params[:username_confirmation]\n user.password = params[:password]\n user.password_confirmation = params[:password_confirmation]\n session[:user] = user\n if user.valid?\n session[:errors] = nil\n else\n session[:errors] = user.errors\n redirect_to admin_signup_step1_path\n end\n end", "def details\n if params['form_details']\n results = CsApi::Member.update(current_access_token, @current_user.username, params['form_details'])\n #check for errors!!\n if results['success'].eql?('false')\n if results['message'].index('Email__c duplicates').nil?\n flash.now[:error] = results['message']\n else\n flash.now[:error] = 'Duplicate email address found! 
The email address that you specified is already in use.'\n end\n else\n flash.now[:notice] = 'Your account information has been updated.'\n # get the updated account\n get_account \n end \n end\n @page_title = 'Account Details'\n end", "def new\n @user = Account.new\n end", "def new\n \t@client = Client.new\n @page_title = \"Registration\"\n end", "def candidate_sign_up\n\n end", "def account_details\n selection = account_details_select\n case selection\n when 'View Details'\n @user.details\n when 'Change Username'\n username = @prompt.ask('Please enter your new username >')\n @user.change_username(username)\n else\n case_account_details(selection)\n end\n end", "def show_sign_up(user_id)\n unless user_id\n content_tag :li do\n link_to 'Sign up', new_users_path\n end\n end\n end", "def show\n # If not authenticating/-ed: block all further action.\n step == :authenticate or session[:authenticated] or\n return redirect_to wizard_path(:authenticate)\n\n # Differ the current step to load the current settings for display.\n case step\n\n # The first step authenticates the server-root-user. Generate a token that\n # he can read from the file-system.\n when :authenticate\n File.write('SETUP_CODE', SecureRandom.base64(32))\n\n # Domain- and mailer-steps alter global server configuration via the\n # SettingsHelper. They are provided with current settings.\n when :domain, :mailer\n get_settings\n\n # If no setting is set, provide the user with default settings.\n defaults = {\n domain: request.base_url,\n redis: 'redis://localhost:6379',\n timezone: 'UTC',\n email_server: 'smtp.gmail.com',\n email_port: 587\n }\n defaults.each { |k, v| @settings[k] ||= v }\n\n # Here a new server is generated. Create an empty one to not let the form\n # screw up.\n when :server\n @server = Server.new\n\n # Something went wrong: Nobody should be here.\n when :finish\n return redirect_to root_path\n end\n\n # Finally let wicked decide what to render.\n render_wizard\n end", "def new\n @account = current_user.accounts.build\n @help = \"New Account\"\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @account }\n end\n end", "def new_user_setup\n \t@person = Person.find(:first, :conditions => {:email_address => params[:email_address]})\n \tif @person.nil?\n \t\tflash[:error] = \"Invalid Email Address.\"\n \t\tredirect_to :action => 'register'\n\t\telsif [email protected]_hash.blank?\n \t\tflash[:error] = \"Your account has already been set up. See the system administrator for help logging in.\"\n \t\tredirect_to :action => 'index'\n \telsif params[:password] != params[:password_confirm]\n \t\tflash[:error] = \"Passwords don't match. Please try again.\"\n \t\tredirect_to :action => 'register'\n \telse\n \t\[email protected] = params[:password]\n \t\[email protected]!\n \t\tredirect_to home_url(@person)\n \tend\n end", "def account_for_next_step\n return accounts_registration_taxes_path if AccountType.individual?(@account.account_type)\n\n return accounts_registration_company_path unless AccountType.registered_organisation?(@account.account_type)\n\n accounts_registration_company_registered_path\n end", "def create\n @sign_up = SignUp.new(sign_up_params)\n\n respond_to do |format|\n if @sign_up.save\n # format.html { redirect_to @sign_up, notice: 'Sign up was successfully created.' 
}\n format.html { redirect_to panel_path(1)}\n else\n format.html { render :new }\n format.json { render json: @sign_up.errors, status: :unprocessable_entity }\n end\n end\n end", "def signup\n\n\t\temail = params[:email] # Extract the email from the params of the signup form\n\t\ttimezone = params[:timezone] # Extract the timezone from the params of the signup form\n\n\t\t@url = uniqueUrlKeyGenerator # Generate a unique url key\n\t\told_user = User.find_by_email(email)\n\n\t\t# If user exists\n\t\tif !old_user.nil?\n\t\t # If user is not registered\n\t\t if !old_user.registered?\n\t\t # Send welcome email again and save him\n\t\t old_user.sendWelcomeEmail\n\t\t old_user.save\n\t\t end\n\t\tend\n\n\t\t# Find the user in the user db with the same email as extracted in the params\n\t\tcheck_users = User.find_by_email(email)\n\n\t\t#create a new PotentialUser object with the extarcted email, timezone and url key\n\t\tuser = User.new(email: email, url: @url, timezone: timezone, day: 1, registered: false)\n\n\t\t# If no such user exists\n\t\tif check_users.nil?\n\n\t\t#If the new user is valid and can be saved\n\t\t if user.save\n\t\t user.sendWelcomeEmail\n\t\t @title = \"Thank you for signing up\"\n\t\t @result = \"A confirmation email with instructions has been sent to you\"\n\t\t @result2 = \"Your unique access key is: \" + @url\n\n\t\t#If not valid\n\t\t else\n\t\t #Set @result as the error message\n\t\t @title = \"Looks like something went wrong ...\"\n\t\t @result = \"Email #{user.errors[:email][0]}.\".html_safe\n\t\t end\n\n\t\t#User by this email already exists\n\t\telse\n\n\t\t if !check_users.registered?\n\t\t\t # Result instance variable for the view\n\t\t\t @title = \"Looks like something went wrong ...\"\n\t\t\t @result = \"User by this email already exists, but we sent another confirmation email just in case\"\n\t\t\t else\n\t\t\t @title = \"Looks like something went wrong ...\"\n\t\t\t @result = \"User by this email already exists\"\n\t\t end\n\n\tend\n\n\t\t# Respond to only javascript, set for AJAX\n\t\trespond_to do |format|\n\t\t\tformat.js\n\t\tend\n\tend", "def create_account\n set_user\n set_payer\n set_user_sport\n save_account\n end", "def new\n @partner_sign_in = true\n render layout: 'sign_pages'\n end", "def show\n @current_section = 'register'\n \n respond_to do |format|\n format.html { render :action => 'dashboard' }\n format.xml { render :xml => @account }\n end\n end", "def sign_up_with(dj_name, password)\n visit new_user_path\n fill_in 'user_dj_name', with: dj_name\n fill_in 'user_password', with: password\n click_button 'Create User'\n end", "def signup \n @user = params[:user]\n @link = params[:link]\n Merb.logger.info \"Sending Signup to #{@user.email} with code #{@user.activation_code}\"\n render_mail :text => :signup, :layout => :core\n end" ]
[ "0.69698673", "0.6576694", "0.65688086", "0.65266263", "0.6369515", "0.62892467", "0.62160987", "0.6211264", "0.61650825", "0.6136421", "0.6136421", "0.6131539", "0.61231965", "0.6053946", "0.6034264", "0.59950346", "0.59885436", "0.598013", "0.5939335", "0.59365517", "0.592921", "0.5921448", "0.5906459", "0.5906459", "0.5906459", "0.58878815", "0.5887361", "0.58849436", "0.5849363", "0.58424973", "0.5831086", "0.58229965", "0.58184063", "0.57515895", "0.5731471", "0.5718419", "0.5713713", "0.5707239", "0.56889707", "0.56847674", "0.5684182", "0.5671072", "0.56359625", "0.56359625", "0.562521", "0.562521", "0.562521", "0.561426", "0.5611329", "0.56091374", "0.5607274", "0.5599038", "0.5598931", "0.55913854", "0.5565248", "0.5554528", "0.5551525", "0.55479646", "0.5540839", "0.5538291", "0.5529157", "0.5521505", "0.5519683", "0.5518679", "0.5513041", "0.5504967", "0.55043477", "0.54980516", "0.5489561", "0.5489485", "0.54878885", "0.54877514", "0.54857826", "0.547878", "0.5469708", "0.5466045", "0.5456364", "0.5450374", "0.544527", "0.5438788", "0.5438", "0.5436477", "0.54340935", "0.54252344", "0.54241806", "0.54196286", "0.5413454", "0.5409706", "0.54071873", "0.5395171", "0.53882414", "0.53691196", "0.5362328", "0.53531444", "0.53459984", "0.5338667", "0.533734", "0.53352314", "0.53312665", "0.53283083" ]
0.6136644
9
Update the account & store with params sent from account_setup form.
def account_complete # POST '/admin/account_setup'
  @admin = Admin.find(session[:admin_id])
  # This section of the site is built for new admin accounts.
  # Update the admin attributes with the more detailed information from the
  # account_setup (step 2) form, and update their first store, which was
  # instantiated in the account_create action.
  if @admin.update(admin_params) && @admin.stores.first.update(store_params)
    ## Sign-up is successful and the admin dashboard is loaded.
    redirect_to admin_path
  else
    render :account_setup
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update\n set_action(:update)\n set_account_type\n validate_account_type!\n assign_params\n set_affiliate\n set_updated_by\n\n after_update\n account\n end", "def update\n respond_to do |format|\n @account.assign_attributes(account_params)\n if @account.save\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n if @account.update(account_params)\n redirect_to root_path, notice: 'Account was successfully updates.'\n else\n redirect_to root_path, alert: 'Some thing went wrong! try more...'\n end\n end", "def update!(**args)\n @accounts = args[:accounts] if args.key?(:accounts)\n end", "def update\n \n if @account.update(account_params)\n redirect_to @account, notice: \"Account was successfully updated.\" \n else\n render :edit, status: :unprocessable_entity \n end\n \n end", "def update\n if @account.update(account_params)\n render :show, status: :ok, location: @account\n else\n render json: @account.errors, status: :unprocessable_entity\n end\n end", "def update\n\n begin\n\n update_zone = false\n deploy_zone = false\n\n @account = Account.active.find_by_email(params[:id])\n @account = Account.active.find(params[:id]) if @account.nil?\n\n if @account_user.instance_of?(Account)\n #verify user is accessing his/her own account\n raise \"Username '#{@account_user.email}' does not have access to this account\" if @account != @account_user\n end\n\n Account.transaction do\n\n #changed from \"update_attributes\" because\n # - not all attributes can be updated\n # - params may contain one or more (might not be all) attributes\n # - super-user can change attrs that user cannot\n @account[:name] = params[:name] unless params[:name].nil?\n @account[:first_name] = params[:first_name] unless params[:first_name].nil?\n @account[:last_name] = params[:last_name] unless params[:last_name].nil?\n @account[:address] = params[:address] unless params[:address].nil?\n @account[:city] = params[:city] unless params[:city].nil?\n @account[:province] = params[:province] unless params[:province].nil?\n @account[:country] = params[:country] unless params[:country].nil?\n @account[:postal_code] = params[:postal_code] unless params[:postal_code].nil?\n @account[:phone] = params[:phone] unless params[:phone].nil?\n\n #Change password?\n unless (params[:password].nil? || params[:new_password].nil?)\n raise Exceptions::InvalidCredentials.new(@account.email) unless @account.verify_password(params[:password])\n @account[:password] = params[:new_password]\n @account[:status] = VersaFile::AccountStates.Enabled\n end\n\n #Allow super-user to modify these\n if @account_user.instance_of?(RkoUser)\n @account[:customer_code] = params[:customer_code] unless params[:customer_code].nil?\n @account[:billing_type] = params[:billing_type] unless params[:billing_type].nil?\n @account[:account_type] = params[:account_type] unless params[:account_type].nil?\n @account[:trial_period] = ((@account[:account_type] == VersaFile::AccountTypes.Trial) ? params[:trial_period] : VersaFile::TrialStates.NoTrial) unless params[:trial_period].nil?\n\n deploy_zone = ( ([email protected]_code.nil?) && (@account.customer_code_changed?) && (@account.status == VersaFile::AccountStates.Pending))\n end\n\n @account[:updated_by] = @account_user.instance_of?(RkoUser) ? 
@account_user.name : @account_user.email\n\n update_zone |= @account.trial_period_changed?\n unless @account.save\n raise_errors(@account.errors)\n end\n\n unless params[:subdomains].nil?\n\n params[:subdomains].each do |subdomain|\n @zone_node = @account.zone_nodes.find_by_subdomain(subdomain[:name])\n raise \"Zone '#{subdomain[:name]}' is not associated with account '#{@account.name}'\" if @zone_node.nil?\n\n @zone_node.name = @account.name unless @account.name.nil?\n @zone_node.max_users = subdomain[:user_quota] unless subdomain[:user_quota].nil?\n @zone_node.max_disk_space = (subdomain[:disk_quota].to_i * VersaFile::DiskSizes.Gigabyte) unless subdomain[:disk_quota].nil?\n\n update_zone |= @account.name_changed? |\n @zone_node.max_users_changed? |\n @zone_node.max_disk_space_changed?\n\n unless @zone_node.save\n raise_errors(@zone_node.errors)\n end\n end\n\n end\n\n #Update remote zone information\n logger.debug(\"UPDATE ZONE???? :> #{update_zone}\")\n if update_zone\n @account.zone_nodes.each do |zone_node|\n zone_node.delay.zone_update\n end\n end\n\n end\n\n if deploy_zone\n @account.zone_nodes.each do |zn|\n #Auto-deploy\n zn.delay.zone_deploy if deploy_zone\n end\n end\n\n respond_to do |format|\n format.html { redirect_to @account, notice: 'Account was successfully created.' }\n format.json { render json: @account.to_json(:except=>:password, :methods => [:subdomains, :template]), :status => :ok }\n end\n\n rescue => e\n logger.error \"Account update failed => #{e.message}\"\n respond_to do |format|\n format.json { render :json => e.message, :status => :unprocessable_entity }\n end\n end\n\n end", "def update\n authorize @account\n if @account.update(account_params)\n render json: { status: :ok, account: @account }\n else\n render json: {error: @account.errors, status: :unprocessable_entity }\n end\n end", "def assign_params\n account.assign_attributes(account_params)\n account\n end", "def update!(**args)\n @account_id = args[:account_id] if args.key?(:account_id)\n @service_account = args[:service_account] if args.key?(:service_account)\n end", "def update!(**args)\n @account_id = args[:account_id] if args.key?(:account_id)\n @service_account = args[:service_account] if args.key?(:service_account)\n end", "def account\n @title = \"Account Details\"\n # Update account details\n if request.post?\n if @affiliate.update_attributes(params[:affiliate])\n flash.now[:notice] = \"Account details saved.\"\n else\n flash.now[:notice] = \"There was a problem saving your account.\"\n end\n end\n end", "def update!(**args)\n @billing_account = args[:billing_account] if args.key?(:billing_account)\n end", "def update_account\n if params[:account].present?\n if @account.update(avatar: params[:account][:avatar])\n redirect_to gns_core.my_account_backend_accounts_path, flash: {success: 'Avatar was successfully updated.'}\n end\n else\n redirect_to gns_core.my_account_backend_accounts_path, flash: {error: 'Avatar update failed. Please try again'}\n end\n end", "def update\n flash[:notice] = 'The account was successfully updated.' 
if account.update_attributes(params[:account])\n respond_with(account)\n end", "def account\n @title = \"Your Account Details\"\n # Update account details\n if request.post?\n if @customer.update_attributes(params[:customer])\n flash.now[:notice] = \"Account details saved.\"\n else\n flash.now[:notice] = \"There was a problem saving your account.\"\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to action: \"index\" }\n \n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @account = current_account \n respond_to do |format|\n if @account.update_attributes(params[:account])\n flash[:notice] = 'Account was successfully updated.'\n format.html { redirect_to user_account_url }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @account.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update_account(client, options)\n accounts = get_accounts(client, options)\n if accounts.nil?\n return\n end\n\n account = find_account(accounts, options[:email])\n if !account.nil?\n if !options[:firstname].nil?\n account.given_name = options[:firstname]\n end\n if !options[:surname].nil?\n account.surname = options[:surname]\n end\n if !options[:username].nil?\n account.username = options[:username]\n end \n if !options[:password].nil?\n account.password = options[:password]\n end\n if !options[:status].nil?\n account.status = (options[:status] == \"disabled\" ? Stormpath::Resource::Status::DISABLED : Stormpath::Resource::Status::ENABLED)\n end\n\n begin\n account.save\n puts \"Account updated.\"\n rescue Stormpath::Error => e\n print_error(e)\n end\n else\n puts \"Account not found\"\n return\n end\nend", "def set_account\n @account = Account.find(params[:account_id])\n end", "def update\n # Check for an existing Stripe account\n # unless current_user.stripe_account\n # redirect_to new_stripe_account_path and return\n # end\n #\n # begin\n # # Retrieve the Stripe account\n # @stripe_account = Stripe::Account.retrieve(current_user.stripe_account)\n #\n # @account = StripeAccount.new(account_params)\n #\n #\n # # Reject empty values\n # account_params.each do |key, value|\n # if value.empty?\n # flash.now[:alert] = \"Please complete all fields.\"\n # render 'edit' and return\n # end\n end", "def update!(**args)\n @account_type = args[:account_type] if args.key?(:account_type)\n end", "def update\n @account = Account.find(params[:id]) \n if @account.update_attributes!(params[:account])\n flash[:success] = \"Profile updated\"\n sign_in @account\n redirect_to @account\n else\n render 'edit'\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to edit_user_info_path(session[:user_info_no]) }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n current_account.update(provider_params, without_protection: true)\n respond_with current_account\n end", "def update!(**args)\n @account_name = args[:account_name] if args.key?(:account_name)\n @account_number = args[:account_number] if args.key?(:account_number)\n @name = args[:name] if args.key?(:name)\n @organization_info = args[:organization_info] if args.key?(:organization_info)\n @permission_level = 
args[:permission_level] if args.key?(:permission_level)\n @profile_photo_url = args[:profile_photo_url] if args.key?(:profile_photo_url)\n @role = args[:role] if args.key?(:role)\n @state = args[:state] if args.key?(:state)\n @type = args[:type] if args.key?(:type)\n end", "def update\n @account.accountName = params[:account][:accountName]\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n else\n format.html { render :edit }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\r\n @account = Account.find(params[:id])\r\n @family_types = FamilyType.all\r\n @family_type = FamilyType.find(params[:family_type])\r\n @account.family_type = @family_type\r\n\r\n\r\n respond_to do |format|\r\n\r\n # If the current account is an admin user but not the account that is being modified,\r\n # they will not have entered a password and we should just assign the current password values\r\n # to the account.\r\n\r\n if current_account == @account\r\n @account.updating_password = true\r\n end\r\n\r\n logger.debug \"Token 1: #{@account.remember_token}\"\r\n\r\n if @account.update_attributes(params[:account])\r\n logger.debug \"Token 2: #{@account.remember_token}\"\r\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\r\n\r\n else\r\n format.html { render action: \"edit\" }\r\n format.json { render json: @account.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end", "def update\n email = AuthenticationHelper::Auth.instance.getEmailFromJWT( request )\n if ( email == nil )\n render json: :nothing, status: :unprocessable_entity\n return\n end\n\n user = User.find_by(email: email)\n if ( user )\n account = Account.find_by(user_id: user[\"id\"], id: params[:id])\n \n if ( account == nil )\n render json: :nothing, status: :unprocessable_entity\n return\n end\n\n if account.update(account_params)\n render json: account, status: :ok\n else\n render json: account.errors, status: :unprocessable_entity\n end\n else\n render json: :nothing, status: :unprocessable_entity\n end\n end", "def set_account\n @account = Account.find params[:id]\n end", "def update\n\n accountParams[:accounting_method] = accountParams[:accounting_method].to_i\n accountParams[:payor_type] = accountParams[:payor_type].to_i\n accountParams[:print_statement] = accountParams[:print_statement].to_i\n accountParams[:rx_charge_description] = accountParams[:rx_charge_description].to_i\n accountParams[:statement_type] = accountParams[:statement_type].to_i\n\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @account = Account.find(params[:id])\n\n respond_to do |format|\n if @account.update_attributes(params[:account])\n format.html { redirect_to quick_launch_path, notice: 'Account was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n flash[:notice] = \"Account was successfully updated.\"\n format.html { redirect_to @account }\n format.json { render :show, status: :ok, location: @account }\n else\n flash[:error] = \"Unable to update account: #{@account.errors.full_messages.to_sentence}\"\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @account = current_user.accounts.find(params[:id])\n\n respond_to do |format|\n if @account.update_attributes(params[:account])\n flash[:notice] = 'Account was successfully updated.'\n format.html { redirect_to([current_user, @account]) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @account.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to account_path(@account), notice: I18n.t('notice.update', model: t('account.name')) }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @account = current_user.account.find(params[:id])\n\n respond_to do |format|\n if @account.update_attributes(params[:account])\n flash[:notice] = 'Account was successfully updated.'\n format.html { redirect_to(@account) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @account.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update(id, params)\n if valid_data?(params)\n update_execution('accounts', params, id)\n else\n print(\"ERROR: couldn't insert account data\")\n end\n end", "def update!(**args)\n @billing_accounts = args[:billing_accounts] if args.key?(:billing_accounts)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n end", "def update\n respond_to do |format|\n if @account.save\n user = @account.users.build(account_params[:users_attributes])\n if user.save\n session[:user_id] = user.id\n format.html { redirect_to samples_path, notice: 'account was successfully created.' }\n format.json { render samples_path, status: :created, location: @account }\n else\n format.html { redirect_to register_path, notice: 'Error, in user save' }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n else\n format.html { render :new, notice: 'Error, in account save' }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n if @account.present?\n\n if @user.account.update_with_stripe(params)\n redirect_to user_url(@user), notice: 'Account was successfully updated.'\n else\n handle_account_errors(@user, params)\n render :edit\n end\n\n else\n flash[:alert] = \"We could not find the requested credit card account.\"\n redirect_to user_url(@user)\n end\n end", "def update \n @account_params = light_account_params\n if admin_signed_in?\n @account_params = account_params\n end\n respond_to do |format|\n if @account.update(@account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Conta atualizada.' }\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update_attributes(params[:account])\n flash[:notice] = 'Account was successfully updated.'\n format.html { redirect_to(user_accounts_path(current_user)) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @account.errors, :status => :unprocessable_entity }\n end\n end\n end", "def save_account\n @account = copy_account_to_user(@account)\n wizard_save(@account)\n @account.save\n end", "def set_account\n Finance::Account.create.save\n @account = Finance::Account.first\n end", "def update!(**args)\n @restored_account = args[:restored_account] if args.key?(:restored_account)\n end", "def update!(**args)\n @account_id = args[:account_id] if args.key?(:account_id)\n @account_mid = args[:account_mid] if args.key?(:account_mid)\n @ares_id = args[:ares_id] if args.key?(:ares_id)\n @state = args[:state] if args.key?(:state)\n end", "def update\n if @account && !update_account_params[:currency_id].empty?\n update_account_params[:currency_id].each do |i|\n @account.currency_id << i\n json_response({ status: true, messages: 'Currency added' }) if @account.save\n end\n else\n json_response({ status: false, messages: 'The account was not found or the params are empty' })\n end\n end", "def update!(**args)\n @account_name = args[:account_name] if args.key?(:account_name)\n @account_number = args[:account_number] if args.key?(:account_number)\n @name = args[:name] if args.key?(:name)\n @permission_level = args[:permission_level] if args.key?(:permission_level)\n @role = args[:role] if args.key?(:role)\n @state = args[:state] if args.key?(:state)\n @type = args[:type] if args.key?(:type)\n end", "def update\n respond_to do |format|\n if @current_account.update(current_account_params)\n format.html { redirect_to @current_account, flash: { success: \"AccountPayable was successfully updated.\" } }\n format.json { render :show, status: :ok, location: @current_account }\n else\n format.html { render :edit }\n format.json { render json: @current_account.errors, status: :unprocessable_entity }\n end\n end\n end", "def set_account\n @account = current_user.accounts.find(params[:account_id])\n end", "def set_account\n response = @account_api.find(params[:id])\n @account = response[:account]\n end", "def update\n @account = current_user.person.blackberry_accounts.find(params[:id])\n\n respond_to do |format|\n if @account.update_attributes(params[:blackberry_account])\n format.html { redirect_to( accounts_path, :notice => 'Account was successfully updated.') }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @account.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @account = @user.account\n @plan = Plan.find(params[:account][:plan_id])\n \n if @account.plan_change_valid?(@plan)\n if @plan.free?\n respond_to do |format|\n if @account.update_attributes(params[:account])\n flash[:notice] = 
'Account was successfully updated.'\n format.html { redirect_to(account_path(@account, :subdomain => @user.subdomain)) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @account.errors, :status => :unprocessable_entity }\n end\n end\n else\n session[:account_id] = @account.id\n session[:plan_id] = @plan.id\n checkout(upgrade_account_url, projects_root_url(:subdomain => @user.subdomain))\n end\n else\n flash[:warning] = \"Account plan could not be changed.\"\n redirect_to(projects_root_url(:subdomain => @user.subdomain))\n end\n end", "def update\n @account = Account.find(params[:id])\n \n respond_to do |format|\n if @account.update_attributes(params[:account])\n format.html { redirect_to account_url(@account) }\n format.xml { render :nothing => true }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @account.errors.to_xml } \n end\n end\n end", "def set_account\n account = Account.find(params[:id])\n redirect_to(accounts_url) unless current_user.admin? || account.sales_associate == current_user\n @account = account\n end", "def update_order_detail_accounts\n order_details.each do |od|\n od.account = account\n od.save!\n end\n end", "def update!(**args)\n @accounts = args[:accounts] if args.key?(:accounts)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n end", "def update!(**args)\n @accounts = args[:accounts] if args.key?(:accounts)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n end", "def update!(**args)\n @accounts = args[:accounts] if args.key?(:accounts)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n end", "def update!(**args)\n @accounts = args[:accounts] if args.key?(:accounts)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n end", "def update!(**args)\n @accounts = args[:accounts] if args.key?(:accounts)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n end", "def update!(**args)\n @accounts = args[:accounts] if args.key?(:accounts)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n end", "def update!(**args)\n @accounts = args[:accounts] if args.key?(:accounts)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n end", "def update\n @account = Account.find(params[:id])\n\n respond_to do |format|\n if @account.update_attributes(params[:account])\n format.html { redirect_to @account, :notice => 'Account was successfully updated.' }\n format.json { head :ok }\n else\n @suppliers = Supplier.all(:order=>:name).collect{|c| [c.name, c.id]}\n @aircraft_types = AircraftType.all(:order => :name).collect{|c| [c.name, c.id]}\n\n format.html { render :action => \"edit\" }\n format.json { render :json => @account.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @account = account.find(params[:id])\n\n respond_to do |format|\n if @account.update_attributes(params[:account])\n flash[:notice] = 'account was successfully updated.'\n format.html { redirect_to(@account) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @account.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update_user_details_in_chargify\n unless self.account.chargify_customer_id.blank?\n\n if self.roles.any? 
{ |r| r.title == 'account_holder' }\n chargify_customer = Chargify::Customer.find_by_reference(self.account.id)\n chargify_customer.first_name = self.firstname\n chargify_customer.last_name = self.lastname\n chargify_customer.email = self.email\n chargify_customer.save\n end\n end\n end", "def update\n @account = Admin::Account.find(params[:id])\n\n respond_to do |format|\n if @account.update_attributes(params[:account])\n flash[:notice] = 'Admin::Account was successfully updated.'\n format.html { redirect_to(@account) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @account.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @current_account.update(current_account_params)\n format.html { redirect_to @current_account, notice: 'Current account was successfully updated.' }\n format.json { render :show, status: :ok, location: @current_account }\n else\n format.html { render :edit }\n format.json { render json: @current_account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @synthetic_account.update(synthetic_account_params)\n format.html { redirect_to @synthetic_account, notice: 'Synthetic account was successfully updated.' }\n format.json { render :show, status: :ok, location: @synthetic_account }\n else\n format.html { render :edit }\n format.json { render json: @synthetic_account.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_account(user)\n account = Account.to_adapter.get!(user.id)\n update_status = account.update_with_password({ \"email\" => user.email, \"name\" => user.username })\nend", "def update!(**args)\n @account_id = args[:account_id] if args.key?(:account_id)\n @creation_ms = args[:creation_ms] if args.key?(:creation_ms)\n @email = args[:email] if args.key?(:email)\n @email_verified = args[:email_verified] if args.key?(:email_verified)\n @phone_number = args[:phone_number] if args.key?(:phone_number)\n @phone_verified = args[:phone_verified] if args.key?(:phone_verified)\n end", "def update\n @event.user.account.update(account_params)\n \n if @event.update(event_params)\n redirect_to :back\n else\n redirect_to dashboard_path\n end\n end", "def update\n respond_to do |format|\n if @transaction.update(account_params)\n format.html { redirect_to @transaction, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @transaction }\n else\n format.html { render :edit }\n format.json { render json: @transaction.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @main_account.update(main_account_params)\n format.html { redirect_to @main_account, notice: 'Main account was successfully updated.' }\n format.json { render :show, status: :ok, location: @main_account }\n else\n format.html { render :edit }\n format.json { render json: @main_account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, notice: 'Account was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @account.update(account_params)\n format.html { redirect_to @account, info: 'Post was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @account }\n else\n format.html { render :edit }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\t\t\t\ttoken = params[:auth_token]\n\t\t\t\tid = params[:id].to_i\n\n\t\t\t\tif token.nil?\n\t\t\t\t\trender :status => 400,\n\t\t\t\t\t\t:json => { :message => \"The request must contain an auth token.\"}\n\t\t\t\t\treturn\n\t\t\t\tend\n\t\t\t\t@user = User.find_by_authentication_token(token)\n\t\t\t\taccount = JSON.parse params[:account]\n\n\t\t\t\tif account[\"username\"].nil?\n\t\t\t\t\trender :status => 400,\n\t\t\t\t\t\t:json => { :message => \"'username' cannot be null.\"}\n\t\t\t\t\treturn\n\t\t\t\tend\n\t\t\t\tif account[\"computer_id\"].nil?\n\t\t\t\t\trender :status => 400,\n\t\t\t\t\t\t:json => { :message => \"The request must contain a 'computer_id'.\"}\n\t\t\t\t\treturn\n\t\t\t\tend\n\t\t\t\t@accounts = Account.where computer_id: @user.computer_ids\n\t\t\t\tif id != 0\n\t\t\t\t\tunless @accounts.pluck(:id).include? id\n\t\t\t\t\t\trender :status => 401,\n\t\t\t\t\t\t\t:json => { :message => \"Access Denied, check account Id.\"}\n\t\t\t\t\t\treturn\n\t\t\t\t\tend\n\t\t\t\tend\n\n\t\t\t\tputs \n\t\t\t\tputs account\n\t\t\t\tputs \n\t\t\t\tputs account[\"account_process_attrubutes\"]\n\t\t\t\tputs \n\t\t\t\tputs \n\t\t\t\t@account = Account.find(id)\n\t\t\t\tif Account.update(id, account)\n\t\t\t\t\trender json: {account: @account}\n\t\t\t\telse\n\t\t\t\t\trender :json => { :message => \"Something went wrong with saving the entity.\"}\n\t\t\t\t\treturn\n\t\t\t\tend\n\t\t\tend", "def update!(**args)\n @account = args[:account] if args.key?(:account)\n @kind = args[:kind] if args.key?(:kind)\n @profile = args[:profile] if args.key?(:profile)\n @webproperty = args[:webproperty] if args.key?(:webproperty)\n end", "def update\n respond_to do |format|\n if @analytic_account.update(analytic_account_params)\n format.html { redirect_to @analytic_account, notice: 'Analytic account was successfully updated.' }\n format.json { render :show, status: :ok, location: @analytic_account }\n else\n format.html { render :edit }\n format.json { render json: @analytic_account.errors, status: :unprocessable_entity }\n end\n end\n end", "def update!(**args)\n @account = args[:account] if args.key?(:account)\n @id = args[:id] if args.key?(:id)\n @kind = args[:kind] if args.key?(:kind)\n @profile = args[:profile] if args.key?(:profile)\n @redirect_uri = args[:redirect_uri] if args.key?(:redirect_uri)\n @webproperty = args[:webproperty] if args.key?(:webproperty)\n end", "def update_account_info\n Validation.validateParameters(params)\n begin\n customer_id = @customer_id\n result = ChargeBee::Customer.update(customer_id, {:first_name => params['first_name'],\n :last_name => params['last_name'],\n :email => params['email'],\n :company => params['company'],\n :phone => params['phone']\n }) \n render json: {\n :forward => \"/ssp/subscription\"\n }\n rescue ChargeBee::InvalidRequestError => e\n ErrorHandler.handle_invalid_request_errors(e, self)\n rescue Exception => e\n ErrorHandler.handle_general_errors(e, self)\n end\n end", "def set_account\n @account = Account.find(params[:id])\n end" ]
[ "0.78936625", "0.73299444", "0.7187067", "0.70923245", "0.7070606", "0.69922864", "0.69412386", "0.69100744", "0.6853872", "0.68528765", "0.68528765", "0.6838077", "0.68362284", "0.6815558", "0.680591", "0.68021846", "0.6765862", "0.6764676", "0.6737823", "0.6731071", "0.67164165", "0.67140317", "0.6708418", "0.668228", "0.6679683", "0.6679542", "0.6673154", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.66695255", "0.6654356", "0.6645543", "0.6640655", "0.6635535", "0.6621052", "0.66000587", "0.65989524", "0.6588802", "0.65866566", "0.65860367", "0.65824026", "0.6574069", "0.65732515", "0.6573206", "0.6573195", "0.6571562", "0.65664", "0.6557148", "0.6545748", "0.6544465", "0.6539861", "0.6538675", "0.6535887", "0.6530883", "0.653022", "0.6529081", "0.651944", "0.6516721", "0.6508272", "0.65057844", "0.6503617", "0.6503617", "0.6503617", "0.6503617", "0.6503617", "0.6503617", "0.6503617", "0.6490921", "0.64795655", "0.6477681", "0.64767057", "0.64707065", "0.6469583", "0.64565855", "0.64512026", "0.6446969", "0.6444778", "0.6440677", "0.64393216", "0.64372164", "0.6437199", "0.64360774", "0.64356315", "0.64309984", "0.6430913", "0.6427477" ]
0.70518905
5
Managing a store: the home page for a signed-in admin ('Dashboard').
def dashboard
  # Redirect to root_path if not logged into an admin account.
  if @current_admin.nil? || @current_admin.empty?
    redirect_to root_path
  else
    @products = Product.all
    # See private function at the bottom. Refactored to be more DRY.
    store_options_for_select
    @selected_store_products = @selected_store.products
    @staff_members = @selected_store.admins.map { |s| "#{s.first_name} #{s.last_name}" }
    @price_array = []
    @hidden_items_array = []
    @inventory_array = []
    @gross_array = []

    # Store products analytics
    @selected_store.products.each do |product|
      if product.quantity && product.price
        sum = product.quantity * product.price
        @gross_array << sum
        @price_array << product.price
        @inventory_array << product.quantity
      end
      @hidden_items_array << product if product.visibility == false
    end

    unless @gross_array.blank?
      @gross_array.inject do |sum, x|
        @gross_income = sum + x
      end
    end
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def admin_index\n return if !user_is_allowed('pages', 'view') \n @domain = Domain.where(:domain => request.host_with_port).first\n @home_page = @domain ? Page.index_page(@domain.site_id) : nil\n if @domain && @home_page.nil?\n @home_page = Caboose::Page.create(:site_id => @domain.site_id, :parent_id => -1, :title => 'Home')\n end\n render :layout => 'caboose/admin' \n end", "def home\n if current_user\n redirect_to '/dashboard'\n else\n render 'home'\n end\n end", "def dashboard_home\n service_response = SimpleTokenApi::Request::User.new(host_url_with_protocol, request.cookies, {\"User-Agent\" => http_user_agent}).profile_detail\n\n # Check if error present or not?\n unless service_response.success?\n error_extra_info = service_response['error_extra_info'] || {}\n user_token_sale_state = error_extra_info['user_token_sale_state']\n\n if user_token_sale_state.present? && user_token_sale_state != GlobalConstant::TokenSaleUserState.profile_page\n # extra_param = params[:t].present? ? \"?e_t=1\" : \"\"\n redirect_if_step_not_reachable(user_token_sale_state, GlobalConstant::TokenSaleUserState.profile_page_allowed_states)\n return if has_performed?\n else\n render_error_response(service_response)\n end\n\n return\n end\n\n @presenter_obj = ::Web::Client::Profile.new(service_response, params)\n set_page_meta_info(@presenter_obj.custom_meta_tags)\n end", "def home\n if user_signed_in?\n current_user.update_attribute(:login_status, true) if current_user # update login status for logged in user\n if !current_user.is_super_admin? && current_user.sign_in_count == 1 && current_user.created_by != 0 #User records which are created by super admin or dept admin has to change the password while they are logged in first time\n redirect_to :controller => \"registrations\", :action => \"privacy_setting\"\n else\n redirect_to :controller => \"dashboard\", :action => \"index\"\n end\n else\n redirect_to new_user_session_path\n end\n end", "def admin_home\n redirect_to(:action => 'index')\n end", "def index\n if !user_signed_in?\n redirect_to home_path\n elsif current_user.admin?\n redirect_to admin_portal_path\n else\n redirect_to portal_path\n end\n end", "def redirect_to_home\n redirect_to session[:superadmin_mode] ? ubiquo.superadmin_home_path : ubiquo.home_path\n end", "def home\r\n\r\n redirect_to(@current_user.home_page)\r\n end", "def home #uses default admin layout\n end", "def user_dashboard\n if @current_user.superadmin_role\n path = auth_hub.index_superadmin_url\n elsif @current_user.admin_role\n path = auth_hub.index_admin_url\n elsif @current_user.admin_servizi\n path = auth_hub.index_admin_url\n end\n redirect_to path unless path.blank?\n end", "def home\n if user_signed_in?\n\n if current_user.connection_valid?\n @favorites = current_user.favorites\n @queries = current_user.queries\n @keytech_username = current_user.keytech_username\n # Load Dashboard async in javascript\n render 'home'\n else\n render 'invalid_login'\n end\n else\n\n # User is not signed in in platform\n render 'landing_page'\n end\n end", "def index\n render ('index') #render the admin_helpers view. 
The main page controller calls this action after the admin_helpers logs in\n end", "def admin\n self.sitemap\n self.follow('dtime:dashboard:admin')\n self.get\n self\n end", "def admin_nav\n items = {'Home' => admin_root_path, \n 'Users' => admin_users_path,\n 'Submissions' => admin_submissions_path}\n output_nav(items)\n end", "def ensure_admin_user\n redirect_to dashboard_index_path unless is_admin?\n end", "def dashboard\n @title = 'Admin'\n render 'admin/dashboard', :layout => 'layouts/main', :cachable => true\n end", "def home_page\n if current_user.guest?\n user_signup_url\n elsif current_user.administrator?\n admin_films_path\n elsif current_user.judge?\n judging_categories_path\n else\n obejct_url current_user\n end\n end", "def index\n if current_user\n if !request.referer || (request.env['HTTP_REFERER'] && !(URI(request.env['HTTP_REFERER']).path == dashboard_path))\n redirect_to :controller => 'dashboards', :action => 'show'\n end\n end\n @title = 'Home'\n end", "def admin\n unless current_user.admin?\n #if logged user is not admin display error message and redirect to application INDEX (store_path)\n flash[:error] = \"Authorisation is required to access this content.\"\n redirect_to store_path\n end\n end", "def home_dashboard\n\n endpoint = '/api/dashboards/home'\n\n @logger.debug(\"Attempting to get home dashboard (GET #{endpoint})\") if @debug\n\n get(endpoint)\n end", "def show\n if !session[:is_admin]\n redirect_to root_url\n end\n end", "def admin_store\n\t\t@store = current_store\n\t\t@stores = Store.all.page(params[:page]).per(10)\n\t\trender :layout => 'store_application'\n\tend", "def go_to_admin_page\n user_name.click\n admin_link.click\n end", "def set_admin_home\n @admin_home = Admin::Home.find(params[:id])\n end", "def set_admin_home\n @admin_home = Admin::Home.find(params[:id])\n end", "def set_admin_home\n @admin_home = Card::Home.find(params[:id])\n end", "def home\n if logged_in?\n redirect_to user_path(current_user) # Redirect to user show page if logged in\n else\n render layout: 'welcome' # Render home view with welcome layout\n end\n end", "def admin\n\t\tif !session[:admin]\n\t\t\tredirect_to root_url\n\t\tend\n\tend", "def admin_user\n redirect_to(root_path) unless is_admin?\n end", "def home_check\n if logged_in? && @current_user.admin?\n puts @current_user\n redirect_to account_url(@current_user.id)\n return\n elsif logged_in?\n redirect_to cases_path\n else\n redirect_to page_path(\"home\")\n return\n end\n end", "def manager_home\n redirect_to(:action => 'index')\n end", "def set_dashboard\n end", "def logged_in_admin\n unless admin?\n # show a flash message on the page\n flash[:danger] = \"Non sei autorizzato a visitare questa pagina\"\n # redirect user to home page\n redirect_to root_path\n end\n end", "def index\n # @admins = Admin.all\n # \n # respond_to do |format|\n # format.html # index.html.erb\n # format.xml { render :xml => @admins }\n # end\n @admin_session = AdminSession.new\n if current_admin\n redirect_to dashboards_url\n end\n end", "def administrate!\n\tif current_user.admin == false \n\t\tredirect \"/home\"\n\tend\nend", "def home_path\n if user_signed_in?\n if admin_signed_in?\n admins_posts_path\n else #If Alum or friend\n opportunities_path\n end\n else #If not logged in\n root_path\n end\n end", "def admin_user\n unless logged_in? 
&& current_user.admin?\n redirect_to root_url\n end\n end", "def admin_user\n unless signed_in?\n store_location\n redirect_to signin_url, notice: \"Please sign in.\" \n return\n end\n\n unless current_user.admin?\n redirect_to boards_path, notice: \"You do not have permission for that.\" \n return\n end\n end", "def dashboard\n end", "def dashboard\n end", "def dashboard\n end", "def dashboard\n end", "def dashboard\n end", "def home_page\n return root_path unless current_user\n return admins_path if current_user.has_role? :super_admin\n return room_path(current_user.main_room) if current_user.role&.get_permission(\"can_create_rooms\") && current_user.main_room.present?\n cant_create_rooms_path\n end", "def landing\n redirect_to dashboard_path if current_user\n end", "def admin_user\n redirect_to(admin_page_url) if current_user.admin?\n end", "def index\r\n # State machine stuff\r\n current_user.came_home\r\n eval current_user.redirect, binding()\r\n return\r\n end", "def home\n redirect_to user_path(current_user)\n end", "def admin_signin_status\n unless user_signed_in? && is_admin?\n redirect_to root_url\n end\n end", "def pcb_admin_home\n redirect_to(:action => 'index')\n end", "def admin_user\n redirect_to(news_index_path) unless is_admin?\n end", "def set_admin_dashboard\n @admin_dashboard = Admin::Dashboard.find(params[:id])\n end", "def admin_agent\n redirect_to(root_url) unless current_agent.admin?\n end", "def admin_user\n redirect_to(root_url) unless current_user.admin? # se current_user.admin for falso redireciona para pagina principal\n end", "def dashboard\n\n end", "def dashboard\n\n end", "def dashboard\r\n end", "def dashboard_admin\n @user = current_user\n @users = User.all\n @grid = AdminsGrid.new(params[:admins_grid]) do |scope|\n scope.page(params[:page]).per(10)\n end\n @index_path = request.path\n end", "def home\n # check if signed_in and then display (you are currently signed in, view your profile)\n # automatically renders page 'users/home'\n if signed_in?\n redirect_to current_user\n else\n @event = Event.find(ENV['demopage'].to_i)\n @event_code = @event.event_code\n @url = demo_record_vg_url(:event_code => @event.event_code)\n\n\n @first_plan = Plan.find_by_my_plan_id(plan_set_one) # sets @first_plan the first plan object ACCORDING TO MY LEGEND (with my_plan_id)\n @second_plan = Plan.find_by_my_plan_id(plan_set_two)\n @third_plan = Plan.find_by_my_plan_id(plan_set_three)\n render :unsigned_home, :layout => nil\n end\n\n end", "def home\n\t\t# Home Page\n\tend", "def after_sign_in_path_for(user)\n user.is_admin? ? admin_dashboard_path : root_path\n end", "def admin_user\n redirect_to(admin_page_url) if current_user.admin?\n end", "def admin_user\n\t\t\tredirect_to(root_url) unless current_user.admin?\n\t end", "def admin_user\n if logged_in?\n redirect_to(root_url) unless current_user.admin?\n else\n flash[:danger] = \"You reached an invalid url and have been redirected to the home page.\"\n redirect_to(root_url)\n end\n end", "def set_home\n end", "def logged_in_admin\n unless logged_in?\n store_location\n flash[:danger] = \"Please log in.\"\n redirect_to admin_login_path\n end\n end", "def admin_user\n\t\t\tredirect_to(root_url) unless current_user.admin?\n\t\tend", "def admin_user\n\t\t\tredirect_to(root_url) unless current_user.admin?\n\t\tend", "def home\n render 'users/homepage'\n end", "def admin_user\n redirect_to(root_url) unless user_signed_in? && current_user.admin?\n end", "def admin_user\n redirect_to(root_url) unless user_signed_in? 
&& current_user.admin?\n end", "def admin_user\n unless admin_user?\n redirect_to login_url\n end\n end", "def landing_page\n if current_user\n redirect_to actions_path(current_user[:id])\n end\n end", "def admin_user\n redirect_to(root_url) unless current_user.is_admin?\n end", "def admin_user\n redirect_to(root_url) unless current_user.is_admin?\n end", "def admin_user\n redirect_to(root_url) unless current_user.is_admin?\n end", "def admin_user\n redirect_to(root_url) unless current_user.is_admin?\n end", "def index\n if user_signed_in?\n redirect_to dashboard_path\n else\n render :index\n #redirect_to new_user_session_url(subdomain: ENV[\"SUBDOMAIN\"])\n end\n end", "def redirect_to_home_if_user_requests_admin\n if params[:controller].scan(/^rails_admin/).length > 0\n unless user_signed_in? && current_user.role == 'admin' \n redirect_to \"/#{I18n.locale}\"\n end\n end\n end", "def index\n if session[:admin_id] != nil\n redirect_to admin_path(Admin.find(session[:admin_id]))\n else\n @admin = Admin.new\n end\n end", "def home_page\r\nif self.has_profile?('Admin', 'Faveod User')\r\n {:controller => :system_settings}\r\nelse\r\n {:controller => :users, :action => :show, :id => self.id}\r\nend\r\nend", "def dashboard\n 'dashboard'\n end", "def admin_user\n \t\tredirect_to(root_url) unless current_user.admin?\n \tend", "def set_admin_landing_page\n @landing_page = LandingPage.find(params[:id])\n end", "def admin_user\n redirect_to root_url unless current_user.admin?\n end", "def admin_user\n redirect_to root_url unless current_user.admin?\n end", "def admin_actions\n unless @current_admin.is_super_admin\n flash[:error]=\"You are not authorized to navigate to this page \"\n redirect_to admin_index_path\n return\n end\n end", "def index\n if store\n if super_user?\n redirect_to edit_store_path(store)\n else\n if store.current_menu\n redirect_to menu_path(store.current_menu)\n else\n redirect_to menus_path, :alert => \"#{store.name} is currently closed.\"\n end\n end\n else\n redirect_to new_store_path\n end\n end", "def admin_user\n\t\tredirect_to(root_url) unless current_user.admin?\n\tend", "def admin_user\n\t\tredirect_to(root_url) unless current_user.admin?\n\tend", "def admin_user\n\t\tredirect_to(root_url) unless current_user.admin?\n\tend", "def admin\n if user_signed_in?\n \t redirect_to members_url\n \telse\n \t redirect_to new_user_session_path\n \t end\n \n end", "def dashboard\n\tend", "def show\n @main_page = \"Admin\"\n @page_title = \"Configuração\"\n end", "def dashboard; end", "def admin_user\n redirect_to(root_url) unless current_user.is_admin?\n end", "def admin_user\n redirect_to(root_url) unless current_user.is_admin?\n end", "def admin_user\n redirect_to(root_url) unless current_user.admin?\n end", "def admin_user\n redirect_to(root_url) unless current_user.admin?\n end", "def admin_user\n redirect_to(root_url) unless current_user.admin?\n end", "def admin_user\n redirect_to(root_url) unless current_user.admin?\n end" ]
[ "0.7092553", "0.7062843", "0.7044631", "0.69345635", "0.6927658", "0.68333244", "0.682641", "0.6808186", "0.67994493", "0.6759342", "0.6737024", "0.6721251", "0.6655804", "0.6628852", "0.66228056", "0.6607425", "0.6605968", "0.6604882", "0.65962106", "0.65921307", "0.653976", "0.6535581", "0.65017885", "0.64970493", "0.64970493", "0.6496091", "0.64959514", "0.6494358", "0.6482601", "0.6463062", "0.64569587", "0.64547384", "0.645322", "0.6450476", "0.6450079", "0.64451456", "0.64396274", "0.6427021", "0.6419886", "0.6419886", "0.6419886", "0.6419886", "0.6419886", "0.6419064", "0.6411656", "0.6411007", "0.64103025", "0.6406735", "0.6405938", "0.6404449", "0.64028186", "0.6395058", "0.63881075", "0.6378741", "0.63708377", "0.63708377", "0.6366287", "0.63540196", "0.6348343", "0.63455826", "0.63380873", "0.63362896", "0.63308835", "0.63278234", "0.63260794", "0.6325465", "0.6317829", "0.6317829", "0.6315531", "0.63138646", "0.63138646", "0.63074297", "0.62895054", "0.6283594", "0.6283594", "0.6283594", "0.6283594", "0.6281829", "0.627691", "0.62745446", "0.62731504", "0.62695754", "0.62689376", "0.6264696", "0.6262993", "0.6262993", "0.6256054", "0.62559265", "0.6250635", "0.6250635", "0.6250635", "0.6248396", "0.62460464", "0.6239713", "0.6237219", "0.6234282", "0.6234282", "0.62311184", "0.6230507", "0.6230507", "0.6230507" ]
0.0
-1
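The admin-gating snippets in the list above all repeat one Rails before_action pattern. A minimal consolidated sketch of that pattern follows; the controller name is an assumption, and logged_in? / current_user.admin? are the helpers the fragments themselves rely on:

class Admin::BaseController < ApplicationController
  # Run the same admin check the fragments above repeat, before every action.
  before_action :admin_user

  private

  # Send anyone who is not a signed-in admin back to the home page.
  def admin_user
    redirect_to root_url unless logged_in? && current_user.admin?
  end
end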
Create a new campaign. Requires a name, templateId and at least one listId
def create(name, template_id, list_ids = [], attrs = {})
  body = attrs.merge(name: name, templateId: template_id, listIds: list_ids)
  Iterable.request(conf, '/campaigns/create').post(body)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mccreate_campaign_template(type, opts)\n # setup_mcapi.campaigns.create(\"regular\", {list_id: \"a38ec3df9c\", subject: \"Gibbon is cool\", from_email: \"[email protected]\", from_name: \"Darth Vader\", template_id: 198041, generate_text: true}, {})\n setup_mcapi.campaigns.create(type, opts, content = nil, segment_opts = nil, type_opts = nil) \n end", "def create(name, id=nil)\n campaign_opts = {name: name}\n campaign_opts = campaign_opts.merge({id: id}) unless id.nil?\n Mailgun.submit(:post, campaign_url, campaign_opts)\n end", "def mccreate_campaign(type, opts, content)\n # create(type, options, content, segment_opts = nil, type_opts = nil) \n # setup_mcapi.campaigns.create(\"regular\", {list_id: \"a38ec3df9c\", subject: \"Gibbon is cool\", from_email: \"[email protected]\", from_name: \"Darth Vader\", generate_text: true}, {template_id: \"<html><head></head><body><h1>Foo</h1><p>Bar</p></body></html>\"})\n \n begin\n campaign = setup_mcapi.campaigns.create(type, {list_id: opts[\"list_id\"], subject: opts[\"subject\"], from_email: opts[\"from_email\"], from_name: opts[\"from_name\"], generate_text: true}, {html: content}) \n error = {'error' => true, 'campaign' => campaign[\"id\"]}\n rescue Mailchimp::Error => e\n puts \"A Mailchimp error occurred\"\n error = {'error' => false, 'campaign' => nil}\n end \n return error\n end", "def create\n authenticate_user!\n authorize! :index, @user, :message => 'Not authorized as an administrator.'\n \n @campaign = Campaign.new(params[:campaign])\n\n respond_to do |format|\n if @campaign.save\n format.html { redirect_to @campaign, notice: 'Campaign was successfully created.' }\n format.json { render json: @campaign, status: :created, location: @campaign }\n else\n format.html { render action: \"new\" }\n format.json { render json: @campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_campaign(opts = {})\n data, _status_code, _headers = create_campaign_with_http_info(opts)\n return data\n end", "def create\n @campaign = Campaign.new(campaign_params)\n\n respond_to do |format|\n if @campaign.save\n format.html { redirect_to @campaign, notice: 'Campaign was successfully created.' }\n format.json { render :show, status: :created, location: @campaign }\n else\n format.html { render :new }\n format.json { render json: @campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @campaign = Campaign.new(campaign_params)\n\n respond_to do |format|\n if @campaign.save\n format.html { redirect_to @campaign, notice: 'Campaign was successfully created.' }\n format.json { render :show, status: :created, location: @campaign }\n else\n format.html { render :new }\n format.json { render json: @campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @campaign = Campaign.new(params[:campaign])\n\n respond_to do |format|\n if @campaign.save\n format.html { redirect_to @campaign, notice: 'Campaign was successfully created.' }\n format.json { render json: @campaign, status: :created, location: @campaign }\n else\n format.html { render action: \"new\" }\n format.json { render json: @campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n return false unless self.valid? 
\n \n # set defaults for budget for campaign.create only\n self.budget = budget.reverse_merge( period: 'DAILY', delivery_method: 'STANDARD' )\n\n # create basic campaign attributes\n operand = Hash[\n [ :name, :status, :start_date, :end_date,\n :budget, :bidding_strategy, :network_setting, :settings ].map do |k|\n [ k.to_sym, self.send(k) ] if self.send(k)\n end.compact\n ]\n\n # set default values for settings (for create only - should we set it also for update?)\n # PS: KeywordMatchSetting is required since 201206\n operand[:settings] ||= []\n unless operand[:settings].map { |s| s[:xsi_type] }.include?('KeywordMatchSetting')\n operand[:settings] << { :xsi_type => 'KeywordMatchSetting', :opt_in => false }\n end\n\n response = self.mutate( \n operator: 'ADD', \n operand: operand\n )\n\n check_for_errors(self)\n\n self.id = response[:value].first[:id] rescue nil\n \n if criteria && criteria.size > 0\n new_criteria = Adapi::CampaignCriterion.create(\n campaign_id: @id,\n criteria: criteria\n )\n\n check_for_errors(new_criteria)\n end\n\n ad_groups.each do |ad_group_data|\n ad_group = Adapi::AdGroup.create(\n ad_group_data.merge( campaign_id: @id )\n )\n\n check_for_errors(ad_group, :prefix => \"AdGroup \\\"#{ad_group[:id] || ad_group[:name]}\\\"\")\n end\n\n self.errors.empty?\n\n rescue CampaignError => e\n false\n end", "def create\n @campaign = Campaign.new(params[:campaign])\n\n respond_to do |format|\n if @campaign.save\n format.html { redirect_to '/' + @campaign.path, notice: 'Campaign was successfully created.' }\n format.json { render json: @campaign, status: :created, location: @campaign }\n else\n format.html { render action: \"new\" }\n format.json { render json: @campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @campaign = Campaign.new(params[:campaign])\n\n respond_to do |format|\n if @campaign.save\n format.html { redirect_to @campaign, :notice => 'Campaign was successfully created.' 
}\n format.json { render :json => @campaign, :status => :created, :location => @campaign }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @campaign.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @campaign = Campaign.create!(name: params[:name], dm_id: params[:dm_id], theme: params[:theme], setting: params[:setting], calendar_id: params[:calendar_id], starting_level: params[:starting_level], skillset_id: params[:skillset_id], custom_notes: params[:custom_notes], starting_month: params[:month], starting_weekday: params[:weekday], starting_day: params[:day], starting_year: params[:year], starting_age: params[:age], current_month: params[:month], current_weekday: params[:weekday], current_day: params[:day], current_year: params[:year], current_age: params[:age])\n\n params[:race_ids].each do |ri|\n CampaignRace.create!(campaign_id: @campaign.id, race_id: ri)\n end\n\n params[:klass_ids].each do |ki|\n CampaignKlass.create!(campaign_id: @campaign.id, klass_id: ki)\n end\n\n render json: @campaign, status: 201\n\n end", "def create_campaign_from_template(application_id, body, opts = {})\n data, _status_code, _headers = create_campaign_from_template_with_http_info(application_id, body, opts)\n data\n end", "def create\n @campaign = Campaign.new(create_campaign_params)\n @campaign.start_date = DateTime.now\n @campaign.user = get_current_user\n if @campaign.save\n render json: @campaign, status: :created\n else\n render json: @campaign.errors, status: :unprocessable_entity\n end\n end", "def create\n @campaign = Campaign.new(campaign_params)\n @campaign.users << current_user\n\n respond_to do |format|\n if @campaign.save\n format.html { redirect_to tenant_campaign_path(@campaign, tenant_id: @tenant.id), notice: 'Campaign was successfully created.' }\n else\n format.html { render :new }\n end\n end\n end", "def create\n @campaign = Campaign.new(campaign_params)\n\n begin\n ActiveRecord::Base.transaction do\n # 両方の Validation を実行しておく\n campaign_invalid = @campaign.invalid?\n\n if campaign_invalid\n render :new\n return\n end\n\n @campaign.save!\n\n # 対応するオファーを作成\n @campaign.update_related_offers\n\n redirect_to :action => 'index', notice: 'Campaign was successfully created.'\n end\n rescue => e\n logger.error e\n render :new\n end\n end", "def create\n @vkontakte_campaign = VkontakteCampaign.new(params[:vkontakte_campaign])\n\n respond_to do |format|\n if @vkontakte_campaign.save\n format.html { redirect_to @vkontakte_campaign, notice: 'Vkontakte campaign was successfully created.' 
}\n format.json { render json: @vkontakte_campaign, status: :created, location: @vkontakte_campaign }\n else\n format.html { render action: \"new\" }\n format.json { render json: @vkontakte_campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_campaign_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: CampaignsApi.create_campaign ...\"\n end\n # resource path\n local_var_path = \"/campaigns\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(opts[:'campaign_resource'])\n auth_names = ['oauth2_client_credentials_grant', 'oauth2_password_grant']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'CampaignResource')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: CampaignsApi#create_campaign\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create\n @campaign = current_user.campaigns.new(params[:campaign])\n\n respond_to do |format|\n if @campaign.save\n format.html { redirect_to(@campaign, :notice => 'Campaign was successfully created.') }\n format.xml { render :xml => @campaign, :status => :created, :location => @campaign }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @campaign.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @campaign = Campaign.new(campaign_params)\n\n if @campaign.save\n render json: @campaign\n else\n render json: @campaign.errors, status: :unprocessable_entity\n end\n end", "def create\n @admins_campaign = Admins::Campaign.new(admins_campaign_params)\n respond_to do |format|\n if @admins_campaign.save\n format.html { redirect_to @admins_campaign, notice: 'Campaign was successfully created.' 
}\n format.json { render :show, status: :created, location: @admins_campaign }\n else\n format.html { render :new }\n format.json { render json: @admins_campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_campaign_template_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: CampaignsApi.create_campaign_template ...\"\n end\n # resource path\n local_var_path = \"/campaigns/templates\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(opts[:'campaign_template_resource'])\n auth_names = ['oauth2_client_credentials_grant', 'oauth2_password_grant']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'TemplateResource')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: CampaignsApi#create_campaign_template\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create\n @campaign = Campaign.new(campaign_params)\n @campaign.user_id = current_user[:id]\n\n respond_to do |format|\n if @campaign.save\n puts \"New campaign created: #{@campaign.inspect}\" \n\n if Rails.env.production?\n tracker = Mixpanel::Tracker.new(Generic.get_mixpanel_key)\n tracker.track(@campaign.user_id, 'Campaign Created')\n end \n\n format.html {\n redirect_to new_campaign_message_path(@campaign)\n }\n format.json { render :show, status: :created, location: @campaign }\n else\n format.html { render :new }\n format.json { render json: @campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\n Campsite.create(\n name: params[:name],\n location: params[:location],\n campsite_type: params[:campsite_type],\n max_people: params[:max_people],\n per_night: params[:per_night]\n )\n\n # CREATE has no template of its own:\n # redirect to index page\n redirect_to( campsites_path )\n\n end", "def launch_campaign\n campaign_info = platform_campaign.create\n if campaign_info.present?\n self.dsp_campaign_id = campaign_info[:id]\n end\n campaign_info\n end", "def valid_post\n post :create, params: { campaign: FactoryGirl.attributes_for(:campaign) }\n end", "def create_campaign_template(opts = {})\n data, _status_code, _headers = create_campaign_template_with_http_info(opts)\n return data\n end", "def create\n @platform_campaign = PlatformCampaign.new(platform_campaign_params)\n\n respond_to do |format|\n if @platform_campaign.save\n format.html { redirect_to @platform_campaign, notice: 'Platform campaign was successfully created.' 
}\n format.json { render :show, status: :created, location: @platform_campaign }\n else\n format.html { render :new }\n format.json { render json: @platform_campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_campaign_from_template_with_http_info(application_id, body, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ManagementApi.create_campaign_from_template ...'\n end\n # verify the required parameter 'application_id' is set\n if @api_client.config.client_side_validation && application_id.nil?\n fail ArgumentError, \"Missing the required parameter 'application_id' when calling ManagementApi.create_campaign_from_template\"\n end\n # verify the required parameter 'body' is set\n if @api_client.config.client_side_validation && body.nil?\n fail ArgumentError, \"Missing the required parameter 'body' when calling ManagementApi.create_campaign_from_template\"\n end\n # resource path\n local_var_path = '/v1/applications/{applicationId}/create_campaign_from_template'.sub('{' + 'applicationId' + '}', CGI.escape(application_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:body] || @api_client.object_to_http_body(body) \n\n # return_type\n return_type = opts[:return_type] || 'CreateTemplateCampaignResponse' \n\n # auth_names\n auth_names = opts[:auth_names] || ['management_key', 'manager_auth']\n\n new_options = opts.merge(\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ManagementApi#create_campaign_from_template\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create\n @campaign = Campaign.new(params[:campaign])\n @campaign.build_campaign_stat\n @campaign.campaign_stats_custom.build\n respond_to do |format|\n if @campaign.save\n \n format.html { redirect_to @campaign, notice: 'Campaign was successfully created and sent for review.' }\n format.json { render json: @campaign, status: :created, location: @campaign }\n else\n format.html { render action: \"new\" }\n format.json { render json: @campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\n @template = Template.find(params[:template_id])\n @template_parameter = @template.template_parameters.create(template_parameter_params)\n redirect_to template_path(@template)\n #@template_parameter = TemplateParameter.new(template_parameter_params)\n end", "def create\n @campaign = Campaign.find(params[:campaign_id])\n @project = @campaign.projects.create(params[:project])\n respond_to do |format|\n if @project.update_attributes(params[:project])\n format.html { redirect_to campaign_path(@campaign), notice: 'Project was successfully created.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @project.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @advertisement_campaign = AdvertisementCampaign.new(advertisement_campaign_params)\n\n respond_to do |format|\n if @advertisement_campaign.save\n format.html { redirect_to @advertisement_campaign, notice: 'Advertisement campaign was successfully created.' }\n format.json { render action: 'show', status: :created, location: @advertisement_campaign }\n else\n format.html { render action: 'new' }\n format.json { render json: @advertisement_campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @user_campaign = UserCampaign.new(user_campaign_params)\n\n respond_to do |format|\n if @user_campaign.save\n format.html { redirect_to @user_campaign, notice: 'User campaign was successfully created.' }\n format.json { render :show, status: :created, location: @user_campaign }\n else\n format.html { render :new }\n format.json { render json: @user_campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create name, template_group\n raise ArgumentError, \"Fleet #{name} already exists\" if @resource.fleet(name).exists?\n raise ArgumentError, \"Group is already in a fleet\" if template_group.fleet\n\n template_group.set_fleet(name, \"template\")\n @resource.fleet(name)\n end", "def create\r\n\t\tif !current_user\r\n\t\t\tredirect_to '/'\r\n\t\t\treturn\r\n\t\tend\r\n\t\tif current_user.additionsThisRound >= Round.first.maxAdditionsPerUser + current_user.abilities.find_by(ability_id: 4).charges\r\n\t\t\trespond_to do |format|\r\n\t\t\t\tmsg = \"<span class=\\\"alert alert-warning\\\">You have exceeded your submission limit for this round.</span>\"\r\n\t\t\t\tformat.html { redirect_to current_user, notice: msg }\r\n\t\t\t\tformat.json { render :show, location: current_user }\r\n\t\t\tend\r\n\t\t\treturn\r\n\t\tend\r\n\r\n\t\t@campaign = Campaign.new(campaign_params)\r\n\r\n\t\tlink = campaign_params[:link]\r\n\t\tprovider = link.sub(\"https://\", \"\").sub(\"http://\", \"\").split(\"/\")[0]\r\n\r\n\t\tcase provider\r\n\t\twhen *Crowdfunding_site.pluck(:domain)\r\n\t\t\tif Campaign.exists?(link: link)\r\n\t\t\t\tnominate(link, provider)\r\n\t\t\telse\r\n\t\t\t\tadd(provider)\r\n\t\t\tend\r\n\t\telse\r\n\t\t\trepond_to do |format|\r\n\t\t\t\tmsg = \"<span class=\\\"alert alert-warning\\\">The site this URL is pointing at is not supported.</span>\"\r\n\t\t\t\tformat.html { redirect_to current_user, notice: msg }\r\n\t\t\t\tformat.json { render :show, location: current_user }\r\n\t\t\tend\r\n\t\tend\r\n\tend", "def create_card(list_id, name = nil, options = {})\n post \"cards\", options.merge(name: name, list_id: resource_id(list_id))\n end", "def create\n @marketing_campaign = MarketingCampaign.new(params[:marketing_campaign])\n \n @marketing_campaign.contact_groups = Array.new\n \n if(params[:contact_groups] != nil)\n\t\t params[:contact_groups].each do |id|\n\t\t \t@marketing_campaign.contact_groups << ContactGroup.find(id[0])\n\t\t\tend\n\t\tend\n\t\t\n\t\t@marketing_campaign.next_status\n\n respond_to do |format|\n if @marketing_campaign.save\n format.html { redirect_to @marketing_campaign, notice: 'Marketing campaign was successfully created.' 
}\n format.json { render json: @marketing_campaign, status: :created, location: @marketing_campaign }\n else\n format.html { render action: \"new\" }\n format.json { render json: @marketing_campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n authorize Template\n args = template_params\n # Swap in the appropriate visibility enum value for the checkbox value\n args[:visibility] = parse_visibility(args, current_user.org)\n\n # creates a new template with version 0 and new family_id\n @template = Template.new(args)\n @template.org_id = current_user.org.id\n @template.locale = current_org.language.abbreviation\n @template.links = if params['template-links'].present?\n ActiveSupport::JSON.decode(params['template-links'])\n else\n { funder: [], sample_plan: [] }\n end\n if @template.save\n redirect_to edit_org_admin_template_path(@template),\n notice: success_message(@template, _('created'))\n else\n flash.now[:alert] = flash.now[:alert] = failure_message(@template, _('create'))\n render :new\n end\n end", "def create\n @message_template = @company.message_templates.build(params[:message_template])\n\n respond_to do |format|\n if @message_template.save\n format.html { redirect_to [@company,@message_template], notice: 'Message template was successfully created.' }\n format.json { render json: @message_template, status: :created, location: @message_template }\n else\n format.html { render action: \"new\" }\n format.json { render json: @message_template.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @ad_boards_campaign = AdBoardsCampaign.new(params[:ad_boards_campaign])\n\n respond_to do |format|\n if @ad_boards_campaign.save\n flash[:notice] = 'AdBoardsCampaign was successfully created.'\n format.html { redirect_to(@ad_boards_campaign) }\n format.xml { render :xml => @ad_boards_campaign, :status => :created, :location => @ad_boards_campaign }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @ad_boards_campaign.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n restricted\n @template = Template.new(template_params)\n\n respond_to do |format|\n if @template.save\n format.html { redirect_to @template, notice: 'Template was successfully created.' 
}\n format.json { render :show, status: :created, location: @template }\n else\n format.html { render :new }\n format.json { render json: @template.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_smart_display_campaign(client, customer_id, budget)\n # Creates a campaign operation.\n operation = client.operation.create_resource.campaign do |c|\n c.name = \"Smart Display Campaign #{(Time.new.to_f * 1000).to_i}\"\n # Smart Display campaign requires the advertising_channel_type as 'DISPLAY'.\n c.advertising_channel_type = :DISPLAY\n # Smart Display campaign requires the advertising_channel_sub_type as\n # 'DISPLAY_SMART_CAMPAIGN'.\n c.advertising_channel_sub_type = :DISPLAY_SMART_CAMPAIGN\n # Smart Display campaign requires the TargetCpa bidding strategy.\n c.target_cpa = Google::Ads::GoogleAds::V8::Common::TargetCpa.new\n c.target_cpa.target_cpa_micros = 5_000_000\n c.campaign_budget = budget\n # Optional: Sets the start and end dates for the campaign, beginning one day\n # from now and ending a month from now.\n c.start_date = DateTime.parse((Date.today + 1).to_s).strftime('%Y%m%d')\n c.end_date = DateTime.parse(Date.today.next_month.to_s).strftime('%Y%m%d')\n end\n\n # Issues a mutate request to add the campaign.\n response = client.service.campaign.mutate_campaigns(\n customer_id: customer_id,\n operations: [operation],\n )\n\n # Prints out some information about the created campaign.\n resource_name = response.results.first.resource_name\n puts \"Added a smart display campaign named #{resource_name}\"\n\n resource_name\nend", "def create\n @call_template = CallTemplate.new(call_template_params)\n\n respond_to do |format|\n if @call_template.save\n format.html { redirect_to @call_template, notice: 'Call template was successfully created.' }\n format.json { render :show, status: :created, location: @call_template }\n else\n format.html { render :new }\n format.json { render json: @call_template.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_list(options={})\n database_id = options[:database_id]\n list_name = options[:contact_list_name]\n visibility = options[:visibility]\n\n raise ArgumentError, \":database_id option is required\" unless database_id\n raise ArgumentError, \":list_name option is required\" unless list_name\n raise ArgumentError, \":visibility option is required\" unless visibility\n\n request_body = ''\n xml = Builder::XmlMarkup.new(:target => request_body, :indent => 1)\n xml.instruct!\n xml.Envelope do\n xml.Body do\n xml.CreateContactList do\n xml.DATABASE_ID database_id\n xml.CONTACT_LIST_NAME list_name\n xml.VISIBILITY visibility\n end\n end\n end\n\n doc = send_xml_api_request(request_body)\n result_dom(doc)['CONTACT_LIST_ID']\n end", "def create\n @campaign = Campaign.new(params[:campaign])\n\n respond_to do |format|\n if @campaign.save\n format.html { redirect_to campaigns_path, notice: 'Campaign was successfully created.' }\n format.json { render json: @campaign, status: :created, location: @campaign }\n format.js\n else\n format.html { render action: \"new\" }\n format.json { render json: @campaign.errors, status: :unprocessable_entity }\n format.js\n end\n end\n end", "def create\n @creative = Creative.new(creative_params)\n @creative.campaign = @campaign\n\n respond_to do |format|\n if @creative.save\n format.html { redirect_to [@campaign, @creative], notice: 'Вид рекламы создан.' 
}\n format.json { render :show, status: :created, location: @creative }\n else\n format.html { render :new }\n format.json { render json: @creative.errors, status: :unprocessable_entity }\n end\n end\n end", "def campaign_params\n params.require(:campaign).permit(:name, :target_group_id)\n end", "def create\n @asset_category_template = AssetCategoryTemplate.new(asset_category_template_params)\n\n respond_to do |format|\n if @asset_category_template.save\n format.html { redirect_to @asset_category_template, notice: 'Asset category template was successfully created.' }\n format.json { render action: 'show', status: :created, location: @asset_category_template }\n else\n format.html { render action: 'new' }\n format.json { render json: @asset_category_template.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @campaign = Campaign.new\n end", "def new\n @campaign = Campaign.new\n end", "def new\n @campaign = Campaign.new\n end", "def create\n @campaign = Campaign.new(params[:campaign])\n @campaign.admin_id = @organization.admin_id \n respond_to do |format|\n if @campaign.save\n flash[:notice] = 'Campaign was successfully created.'\n format.html { redirect_to orgadmin_organization_campaign_url(@organization, @campaign) }\n format.xml { render :xml => @campaign, :status => :created, :location => @campaign }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @campaign.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @bulk_message_template = BulkMessageTemplate.new(params[:bulk_message_template])\n\n respond_to do |format|\n if @bulk_message_template.save\n format.html { redirect_to @bulk_message_template, notice: 'Bulk message template was successfully created.' }\n format.json { render json: @bulk_message_template, status: :created, location: @bulk_message_template }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bulk_message_template.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @voice_campaign = VoiceCampaign.new(voice_campaign_params)\n @voice_campaign.user = current_user\n\n respond_to do |format|\n if @voice_campaign.save\n format.html { redirect_to @voice_campaign, notice: 'Voice campaign was successfully created.' }\n format.json { render :show, status: :created, location: @voice_campaign }\n else\n format.html { render :new }\n format.json { render json: @voice_campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_card(options = {})\n trello = trello_api\n card_options = {\n :name => card_title,\n :list => list_id,\n :description => card_body,\n }.merge(options)\n trello.create_card(card_options)\n end", "def create_contact(project_name, optional={})\n\t\targs = self.class.new_params\n\t\targs[:method] = 'POST'\n\t\targs[:path]['ProjectName'] = project_name\n\t\targs[:pattern] = '/projects/[ProjectName]/contacts'\n\t\targs[:query]['Action'] = 'CreateContact'\n\t\targs[:region] = optional[:_region] if (optional.key? :_region)\n\t\targs[:scheme] = 'http'\n\t\tif optional.key? :contact\n\t\t\targs[:body]['Contact'] = optional[:contact]\n\t\tend\n\t\tself.run(args)\n\tend", "def create\n @campaign_result = CampaignResult.new(params[:campaign_result])\n\n respond_to do |format|\n if @campaign_result.save\n format.html { redirect_to @campaign_result, notice: 'Campaign result was successfully created.' 
}\n format.json { render json: @campaign_result, status: :created, location: @campaign_result }\n else\n format.html { render action: \"new\" }\n format.json { render json: @campaign_result.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @correspondence_template = CorrespondenceTemplate.new(params[:correspondence_template])\n\n @correspondence_template.department_id = current_user.department_id\n\n respond_to do |format|\n if @correspondence_template.save\n flash[:notice] = 'Correspondence Template was successfully created.'\n format.html { redirect_to(@correspondence_template) }\n format.xml { render :xml => @correspondence_template, :status => :created, :location => @correspondence_template }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @correspondence_template.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n delivery_list = DeliveryList.find(params[:delivery_list_id])\n authorize delivery_list\n authorize @record\n authorize DeliverySubject\n DeliverySubject.create(\n delivery_list_id: delivery_list.id,\n deliverable_type: @record.class.model_name,\n deliverable_id: @record.id\n )\n render 'renew_list_subjects'\n end", "def create_campaign_using_post_with_http_info(campaign_request, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: MarketingApi.create_campaign_using_post ...'\n end\n # verify the required parameter 'campaign_request' is set\n if @api_client.config.client_side_validation && campaign_request.nil?\n fail ArgumentError, \"Missing the required parameter 'campaign_request' when calling MarketingApi.create_campaign_using_post\"\n end\n # resource path\n local_var_path = '/campaign'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['*/*'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(campaign_request)\n auth_names = ['oauth2']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'Campaign')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: MarketingApi#create_campaign_using_post\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create_campaign_using_post(campaign_request, opts = {})\n data, _status_code, _headers = create_campaign_using_post_with_http_info(campaign_request, opts)\n data\n end", "def campaign_params\n params.require(:campaign).permit(:title, :description, :campaign_type_id)\n end", "def create\n @campaign_commitee = CampaignCommitee.new(campaign_commitee_params)\n\n respond_to do |format|\n if @campaign_commitee.save\n format.html { redirect_to @campaign_commitee, notice: 'Campaign commitee was successfully created.' 
}\n format.json { render :show, status: :created, location: @campaign_commitee }\n else\n format.html { render :new }\n format.json { render json: @campaign_commitee.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n sha1 = params[:sha1] || params[:h] || params[:id]\n @campaign = Campaign.cache_it.find(:sha1 => sha1, :archived => false) if sha1\n if @campaign.match_time_zone_flag\n puts(\"request#{request.inspect}\")\n session[:req] = request.fullpath.to_s\n session[:ref] = request.referrer || ''\n @request = request\n else\n req = request.fullpath.to_s\n ref = request.referrer || ''\n lp = Hit.select_lp_from_request(request, @campaign,params[:time_zone],req,ref)\n respond_to do |format|\n format.html { return redirect_to redirection_url(lp) }\n format.js { return render :inline => (lp == :real_lp ? \"top.location.replace('#{redirection_url(lp)}')\" : \"\") }\n end\n return render :text => nil, :layout => false unless @campaign\n end\n end", "def add_to_campaign(contact_id, campaign_id, additional_fields = {})\n cm = additional_fields\n cm['CampaignId'] = campaign_id\n\n if cm['CampaignId']\n cm['ContactId'] = contact_id\n\n begin\n cm = client.create('CampaignMember', cm)\n Rails.cache.delete(\"salesforce/user_campaigns/#{contact_id}\")\n return cm['Id']\n rescue Databasedotcom::SalesForceError => e\n # If this failure happens, it is almost certainly just because they\n # are already in the campaign \n # logger.debug \"#{e} #{contact_id} #{campaign_id}\"\n end\n end\n\n nil\n end", "def campaign_params\n params.require(:campaign).permit(:name)\n end", "def campaign_params\n params.require(:campaign).permit(:name)\n end", "def campaign_params\n params.require(:campaign).permit(:name)\n end", "def campaign_params\n params.require(:campaign).permit(:name)\n end", "def create_card\n trello = trello_api\n card_options = {\n :name => card_title,\n :list => list_id,\n :description => card_body,\n }\n trello.create_card(card_options)\n end", "def create\n return nil if created?\n request :create_creative_on_bidstalk do\n client = Bidstalk::Creative::Client.new\n client.create to_platform_creative!\n end\n end", "def create\n @template = Template.new(template_params)\n\n respond_to do |format|\n if @template.save\n format.html {\n redirect_to @template,\n notice: \"Template #{@template.template_name} was successfully created.\"\n }\n format.json {\n render :show, status: :created, location: @template\n }\n else\n format.html { render :new }\n format.json { render json: @template.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n # 添加了活动\n\t@campaign = Campaign.find(params[:campaign_id])\n\n\n # 防止重复提交\n\t@apply_record = Apply.where(\"campaign_id = #{@campaign.id} and mobile = #{params[:apply][:mobile]}\")\n\tif not @apply_record.empty?\n flash[:message] = \"您已经成功预约了参与此次讲座活动:#{@campaign.name}\"\n redirect_to bm_xmjz_success_path\n return\n else\n @apply = @campaign.applies.create(params[:apply])\n\t if @apply.save\n flash[:message] = \"您已经成功预约了参与此次讲座活动:#{@campaign.name}\"\n redirect_to bm_xmjz_success_path\n return\n else\n flash.now[:error] = \"您填写的信息有误,请重新填写\"\n render :action => \"new\"\n return\n end\n end\n\n\n\n end", "def campaign_params\n params.require(:campaign).permit(:title, :description, :tenant_id)\n end", "def create_sms_campaign(create_sms_campaign, opts = {})\n data, _status_code, _headers = create_sms_campaign_with_http_info(create_sms_campaign, opts)\n data\n end", "def create\n @survey_template = 
@organization.survey_templates.build(survey_template_params)\n authorize @survey_template\n\n respond_to do |format|\n if @survey_template.save\n format.html { redirect_to @survey_template, notice: 'Template was successfully created.' }\n format.json { render :show, status: :created, location: @survey_template }\n else\n @record = [@organization, @survey_template]\n format.html { render :new }\n format.json { render json: @survey_template.errors, status: :unprocessable_entity }\n end\n end\n end", "def create(options={})\n title = options[:title]\n summary = options[:summary]\n template = options[:template]\n token = options[:token]\n\n GoodData::Project.create(:title => title, :summary => summary, :template => template, :auth_token => token)\n end", "def create\n @coach_template = CoachTemplate.new(params[:coach_template])\n\n respond_to do |format|\n if @coach_template.save\n flash[:notice] = 'CoachTemplate was successfully created.'\n format.html { redirect_to(@coach_template) }\n format.xml { render :xml => @coach_template, :status => :created, :location => @coach_template }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @coach_template.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @template = Template.new(template_params)\n if @template.save\n respond_ok \"template\", @template\n else\n respond_err \"template\", @template, @template.errors\n end\n end", "def create_ad_group(client, customer_id, campaign_resource_name)\n ad_group = client.resource.ad_group do |ag|\n ag.type = :SEARCH_DYNAMIC_ADS\n ag.name = \"Earth to Mars Cruises #{(Time.now.to_f * 1000).to_i}\"\n\n ag.campaign = campaign_resource_name\n\n ag.status = :PAUSED\n ag.tracking_url_template = \"http://tracker.example.com/traveltracker/{escapedlpurl}\"\n\n ag.cpc_bid_micros = 3_000_000\n end\n\n operation = client.operation.create_resource.ad_group(ad_group)\n\n response = client.service.ad_group.mutate_ad_groups(\n customer_id: customer_id,\n operations: [operation],\n )\n\n puts(\"Created ad group with ID: #{response.results.first.resource_name}\")\n response.results.first.resource_name\nend", "def create\n @email_template = EmailTemplate.new(email_template_params.merge(user_id: current_user.id))\n respond_to do |format|\n if @email_template.save\n update_contact_category\n format.html { redirect_to @email_template, notice: 'Email template was created.' }\n else\n format.html { render :new }\n format.json { render json: @email_template.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @project_template = ProjectTemplate.new(params[:project_template])\n\n respond_to do |format|\n if @project_template.save\n format.html { redirect_to @project_template, notice: 'Project template was successfully created.' 
}\n format.json { render json: @project_template, status: :created, location: @project_template }\n else\n format.html { render action: \"new\" }\n format.json { render json: @project_template.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n intent_request = @api_ai_client.create_intents_request\n response = intent_request.create(param_options)\n contexts_templates = { contexts: category_params[:contexts].split(\",\"), templates: category_params[:templates].split(\",\") }\n @category = Category.new(category_params.merge(contexts_templates))\n\n respond_to do |format|\n if response.is_a?(Hash) && response[:status][:code].eql?(200)\n @category.intent_id = response[:id]\n\n if @category.save\n\n format.html { redirect_to @category, notice: 'Category was successfully created.' }\n format.json { render :show, status: :created, location: @category }\n else\n format.html { render :new }\n format.json { render json: @category.errors, status: :unprocessable_entity }\n end\n else\n @notice = response.message\n\n format.html { render :new }\n format.json { render json: { error: response.message }, status: response.code}\n end\n end\n end", "def create\n @survey_item_template = SurveyItemTemplate.new(survey_item_template_params)\n\n respond_to do |format|\n if @survey_item_template.save\n format.html { redirect_to @survey_item_template.survey_item_category_template.survey_template, notice: 'Survey item template was successfully created.' }\n format.json { render :show, status: :created, location: @survey_item_template }\n else\n format.html { render :new }\n format.json { render json: @survey_item_template.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @campaign_banner = CampaignBanner.new(campaign_banner_params(:create))\n\n respond_to do |format|\n if @campaign_banner.save\n format.html { redirect_to campaign_banner_url(@campaign, @campaign_banner), notice: 'Campaign banner was successfully created.' }\n format.json { render action: 'show', status: :created, location: @campaign_banner }\n else\n format.html { render action: 'new' }\n format.json { render json: @campaign_banner.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @template = Template.new(template_params)\n\n respond_to do |format|\n if @template.save\n format.html { redirect_to @template, notice: 'Template was successfully created.' }\n format.json { render json: @template, status: :created, location: @template }\n else\n format.html { render action: \"new\" }\n format.json { render json: @template.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @donation_campaign = DonationCampaign.new(donation_campaign_params)\n\n respond_to do |format|\n if @donation_campaign.save\n format.html { redirect_to @donation_campaign, notice: 'Donation campaign was successfully created.' 
}\n format.json { render :show, status: :created, location: @donation_campaign }\n else\n format.html { render :new }\n format.json { render json: @donation_campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n input = create_input\n return unauthorized unless create_auth(input)\n create_collaboration = API::V1::CreateCollaboration.new(input)\n result = create_collaboration.call\n if result\n render status: 201, json: result\n else\n error(create_collaboration.errors)\n end\n end", "def campaign_params\n params.require(:campaign).permit(:name, :street, :city, :zipcode, :description, :promotion, :lastdate)\n end", "def create\n @contract_template = ContractTemplate.new(params[:contract_template])\n\n respond_to do |format|\n if @contract_template.save\n format.html { redirect_to(@contract_template, :notice => 'ContractTemplate was successfully created.') }\n format.xml { render :xml => @contract_template, :status => :created, :location => @contract_template }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @contract_template.errors, :status => :unprocessable_entity }\n end\n end\n end", "def post(list_id, name, description = \"\")\n Trello::Card.create(name: name, list_id: list_id, desc: description)\n end", "def create_sms_campaign_with_http_info(create_sms_campaign, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: SMSCampaignsApi.create_sms_campaign ...'\n end\n # verify the required parameter 'create_sms_campaign' is set\n if @api_client.config.client_side_validation && create_sms_campaign.nil?\n fail ArgumentError, \"Missing the required parameter 'create_sms_campaign' when calling SMSCampaignsApi.create_sms_campaign\"\n end\n # resource path\n local_var_path = '/smsCampaigns'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(create_sms_campaign)\n auth_names = ['api-key', 'partner-key']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'CreateModel')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: SMSCampaignsApi#create_sms_campaign\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def create\n @csp = Csp.new(csp_params)\n\n respond_to do |format|\n if @csp.save\n format.html { redirect_to @csp, notice: 'Csp was successfully created.' 
}\n format.json { render :show, status: :created, location: @csp }\n else\n format.html { render :new }\n format.json { render json: @csp.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_list(name, template, options = {}, &blk)\n\t\t\t # create the new list\n\t\t\t new_list = Lists.create_list(@web, name, template, options)\n\t\t\t context_for_new_object(new_list, ListContext, &blk)\n\t\t\tend", "def create\n @campaign = Campaign.new(campaign_params)\n\n respond_to do |format|\n if @campaign.save\n @campaign.import(params[:file], params[:number_of_rows])\n format.html { redirect_to campaigns_url, notice: 'Campaign was successfully created.' }\n format.json { render :show, status: :created, location: @campaign }\n else\n format.html { render :new }\n format.json { render json: @campaign.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_contact(list_id, contact_base_extra_post, opts = {})\n data, _status_code, _headers = create_contact_with_http_info(list_id, contact_base_extra_post, opts)\n data\n end", "def create\n @activities_list = ActivitiesList.new(activities_list_params)\n\n respond_to do |format|\n if @activities_list.save\n format.html { redirect_to @activities_list, notice: 'Activities list was successfully created.' }\n format.json { render :show, status: :created, location: @activities_list }\n else\n format.html { render :new }\n format.json { render json: @activities_list.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @template_task = TemplateTask.new(permitted_params_for_template_task)\n\n respond_to do |format|\n if @template_task.save\n format.html { redirect_to template_tasks_path, notice: 'TemplateTask was successfully created.' }\n format.json { render json: @template_task, status: :created, location: @template_task }\n else\n format.html { render action: 'new' }\n format.json { render json: @template_task.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @lead = Lead.new(lead_params)\n @lead.campaign_id = @campaign.id\n\n respond_to do |format|\n if @lead.save\n format.html { redirect_to thanks_path, notice: 'Your subscription has been successfully completed.' }\n format.json { render :show, status: :created, location: @lead }\n else\n format.html { render :new }\n format.json { render json: @lead.errors, status: :unprocessable_entity }\n end\n end\n end" ]
[ "0.6966859", "0.69255346", "0.6486239", "0.6331614", "0.6303938", "0.6069134", "0.6069134", "0.6010403", "0.5967475", "0.59510046", "0.59034526", "0.59006727", "0.5888634", "0.58681035", "0.58034587", "0.5803171", "0.5776307", "0.57684535", "0.5764754", "0.57470644", "0.57466453", "0.5703624", "0.56630754", "0.56441075", "0.56361765", "0.5631364", "0.56297034", "0.56247354", "0.5591146", "0.5514215", "0.549466", "0.54799604", "0.5444302", "0.5432699", "0.5428091", "0.54233426", "0.5419969", "0.53914547", "0.5388662", "0.53885776", "0.5387375", "0.5376784", "0.53751975", "0.5343303", "0.5327622", "0.5324518", "0.5324487", "0.5323797", "0.5312804", "0.5309853", "0.5309853", "0.5309853", "0.5297358", "0.52956784", "0.529355", "0.52858627", "0.52843624", "0.5280615", "0.52685493", "0.52622133", "0.5255408", "0.52548945", "0.52516735", "0.52468127", "0.52346957", "0.5202627", "0.51923394", "0.51923394", "0.51923394", "0.51923394", "0.5190201", "0.51838696", "0.5179468", "0.51780444", "0.5172759", "0.5158422", "0.51536375", "0.5153524", "0.5152469", "0.514789", "0.5138645", "0.5131021", "0.5126605", "0.5122105", "0.51048326", "0.510203", "0.508145", "0.50811315", "0.5066498", "0.50649244", "0.50638056", "0.50565505", "0.50565284", "0.5046899", "0.50459063", "0.5040952", "0.5029877", "0.50292534", "0.50258493", "0.50239027" ]
0.86716205
0
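A short usage sketch for the campaign-create method in the record above. The configure block, the Iterable::Campaigns resource class, the environment variable, and the IDs are assumptions about the surrounding iterable-api-client-style gem, not values taken from the record:

# Assumed setup: the gem reads its API token from a global configuration block.
Iterable.configure do |config|
  config.token = ENV['ITERABLE_API_KEY'] # hypothetical env var
end

# Assumed wrapper: a Campaigns resource that defines the #create method shown above.
campaigns = Iterable::Campaigns.new
response = campaigns.create(
  'Spring Sale announcement', # name
  123_456,                    # templateId (placeholder)
  [78_910]                    # listIds - at least one is required
)
puts response.success? # the response object is assumed to expose #success?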
Get recurring child campaigns for a campaign
def recurring(campaign_id)
  Iterable.request(conf, "/campaigns/recurring/#{campaign_id}/childCampaigns").get
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def campaigns\n response = get 'campaigns'\n response.map{|item| Hashie::Mash.new(item)}\n end", "def get_contact_campaigns(id)\n return make_request(\"#{self.endpoint}/#{id}/campaigns\")\n end", "def children()\n #Ressource.filter(:parent_id => self.id, :parent_service_id => self.service_id).all\n end", "def scrape_campaigns\n # Initialize crawler\n more_campaigns = true\n campaign_page = 0\n transalt_campaigns = []\n\n puts \"Scraping campaigns...\"\n # Collect all campaigns\n while more_campaigns do\n puts \"Scraping ?page=#{campaign_page}\"\n scraper = CampaignCrawler.new(campaign_page)\n output = scraper.crawl\n\n transalt_campaigns = transalt_campaigns + output['campaigns']\n\n output['campaigns'].empty? ? (more_campaigns = false) : campaign_page +=1\n end\n\n puts \"Scraped #{campaign_page} pages\\n\\n\"\n\n transalt_campaigns\n end", "def mcget_campaign(campaign_id)\n # campaigns_res = setup_mcapi.campaigns.list( {campaign_id: \"f5666d1862\"} )\n campaigns_res = setup_mcapi.campaigns.list( {campaign_id: campaign_id} )\n search_campaign(campaigns_res)\n end", "def get_childs\n childs = Category.any_in(parent_ids: [parent.id])\n\n results = Array.new\n childs.each do |child|\n results << child\n end\n\n results\n end", "def campaign\n company.campaign\n end", "def campaigns\r\n CampaignsController.instance\r\n end", "def all_activities_children\n @activity_children = ActivityChild.includes(:child)\n end", "def all_recurrings(&block)\n recurrings.list(&block)\n end", "def campaign; Campaign.get(self.campaign_id); end", "def campaign; Campaign.get(self.campaign_id); end", "def campaigns(client_id)\n response = Client_GetCampaigns(\"ClientID\" => client_id)\n return [] if response.empty?\n unless response[\"Code\"].to_i != 0\n response[\"Campaign\"].collect{|c| Campaign.new(c[\"CampaignID\"].to_i, c[\"Subject\"], c[\"SentDate\"], c[\"TotalRecipients\"].to_i)}\n else\n raise response[\"Code\"] + \" - \" + response[\"Message\"]\n end\n end", "def recurring_charges\n data.recurring_charges\n end", "def campaigns(options={})\n response = connection.get do |req|\n \treq.url \"campaigns\", options\n end\n return_error_or_body(response)\n \tend", "def campaigns(params, additional_headers = {})\n perform_request(self, @token, 'campaigns', 'get', params, additional_headers)\n end", "def report_children(parent_id) \n\t\tReport.find(:all, :conditions => [\"parent_id=?\",parent_id])\t\t\t\n\tend", "def campaigns(options={})\n Resources::Campaigns.new(self, options)\n end", "def joinable_campaigns\n Campaign.currently_running.where(\n \"NOT EXISTS (\n SELECT 'X'\n FROM organization_campaigns oc\n WHERE oc.campaign_id = campaigns.id\n AND oc.organization_id = ?\n )\", id\n )\n end", "def children\n Children.new({\n cc_conditions: self.association( :cc_conditions),\n cc_loops: self.association( :cc_loops),\n cc_questions: self.association( :cc_questions),\n cc_sequences: self.association( :cc_sequences),\n cc_statements: self.association( :cc_statements)\n })\n end", "def children\n if self.rep_parent.nil?\n return super\n else\n the_children = Array.new\n self.rep_parent.children.each do |child|\n if child.is_a?(Repeatable)\n \n child.repititions.\n where(:show_date => self.show_date).each do |r|\n the_children << r\n end\n elsif child.show_date == self.show_date\n the_children << child\n end\n end\n return the_children\n end\n end", "def sub_facts\n MatterFact.all(:conditions => [\"parent_id = ?\", self.id])\n end", "def index\n @creatives = @campaign.creatives\n end", "def 
get_campaign(campaign_id)\n request(:get, \"api/1/campaigns/#{campaign_id}\").campaign\n end", "def child_rels(*args)\n options = args.extract_options!\n rels = relationships.flat_map(&:children).uniq\n Relationship.filter_by_resource_type(rels, options)\n end", "def children \n\t\t_c = Children.joins(:families).where(:families => {:parent_id => id} )\n\t\tif _c.present?\n\t\t\t_c\n\t\telse\n\t\t\tnil\n\t\tend\n\tend", "def mcget_campaign_content(campaign_id)\n # campaigns_res = setup_mcapi.campaigns.content(\"f5666d1862\")\n campaigns_res = setup_mcapi.campaigns.content( campaign_id )\n end", "def children\n return [] unless category?\n\n server.channels.select { |c| c.parent_id == id }\n end", "def children\n \n TapirLogger.instance.log \"Finding children for #{self}\"\n children = []\n EntityMapping.all.each do |mapping| \n\n # Go through each associated entity mapping, and find mappings where the parent_id is us\n # which means that the child_id is some other entity, and it's a child\n \n # the to_s is important, otherwise self.id returns a :Moped::BSON::ObjectID\n children << mapping.get_child if mapping.parent_id == self.id.to_s\n\n # TODO - what happens if parent_id and child_id are the same. We'll\n # end up grabbing it. Could that break any assumptions?\n end\n \n children\n end", "def index\n @npc_relations = @campaign.npc_relations\n end", "def campaign\n self.class.caffeinate_campaign\n end", "def all_event_push_campaigns\n get(url_(\"event-pushcampaign\"))\n end", "def get_children(args = '', output = :OBJECT)\n kids = {}\n if args.blank?\n if $GLOBALS['post']\n args = { post_parent: $GLOBALS['post'].post_parent }\n else\n return kids\n end\n elsif args.is_a? Integer\n args = { post_parent: args}\n elsif args.is_a? String\n args = { post_parent: args.to_i}\n elsif args.is_a? Railspress::WpPost\n args = { post_parent: args.post_parent }\n end\n\n defaults = {\n numberposts: -1,\n post_type: 'any',\n post_status: 'any',\n post_parent: 0,\n }\n\n r = defaults.merge args\n\n children = get_posts(r)\n\n return kids if children.nil?\n\n return children unless r[:fields].blank?\n\n # update_post_cache(children)\n\n children.each do |child|\n kids[child.id] = child\n end\n\n if output == :OBJECT\n kids\n elsif output == :ARRAY_A\n weeuns = {}\n kids.values.each do |kid|\n weeuns[kid.id] = kid.attributes\n end\n return weeuns\n elsif output == :ARRAY_N\n babes = {}\n kids.values.each do |kid|\n babes[kid.id] = kid.attributes.values\n end\n return babes\n else\n kids\n end\n end", "def child_replies\n Reply.find_by_parent_id(self.id) \n end", "def campaigns(id = nil, opts = {})\n load_resource(Campaign, id, opts)\n end", "def children\n self.class.where('? 
= ANY(parent_ids)', id.to_s)\n end", "def activities\n children('activities')\n end", "def get_campaigns(access_token, opts = {})\n\t\t\t\t\turl = Util::Config.get('endpoints.base_url') + Util::Config.get('endpoints.campaigns')\n\t\t\t\t\turl = build_url(url, opts)\n\n response = RestClient.get(url, get_headers(access_token))\n body = JSON.parse(response.body)\n\n campaigns = []\n body['results'].each do |campaign|\n campaigns << Components::Campaign.create_summary(campaign)\n end\n\n Components::ResultSet.new(campaigns, body['meta'])\n end", "def nested_cpes(children)\n cpes = []\n children.each do |child|\n if child.has_key?('cpe_match')\n child['cpe_match'].each do |cpe_match|\n cpes << cpe_match['cpe23Uri'] if cpe_match.has_key?('cpe23Uri')\n cpes << cpe_match['cpe22Uri'] if cpe_match.has_key?('cpe22Uri')\n end\n elsif child.has_key?('children')\n cpes.push *nested_cpes(child['children']).flatten\n end\n end\n cpes\n end", "def children\n objs = @caldav_backend.scheduling_objects(@principal_uri)\n children = []\n objs.each do |obj|\n # obj['acl'] = self.get_acl\n obj['principaluri'] = @principal_uri\n children << SchedulingObject.new(@caldav_backend, obj)\n end\n\n children\n end", "def list_recurring_details(attributes)\n request = list_recurring_details_request(attributes)\n execute_request(request)\n end", "def find\n Campaign.find(:first, :id => @id)\n end", "def children\n self.class.find(:all, \n :select => \"a.*\",\n :joins => \"a join #{self.class.bridge_class.table_name} b on a.id = b.#{self.class.parent_foreign_key}\", \n :conditions => [\"b.#{self.class.child_foreign_key} = ? and b.#{self.class.levels_from_parent} = 1\", self.id])\n end", "def get_childs(recursive, ret_obj)\n\n return self.class.get_childs(self.id, recursive, ret_obj)\n end", "def children\n dataset.nested.filter(self.class.qualified_parent_column => self.id)\n end", "def campaign_find_active\n call_api(:campaign_find_active)\n end", "def find_child_entries_for(givendate,userid,parent)\n @entries = Entry.find_all_current_entries(givendate,userid)\n @otherChildentries = []\n \n @ancestors = []\n \n if parent != 0 then \n parentid = parent\n while parentid != nil\n @ancestors << parentid\n parent = Entry.find_by_id(parentid)\n parentid = parent.parent_id\n end\n end\n @entries.each do |e|\n if e.id != id && !isAncestor(@ancestors,e.id) then\n @otherChildentries << e\n end \n #@otherChildentries << e unless e.id == id || isAncestor(@ancestors)\n end\n \n return @otherChildentries\n end", "def index\n @campaigns = site.campaigns.by_campaign_and_day.all\n render :json => @campaigns\n end", "def list(options={})\n Mailgun.submit(:get, campaign_url, options)[\"items\"] || []\n end", "def getChildCategories\n Category.find(:all, :conditions => \"parent_category_id = #{self.id}\", :order => ':id')\nend", "def get_campaign\n if params[:campaign_path].nil?\n return nil\n end\n\n path = params[:campaign_path]\n\n @campaign = Rails.cache.fetch \"#{path}-campaign-info\" do\n Campaign.where(path: path).first\n end\n end", "def nested_records(parent_record, database_record, salesforce_instance)\n # We need to identify _this_ association to prevent backtracking.\n inverse = inverse_association_name(target_reflection(parent_record))\n nested = salesforce_instance.mapping.associations.flat_map do |a|\n next if a.name == inverse\n a.build(database_record, salesforce_instance.record, cache)\n end\n\n nested.compact\n end", "def children\n Feature.find(:all, :conditions => [ 'parent_id=?', self.id] )\n end", "def 
list_child_resources(parent_res_path, recursive = false)\n path = parent_res_path.is_a?(CanonicalPath) ? parent_res_path : CanonicalPath.parse(parent_res_path)\n parent_resource_path = path.resource_ids.join('/')\n feed_id = path.feed_id\n which_children = (recursive ? '/recursiveChildren' : '/children')\n ret = http_get(\"/feeds/#{feed_id}/resources/#{parent_resource_path}#{which_children}\")\n ret.map { |r| Resource.new(r) }\n end", "def direct_children_by_id(*args)\n scope = args.last.is_a?(Hash) ? args.pop : {}\n ids = args.flatten.compact.uniq\n self.class.find_in_nested_set(:all, { \n :conditions => [\"#{scope_condition} AND #{prefixed_parent_col_name} = #{self.id} AND #{self.class.table_name}.#{self.class.primary_key} IN (?)\", ids]\n }, scope) \n end", "def get_childs\n access = Access.where(:parent_id=>id)\n childs=\"\"\n access.each do |item|\n if childs.empty?\n if item.get_childs.empty?\n childs=item.id.to_s\n else\n childs=item.id.to_s+\",\"+item.get_childs\n end\n else\n if item.get_childs.empty?\n childs= childs+\",\"+item.id.to_s\n else\n childs= childs+\",\"+item.id.to_s+\",\"+item.get_childs.to_s\n end\n end\n end\n return childs\n end", "def get_campaign(access_token, campaign_id)\n url = Util::Config.get('endpoints.base_url') +\n sprintf(Util::Config.get('endpoints.campaign'), campaign_id)\n url = build_url(url)\n response = RestClient.get(url, get_headers(access_token))\n Components::Campaign.create(JSON.parse(response.body))\n end", "def get_children(params)\n scope_data_class(params) do\n params[:limit] = config[:rows_per_page] if config[:enable_pagination] && (params[:id].nil? || params[:id] == 'root')\n params[:scope] = config[:scope]\n data_adapter.get_records(params, final_columns)\n end\n end", "def grand_children\n []\n end", "def get_childs_of_category(category_id)\n #convert string to BSON::ObjectId\n if !category_id.is_a?(BSON::ObjectId)\n category_id = BSON::ObjectId.from_string(category_id)\n end\n\n #find categories with ancestors have this category_id\n childs = Category.any_in(parent_ids: [category_id])\n\n results = Array.new\n childs.each do |child|\n results << child\n end\n\n results\n end", "def current_campaign\n return nil if campaigns.empty?\n campaigns.order(donation_deadline: 'DESC').first\n end", "def get_sms_campaigns\r\n # Prepare query url.\r\n _query_builder = Configuration.base_uri.dup\r\n _query_builder << '/sms-campaigns'\r\n _query_url = APIHelper.clean_url _query_builder\r\n\r\n # Prepare and execute HttpRequest.\r\n _request = @http_client.get(\r\n _query_url\r\n )\r\n BasicAuth.apply(_request)\r\n _context = execute_request(_request)\r\n\r\n # Validate response against endpoint and global error codes.\r\n return nil if _context.response.status_code == 404\r\n validate_response(_context)\r\n\r\n # Return appropriate response type.\r\n _context.response.raw_body\r\n end", "def recur_referrers\n if referrer_id.blank?\n []\n else\n [referrer] + referrer.recur_referrers\n end\n end", "def get_pgcr_for_activity(activity_id)\n\t\t\t# If they didn't give us an activity_id to search, there's nothing we can do\n\t\t\treturn nil unless activity_id\n\n\t\t\turl = \"/Destiny2/Stats/PostGameCarnageReport/#{activity_id.to_s.uri_encode}/\"\n\n\t\t\tCowgod::Logger.log \"#{self.class}.#{__method__} - #{url}\"\n\n\t\t\tresponse = with_retry do\n\t\t\t\tself.class.get(url, @options)\n\t\t\tend\n\t\t\traise QueryError, 'API request failed' unless response && response.code == SUCCESS_CODE\n\n\t\t\tresponse.parsed_response['Response'] || {}\n\t\tend", 
"def children\n self.class.find(:all, :conditions => \"#{acts_as_nested_set_options[:scope]} AND #{acts_as_nested_set_options[:parent_column]} = #{self.id}\", :order => acts_as_nested_set_options[:left_column])\n end", "def child_ids(*args)\n return call_ancestry_method(:child_ids) if use_ancestry?\n\n Relationship.resource_pairs(child_rels(*args))\n end", "def each_plan_child\n self\n end", "def survey_campaign(survey_id, id)\n get(\"survey/#{survey_id}/surveycampaign/#{id}\")\n end", "def index\n @campaigns = Competition.all\n end", "def sub_tasks\n MatterTask.all(:conditions => [\"parent_id = ?\", self.id])\n end", "def generate_recurrences\n # remove all already scheduled occurrences in the future\n Occurrence.where('event_id = ? AND date >= ?', self.id, Date.current).delete_all\n \n if read_attribute(:recurrence).empty?\n Occurrence.create(event: self, date: self.date)\n else\n # schedule events for the next month\n # TODO: make instance variable with schedule instance to avoid repeat instantiation\n schedule = IceCube::Schedule.new\n schedule.add_recurrence_rule(self.recurrence)\n schedule.occurrences(Time.current + 1.month).each do |o|\n Occurrence.create(event: self, date: o.to_date)\n end\n end\n end", "def children\n parent_id = params['parent_id']\n\n children = Child.includes(:bursts).where(parent_id: parent_id).map{ |child|\n last_burst = child.bursts.last\n burst_rate = last_burst.present? ? calc_burst_rate(last_burst) : 0\n\n {\n id: child.id,\n name: child.name,\n burst_rate: burst_rate\n }\n }\n\n render json: children\n end", "def get_subc(category)\n retval = []\n #Category.where(category_id: category.id).each do |c| \n category.categories.each do |c| \n retval += [c] + get_subc(c)\n end\n retval\n end", "def children(options={})\n @global_page.children.all options\n end", "def get_campaign\n @campaign = Campaign.find(params[:campaign_id])\n end", "def children\n\t\treturn self.search( :one, '(objectClass=*)' )\n\tend", "def children_moves\n LedgerMove.where(ledger_account_id: children_accounts_ids)\n end", "def children(*args)\n self.class.send(:with_scope, :find=>{:conditions=>['parent_node_id=?', self.child_node_id]}) do\n self.class.find(:all, *args)\n end\n end", "def children(*args)\n return call_ancestry_method(:children) if use_ancestry?\n\n Relationship.resources(child_rels(*args))\n end", "def list_recurring_details(shopper_reference:, merchant_account: @merchant_account, contract: \"RECURRING\")\n postJSON(\"/Recurring/v12/listRecurringDetails\",\n shopperReference: shopper_reference,\n recurring: { contract: contract },\n merchantAccount: merchant_account\n )\n end", "def index\n @campaigns = Campaign.all\n end", "def index\n @campaigns = Campaign.all\n end", "def index\n @campaigns = Campaign.all\n end", "def children\n @resource.children\n end", "def relationships_all_channels\n if debug?\n channel_fields = ChannelFieldForm.new\n channel_fields.create_field(\n group_id: 1,\n type: 'Relationships',\n label: 'Relationships',\n fields: {\n limit: 25,\n relationship_order_field: 'Entry Date',\n relationship_order_dir: 'Descending (Z-A)',\n relationship_allow_multiple: 'n',\n relationship_future: '1'\n }\n )\n\n @page.load\n else\n $db.query(IO.read('channel_sets/relationships-all-channels.sql'))\n clear_db_result\n end\n end", "def children\n kids = []\n each_child { |kid| kids << kid }\n kids\n end", "def children\n objs = @carddav_backend.cards(@address_book_info['id'])\n children = []\n objs.each do |obj|\n obj['acl'] = child_acl\n children << 
Card.new(@carddav_backend, @address_book_info, obj)\n end\n children\n end", "def children\n return @children if [email protected]?\n @children = all_children.find_all{|collection| collection.url.count('/') == self.url.count('/') + 1}\n end", "def indirect_contracts\n Contract.where(:user_id => descendant_ids)\n end", "def recalculate_child_breadcrumbs(cards = collection_cards)\n cards.each do |card|\n next unless card.primary?\n\n if card.item.present?\n # have to reload in order to pick up new parent relationship\n card.item.reload.recalculate_breadcrumb!\n elsif card.collection_id.present?\n # this method will run the async worker if there are >50 children\n card.collection.reload.recalculate_breadcrumb_tree!\n end\n end\n end", "def get_campaign(settings_file, campaign_test_key)\n (settings_file['campaigns'] || []).find do |campaign|\n campaign['key'] == campaign_test_key\n end\n end", "def campaign\n @model = 'campaign'\n @key = 'title'\n @values = Campaign.all.order(created_at: :desc)\n render 'campaigns'\n end", "def children\n base_set_class.find(:all, :conditions => \"#{scope_condition} AND #{parent_col_name} = #{self.id}\", :order => left_col_name)\n end", "def pending_refund_payments_projects\n pending_refund_payments.map(&:project)\n end", "def find_child_entries(givendate,userid)\n\t #@entries = Entry.find(:all, :conditions => [\"user_id=? and entries.end_dt_tm >= ? \", userid, givendate])\n\t @entries = Entry.find(:all, :include => [:entry_status], :conditions => [\"entries.user_id= ? and entry_statuses.ended = 0\", userid])\n\t @otherChildentries = []\n\n\t #Grab all the ancestors of this entry\n\t @ancestors = [] \n\t parentid = parent_id \n\t while parentid != nil\n\t @ancestors << parentid\n\t parent = Entry.find_by_id(parentid)\n\t parentid = parent.parent_id\n\t end\n\n\t #Store all entries except self and ancestors\n\t @entries.each do |e|\n\t if e.id != id && !isAncestor(@ancestors,e.id) && e[:type] != 'Goal' then\n\t @otherChildentries << e\n\t end \n\t #@otherChildentries << e unless e.id == id || isAncestor(@ancestors)\n end\n #Also make sure the selected child entries are present in the list if not already \n #Get already selected child entries\n selected_children = find_selected_childentries(userid)\n selected_children.each {|ce|\n if !isItemInList(@otherChildentries,ce) then\n @otherChildentries << ce\n end\n }\n return @otherChildentries\n end", "def descendants\n model_base_class.scoped(:conditions => descendant_conditions)\n end", "def relationships_as_child(entity_id)\n API::request(:get, \"entities/#{entity_id}/relationships_as_child\")\n end", "def map_current_campaign\n\t\tobj = Campaign.all\n\t\tobj.map { |i| {i.id => i.name} }\n\tend", "def children\n return [] if child_count <= 0\n with_cache(:children) do\n @od.request(\"#{api_path}/children?$top=1000\")['value'].map do |child|\n OneDriveItem.smart_new(@od, child)\n end\n end\n end", "def campaign_for_contact entity\n entity.ensure_salesforce_id\n return if entity.salesforce_id.nil?\n \n campaign_member = campaign_member_for_contact(entity)\n return nil if campaign_member.nil?\n \n client.materialize('Campaign')\n SFDC_Models::Campaign.find(campaign_member.CampaignId)\n end" ]
[ "0.5981326", "0.5886731", "0.58036757", "0.57709426", "0.5763354", "0.5758741", "0.57077026", "0.56977236", "0.5695962", "0.56759095", "0.56620777", "0.56620777", "0.5660138", "0.5634783", "0.557007", "0.5441933", "0.5418714", "0.53877634", "0.5374748", "0.53566366", "0.5334701", "0.5317534", "0.5301078", "0.52812225", "0.5279264", "0.52736765", "0.5259843", "0.5245205", "0.52266836", "0.51961565", "0.51905257", "0.51656824", "0.5156813", "0.5150337", "0.512691", "0.51037836", "0.5084262", "0.5077725", "0.506472", "0.5061099", "0.5046303", "0.5015952", "0.49983558", "0.49955547", "0.4992069", "0.4989594", "0.4988128", "0.4983178", "0.4978657", "0.4972883", "0.4966746", "0.49630812", "0.49445432", "0.4937181", "0.49276063", "0.492483", "0.4889698", "0.4872316", "0.4867981", "0.48665792", "0.4850019", "0.48488078", "0.48450914", "0.48427552", "0.4838136", "0.48338228", "0.4831591", "0.48314205", "0.4826816", "0.4812413", "0.4806448", "0.4805353", "0.47859427", "0.47848383", "0.47840512", "0.4783795", "0.47835296", "0.47768277", "0.47728962", "0.47676152", "0.47672978", "0.47672978", "0.47672978", "0.47667447", "0.47663638", "0.47636554", "0.4757617", "0.4750594", "0.47469968", "0.47451103", "0.4733104", "0.47300488", "0.472594", "0.47249085", "0.47226742", "0.47138143", "0.46912333", "0.46880943", "0.46831357", "0.467103" ]
0.8493955
0
Export metrics in CSV format for one or more campaigns
def metrics(campaign_ids = [], start_time = nil, end_time = nil) params = { campaignId: campaign_ids } if start_time params[:startTime] = start_time.to_date.strftime(Iterable::DATE_FORMAT) params[:endTime] = end_time.to_date.strftime(Iterable::DATE_FORMAT) end Iterable.request(conf, '/campaigns/metrics', params).get end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_to_csv(opts = {})\n filename = opts.fetch(:filename) { 'twords_report.csv' }\n write_file(filename, :to_csv, opts)\n end", "def write_csv_report\n unless @csv.nil?\n @csvexport.write(@csv)\n end\n end", "def output_csv(datastore)\n CSV.open(OUTPUT_CSV, \"wb\") do |csv|\n csv << \t[\"Period\", \"First Use In\", \"Engaged Users\"]\n\n datastore.keys.sort.each do |date_key|\n pretty_date = Date.parse(date_key.split(\"_\")[0]).strftime(\"%Y-%m-%d\")\n datastore[date_key].keys.sort.each do |cohort_key|\n pretty_cohort= Date.parse(cohort_key.split(\"_\")[0]).strftime(\"%Y-%m-%d\")\n csv << [ pretty_date, pretty_cohort,\tdatastore[date_key][cohort_key]['total_users']]\n end\n end\n end\nend", "def to_csv\n initialize_generator\n csv_report_generator.records = pull_join\n csv_report_generator.generate_report\n end", "def to_csv(format = :long)\n output = ''\n \n columns = []\n # only output\n case format\n when :long\n columns.concat([@id, @updated, @title])\n end\n \n # output all dimensions\n columns.concat(@dimensions.map {|d| d.values.first})\n \n # output all metrics\n columns.concat(@metrics.map {|m| m.values.first})\n\n output = CSV.generate_line(columns) \n return output\n end", "def export_csv\n export_string = \"#{@id},#{type_string},#{@name.gsub(/[\\,,\\s]/,\"\")},\"\n @details.each{|k,v| export_string << \"#{k}=#{v};\".gsub(/[\\,,\\s]/,\"\") }\n export_string\n end", "def export_csv\n export_string = \"#{@id},#{type_string},#{@name.gsub(/[\\,,\\s]/,\"\")},\"\n @details.each{|k,v| export_string << \"#{k}=#{v};\".gsub(/[\\,,\\s]/,\"\") }\n export_string\n end", "def output_to_csv\n if (@values[:result] != nil)\n @additions << calculate_worktime\n filename = get_entry(\"Specify output file: \")\n p = Menu.data_handler.find_person_by_id(@values[:id])\n CSVWriter.output(filename, p, @values[:result], @additions)\n else\n puts \"Nothing to write right now.\"\n end\n end", "def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => @@default_sort\n collection = @@model.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"#{@@param_name}_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << @@name_print.map{|n| n.trn}\n collection.each do |element|\n csv << @@field_print.map{|f| element[f]}\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end", "def to_csv\n @csv ||= format_csv\n end", "def outputCampaign(output_filename, first)\n\n\t\twrite_options = \"wb\" if first\n\t\twrite_options = \"a\" if not first\n\n\t\tCSV.open(output_filename, write_options, {:encoding => \"utf-8\", force_quotes: false }) do |csv|\n\t\t\t# Create Headers\n\t\t\tcsv << @output_row_headers if first\n\t\t\t# Output Campaign Settings Row\n\t\t\tcsv << self.settingsRow\n\n\t\t\t# Output location row (this must be it's own row for \n\t\t\t# Mobile Bid adjustment to work in campaign settings row)\n\t\t\tcsv << self.locationRow\n\n\t\t\t# Output All AdGroups Settings Rows\n\t\t\[email protected] do |adgroup|\n\t\t\t \tcsv << adgroup.settingsRow\n\t\t\t \tadgroup.keywords.each do |keyword|\n\t\t\t \t\tcsv << keyword.settingsRow\n\t\t\t \tend\n\t\t\t \tadgroup.ads.each do |ad|\n\t\t\t \t\tcsv << ad.settingsRow\n\t\t\t \tend\n\t\t\tend\n\n\t\t\t# Output All Sitelinks Settings Rows\n\t\t\[email protected] do 
|sitelink|\n\t\t\t\tcsv << sitelink.settingsRow\n\t\t\tend\n\n\t\tend\n\tend", "def export_contacts_to_csv\n start_time = Time.now\n \n contacts = RESULT_COLL.find()\n \n FasterCSV.open(\"../output/healthcare_sg_export.csv\", 'w') {|csv|\n csv << [\"contact_name\", \"country\", \"auction_time\"]\n contacts.each do |row|\n csv << row[\"contact\"]\n end\n }\n p Time.now - start_time\nend", "def atop_csv\n \"#{@archive_root}/#{master.hostname}/atop_log_#{@gatling_scenario.downcase.gsub('.', '_')}.csv\"\n end", "def dumpCsvSummary()\n CSV.open(csvFilenameSummary(), \"w\", { :col_sep => csvColSepChar() }) { |csv|\n csv << AnalyzerExpA.genCsvTitleRow() ;\n genCsvSummary().each{|row|\n csv << row ;\n }\n }\n end", "def export\n result = Urlmaster.all\n head = 'EF BB BF'.split(' ').map{|a|a.hex.chr}.join()\n exportFile = CSV.generate(csv = head) do |writer|\n writer << [\"mapId\", \"venueName\", \"floor\", \"typeMap\", \"venueFloorMapImageUrl\", \"venueFloorMapUrl\"]\n result.each do |r|\n writer << [r[:mapId], r[:venueName], r[:floor], r[:typeMap], r[:venueFloorMapImageUrl], r[:venueFloorMapUrl]]\n end\n end\n send_data exportFile, filename: \"MapCrawler-#{Time.now.in_time_zone(\"Asia/Tokyo\").strftime(\"%y%m%d%H%M%S\")}.csv\", type: \"text/csv\"\n # redirect_to crawler_path\n end", "def generateReport\n filePath = \"#{@reportFolder}/report.csv\"\n file = File.open(filePath, 'w')\n file.puts ['Screen', 'Description', 'Automation Message', 'Status'].join(',')\n @report.each do |result|\n file.puts result.join(',')\n end\n file.close\n end", "def to_csv(options = {})\n if [\"html\", \"pdf\", \"xml\", \"combined\"].include? options[:format]\n view = \"counter_#{options[:format]}_views\"\n else\n view = \"counter\"\n end\n\n service_url = \"#{ENV['COUCHDB_URL']}/_design/reports/_view/#{view}\"\n\n result = get_result(service_url, options.merge(timeout: 1800))\n if result.blank? 
|| result[\"rows\"].blank?\n Alert.create(exception: \"\", class_name: \"Faraday::ResourceNotFound\",\n message: \"CouchDB report for Counter could not be retrieved.\",\n source_id: id,\n status: 404,\n level: Alert::FATAL)\n return \"\"\n end\n\n if view == \"counter\"\n CSV.generate do |csv|\n csv << [\"pid_type\", \"pid\", \"html\", \"pdf\", \"total\"]\n result[\"rows\"].each { |row| csv << [\"doi\", row[\"key\"], row[\"value\"][\"html\"], row[\"value\"][\"pdf\"], row[\"value\"][\"total\"]] }\n end\n else\n dates = date_range(options).map { |date| \"#{date[:year]}-#{date[:month]}\" }\n\n CSV.generate do |csv|\n csv << [\"pid_type\", \"pid\"] + dates\n result[\"rows\"].each { |row| csv << [\"doi\", row[\"key\"]] + dates.map { |date| row[\"value\"][date] || 0 } }\n end\n end\n end", "def export_csv(csv_data)\n\t\tFile.write(\"kfit_partners.csv\", csv_data.map(&:to_csv).join)\n\tend", "def export\n fn = current_user.name + \"'s blood glucose readings.csv\"\n respond_to do |format|\n format.csv { send_data(BgMeasurement.to_csv(current_user), :filename => fn) }\n end\n end", "def process_data(campaigns = [], stats = {})\n @outputs.each do |output|\n output.format_lines(campaigns)\n output.format_footer(stats)\n output.flush\n end\n end", "def to_csv(csv_file_name = 'ponderated_season_' + @season.id.to_s)\n # Check if data already collected and collect if needed\n scan_for_gender_category_and_event if @single_events.count == 0\n\n rows = []\n\n File.open(csv_file_name + '.csv', 'w') do |f|\n titles = ['gender', 'category', 'event', 'pool', 'total_results', 'ponderated best', 'best results']\n rows << titles.join(';')\n\n @single_events.each do |event|\n event_row = ''\n event_row += event.gender_type.code + ';'\n event_row += event.category_type.code + ';'\n event_row += event.event_type.code + ';'\n event_row += event.pool_type.code + ';'\n event_row += event.total_results.to_s + ';'\n event_row += event.get_ponderated_best.to_s + ';'\n event_row += event.best_results.map { |mir| mir.get_timing.to_s }.join(';')\n rows << event_row\n end\n f.puts rows.map { |row| row }\n end\n end", "def to_csv\n csvString = LineCounter.columnNames.join(',') + \"\\n\"\n self.each { |result| csvString += result.to_a.join(',') + \"\\n\"}\n csvString\n end", "def stats_to_csv(data)\n out_data = {\n member_join: Hash.new { |h, k| h[k] = [] },\n member_leave: Hash.new { |h, k| h[k] = [] },\n guild: Hash.new { |h, k| h[k] = [] },\n channel: Hash.new { |h, k| h[k] = [] },\n user: Hash.new { |h, k| h[k] = [] }\n }\n\n data.each do |time, props|\n props.each do |k, v|\n v.each { |p| out_data[k.to_sym][time] << p }\n end\n end\n\n CSV.generate(encoding: Encoding.find('UTF-8')) do |csv|\n first = true\n out_data.each do |type, vals|\n unless vals.empty?\n csv << [] unless first\n csv << [type] << ['timestamp', vals.values[0][0].keys].flatten!\n\n vals.each do |time, props|\n props.each { |prop| csv << [time, prop.values].flatten! 
}\n end\n end\n\n first = false\n end\n end\n end", "def show\n @campaigns = Campaign.all\n @campaign = Campaign.includes(:observations => :user, :observations => :person).find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @campaign }\n format.csv do \n response.headers['Content-Disposition'] = \"attachment; filename=\\\"#{@campaign.name}.csv\\\"\"\n csv_text = CSV.generate do |csv|\n csv << [\n 'Person',\n 'Riding',\n 'User',\n 'Value',\n 'Created',\n 'IP',\n 'User Agent',\n ]\n @campaign.observations.each do |observation|\n csv << [\n observation.person ? \"#{observation.person.first_name} #{observation.person.last_name}\" : '-',\n observation.person ? observation.person.riding : '-',\n observation.user ? observation.user.user_name : '-',\n observation.value,\n observation.created_at,\n observation.ip_address,\n observation.user_agent,\n ]\n end\n end\n render text: csv_text\n end\n end\n end", "def csv\n send_data(Map.to_csv, {:filename => \"maps.csv\" })\n end", "def member_stats_to_csv mbox\n stats = extract_member_stats mbox\n \n rows = []\n rows << [\"Name\", \"Total Msg Count\", \"New Msg Count\", \"Msg Reply Count\"]\n stats.each { |r| rows << [r[0], r[1].msg_total_count.to_s, r[1].msg_new_count.to_s, r[1].msg_reply_count.to_s] }\n\n data_to_csv rows\n end", "def export_emails(data)\n\t\t\t\t\t\tmy_string = \"\"\n\t\t\t\t\t\t\t\tmatch_val = 0\n\t\t\t\t\t\t\t\t\t\tloc_urn = \"\"\n\t\t\t\t\t\t\t\t\t\t\t\temails = \"\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tdata[\"configurable_attributes\"].each do |item|\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tarr = []\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif item[\"category\"] == \"Location\" && item[\"location_urn\"] != nil\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# only push values that are emails\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmy_string = item[\"value\"]\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# puts my_string\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmatch_val = /@/ =~ my_string\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# puts match_val\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif match_val >= 0\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# add data to csv\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tarr.push(item[\"location_urn\"], item[\"value\"])\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tadd_to_csv arr\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tend\nend", "def 
export_contacts_to_csv\n start_time = Time.now\n \n contacts = RESULT_COLL.find()\n \n FasterCSV.open(\"../output/healthcare_sg_export.csv\", 'w') {|csv|\n csv << [\"contact_name\", \"company_name\", \"address\"]\n contacts.each do |row|\n csv << row[\"contact\"]\n end\n }\n p Time.now - start_time\nend", "def csv\n claim_filter = ClaimFilter.new()\n claim_filter.restore_from_session(session,:claim_filter)\n claim_filter.organisation_id = params[:id].to_i\n\n content_type = ( request.user_agent =~ /windows/i ? 'application/vnd.ms-excel' : 'text/csv' )\n content = Claim.csv_by_filter(claim_filter)\n send_data(content,:type => content_type, :filename => 'claims.csv' )\n end", "def csv_export_chrono\n csv_string = CSV.generate do |csv|\n\n csv << [ \"Plaque\", \"Equipe\", \"Nom VTT\", \"Prenom VTT\", \"ADN VTT\", \"Nom Route\", \"Prenom Route\", \"ADN Route\", \"Categorie\"]\n\n Team.order_by(:plate).each do |t|\n cat = t.category\n next unless cat\n\n cname = cat.map { |v| v.capitalize }.join('-')\n csv << [ t.plate, t.name, \n t.vtt.profile.name.upcase, t.vtt.profile.surname.capitalize, t.vtt.profile.birth.year, \n t.route.profile.name.upcase, t.route.profile.surname.capitalize, t.route.profile.birth.year, cname]\n end\n end\n\n respond!(csv_string, 200, 'Content-Type' => 'text/csv')\n end", "def save_as_csv\n CSV.open(\"./db/#{@name}.csv\", \"wb\") {|csv| @result_scrap.to_a.each {|elem| csv << elem} }\n end", "def to_csv(options = {})\n if [\"html\", \"pdf\", \"combined\"].include? options[:format]\n view = \"pmc_#{options[:format]}_views\"\n else\n view = \"pmc\"\n end\n\n service_url = \"#{ENV['COUCHDB_URL']}/_design/reports/_view/#{view}\"\n\n result = get_result(service_url, options.merge(timeout: 1800))\n if result.blank? || result[\"rows\"].blank?\n Alert.create(exception: \"\", class_name: \"Faraday::ResourceNotFound\",\n message: \"CouchDB report for PMC could not be retrieved.\",\n source_id: id,\n status: 404,\n level: Alert::FATAL)\n return nil\n end\n\n if view == \"pmc\"\n CSV.generate do |csv|\n csv << [\"pid_type\", \"pid\", \"html\", \"pdf\", \"total\"]\n result[\"rows\"].each { |row| csv << [\"doi\", row[\"key\"], row[\"value\"][\"html\"], row[\"value\"][\"pdf\"], row[\"value\"][\"total\"]] }\n end\n else\n dates = date_range(options).map { |date| \"#{date[:year]}-#{date[:month]}\" }\n\n CSV.generate do |csv|\n csv << [\"pid_type\", \"pid\"] + dates\n result[\"rows\"].each { |row| csv << [\"doi\", row[\"key\"]] + dates.map { |date| row[\"value\"][date] || 0 } }\n end\n end\n end", "def scg_report_csv(filename)\n puts 'Creating the Storage Connectivity Groups report'\n CSV.open(\"#{filename}\", 'ab') do |csv|\n csv << [\"\\n\"]\n csv << %w(Storage_Connectivity_Groups_List)\n csv << %w(SCG_Name SCG_Auto_Add_VIOs SCG_FC_Storage_Access SCG_Ports_per_Fabric SCG_Host_List SCG_VIOs_List)\n scg_report\n csv_array(@scg_csv_array, csv)\n end\n puts 'Done'\n end", "def csv_print_to_file\n CSV.open(\"metadata_output.csv\", \"w\") do |csv|\n metadata.each do |key, value|\n if value.class == Hash\n csv << [key]\n print_hash_to_csv(value, csv)\n else\n csv << [key, value]\n end\n end\n end\n end", "def to_csv(params, cookies, options = {})\n @cookies = cookies\n @id = \"#{params[:controller]}_#{params[:action]}_#{model_class.name}\" if @id.blank?\n \n before_csv_render\n \n return CSV.generate do |csv|\n tmp = []\n get_visible_columns.each do |col|\n tmp << col.name\n end\n csv << tmp\n @model.each do |mdl|\n tmp = []\n get_visible_columns.each do |col|\n tmp << Misc::nested_send(mdl, 
col.id)\n end\n csv << tmp\n end\n end\n end", "def export_data\r\n folder = \"\\data\"\r\n FileUtils.mkdir_p folder\r\n CSV.open(File.join(folder, @output_stats_name), 'wb') do |csv|\r\n csv << @global_bests\r\n csv << @average_global_bests\r\n @neighbourhoods_list[0].report_particles.each do |x|\r\n csv << x\r\n end\r\n end\r\n end", "def to_csv(seperator=',')\n report = @report || generate_report\n report.map{ |row| row.join(seperator) }.join(\"\\n\")\n end", "def export_issues_csv\r\n sort_init 'issues.id', 'desc'\r\n sort_update\r\n\r\n search_filter_init_list_issues\r\n\t\t\t\t\t\r\n @issues = Issue.find :all, :order => sort_clause,\r\n\t\t\t\t\t\t:include => [ :author, :status, :tracker, :project ],\r\n\t\t\t\t\t\t:conditions => search_filter_clause\t\t\t\t\t\t\t\r\n\r\n export = StringIO.new\r\n CSV::Writer.generate(export, ',') do |csv|\r\n csv << %w(Id Status Tracker Subject Author Created Updated)\r\n @issues.each do |issue|\r\n csv << [issue.id, issue.status.name, issue.tracker.name, issue.subject, issue.author.display_name, l_datetime(issue.created_on), l_datetime(issue.updated_on)]\r\n end\r\n end\r\n export.rewind\r\n send_data(export.read,\r\n :type => 'text/csv; charset=utf-8; header=present',\r\n :filename => 'export.csv')\r\n end", "def dumpCsvSpeed()\n CSV.open(csvFilenameSpeed(), \"w\", { :col_sep => csvColSepChar() }) { |csv|\n csv << AnalyzerExpA.genCsvTitleRow() ;\n genCsvSpeed().each{|row|\n csv << row ;\n }\n }\n end", "def csv(opts = {})\n Squib.csv(opts)\n end", "def genCsvSummary()\n csv = [] ;\n to_a.each{|analyzer|\n csv.push(analyzer.genCsvRowSummary()) ;\n }\n return csv ;\n end", "def write_csv\n\t\tCSV.open(\"csv_exports/edits_per_user_#{@dataset}.csv\", 'w') do |csv|\n\t\t\tcsv << ['user', 'edits']\n\t\t\[email protected] do |user, edits|\n\t\t\t\tcsv << [user,edits]\n\t\t\tend\n\t\tend\n\tend", "def to_csv(options = {})\n if [\"html\", \"pdf\", \"xml\", \"combined\"].include? options[:format]\n view = \"#{options[:name]}_#{options[:format]}_views\"\n else\n view = options[:name]\n end\n\n # service_url = \"#{ENV['COUCHDB_URL']}/_design/reports/_view/#{view}\"\n\n result = get_result(service_url, options.merge(timeout: 1800))\n if result.blank? 
|| result[\"rows\"].blank?\n message = \"CouchDB report for #{options[:name]} could not be retrieved.\"\n Notification.where(message: message).where(unresolved: true).first_or_create(\n exception: \"\",\n class_name: \"Faraday::ResourceNotFound\",\n source_id: id,\n status: 404,\n level: Notification::FATAL)\n return \"\"\n end\n\n if view == options[:name]\n CSV.generate do |csv|\n csv << [\"pid_type\", \"pid\", \"html\", \"pdf\", \"total\"]\n result[\"rows\"].each { |row| csv << [\"doi\", row[\"key\"], row[\"value\"][\"html\"], row[\"value\"][\"pdf\"], row[\"value\"][\"total\"]] }\n end\n else\n dates = date_range(options).map { |date| \"#{date[:year]}-#{date[:month]}\" }\n\n CSV.generate do |csv|\n csv << [\"pid_type\", \"pid\"] + dates\n result[\"rows\"].each { |row| csv << [\"doi\", row[\"key\"]] + dates.map { |date| row[\"value\"][date] || 0 } }\n end\n end\n end", "def export_to_csv(file_name, arr)\n puts \"Exporting list...\"\n csv_headers = [\"CLS URN:\", \"Lead2Lease Count:\"]\n CSV.open(file_name, \"wb\") do |csv|\n csv << csv_headers\n end\n i = 0\n while i < arr.length\n CSV.open(file_name, \"a+\") do |csv|\n formatted = []\n if arr[i].get_count > 0\n formatted.push(arr[i].name, arr[i].get_count)\n csv << formatted\n end\n end\n i = i+1\n end\n puts \"Lead2Lease list exported!\"\nend", "def save_groups_to_csv\n\t\tCSV.open(\"groups.csv\", \"w\") do | csv | \n\t\t\[email protected] do |row|\n\t\t\t\tcsv << row\n\t\t\tend\n\t\tend\n\tend", "def csv_report\n tire_cols = params[:tire] || {}\n ar_cols = params[:activerecord] || {}\n assocs_to_include = params[:assoc][:include] || {}\n params[:assoc][:max] ||= {}\n klass = model_class.constantize\n @filename = \"#{model_class.humanize}.csv\"\n\n response.headers['Content-Type'] ||= 'text/plain'\n response.headers['Content-Disposition'] = \"attachment; filename=#{@filename}\"\n response.headers['Content-Transfer-Encoding'] = 'binary'\n response.headers['Last-Modified'] = Time.now.to_s\n\n # Right, try to define a header:\n header = []\n tire_cols.keys.each { |x| header.push(x.humanize) }\n ar_cols.keys.each { |x| header.push(x.humanize) }\n assocs_to_include.keys.each do |assoc|\n if params[:assoc][:max][assoc] == 'join' # Is a has_many with only one real column\n header.push params[:assoc][assoc.to_sym].keys.first\n elsif params[:assoc][:max][assoc] # has_many\n (1 .. (params[:assoc][:max][assoc].to_i)).each do |i|\n params[:assoc][assoc.to_sym].keys.each do |k|\n header.push(\"#{assoc.singularize.humanize} #{i} #{k.humanize}\")\n end\n end\n else # has_a\n params[:assoc][assoc.to_sym].keys.each do |k| # Each key requested from the associated record\n header.push \"#{assoc.humanize} #{k.humanize}\"\n end\n end\n end\n\n results = klass.search({ per: TireSearch::INFINITY }, 1, '')\n self.response_body = Enumerator.new do |y|\n results.each_with_index do |result, i|\n y << header.to_csv if i == 0\n\n line = []\n tire_cols.keys.each { |x| line.push(result[x]) }\n\n result = result.load if ar_cols.count > 0 || assocs_to_include.keys.count > 0\n\n ar_cols.keys.each { |x| line.push(result.send(x)) } if ar_cols.count > 0\n\n assocs_to_include.keys.each do |assoc|\n related = result.send(assoc)\n if params[:assoc][:max][assoc] == 'join' # Is a has_many with only one real column\n col = params[:assoc][assoc.to_sym].keys.first\n line.push related.map { |x| x.send(col) }.join(' // ')\n elsif params[:assoc][:max][assoc]\n (0 .. 
(params[:assoc][:max][assoc].to_i - 1)).each do |j|\n params[:assoc][assoc.to_sym].keys.each do |k|\n line.push(related[j] ? related[j].send(k) : nil)\n end\n end\n else\n params[:assoc][assoc.to_sym].keys.each do |k| # Each key requested from the associated record\n line.push related ? related.send(k) : nil\n end\n end\n end\n y << line.to_csv\n GC.start if i % 500 == 0\n end\n end\n end", "def export_csofeed\n # Create header row #\n header = ['Record Type', 'Device Key', 'IP Addresses', 'MAC Addresses', 'System Name', 'FQDN', 'Status', 'Function', 'Runs MOTS/PRISM Apps', 'MOTS/PRISM IDs', 'Runs Non-MOTS/PRISM Apps', 'Internet Facing', 'Device Criticality', 'Device Owner', 'Operating System', 'Operating System Version', 'Administrator\\'s ATTUID', 'Support Group', 'Serial Number', 'Asset Tag Number', 'Location', 'Location CLLI', 'Comments' \"\\n\"]\n csvdoc = [header.join(',')]\n Node.all.each do |node|\n result = make_csoline(node)\n csvdoc << result.join(',') if result\n end\n fname = \"public/csvexports/csofeed_#{Time.now.strftime(\"%d%m%Y\")}.csv.gz\"\n File.open(fname, 'w') do |f|\n gz = Zlib::GzipWriter.new(f)\n gz.write csvdoc\n gz.close\n end\n end", "def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => 'es_setups.path, es_setups.name'\n setups = EsSetup.find :all, :order => @sort, :conditions => session[:conditions_setup]\n # Creation of the file\n file_name = \"setups_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Chemin\".trn,\"Nom\".trn,\"Valeur\".trn,\"Type\".trn, \"Lecture seule\".trn]\n setups.each do |t|\n csv << [t.path,t.name,t.value,t.type_data,(t.read_only=='Y' ? 'V' : '')]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end", "def generate_csv\n\n fields = @resource[:class].typus_fields_for(:csv).collect { |i| i.first }\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = \"#{Rails.root}/tmp/export-#{@resource[:self]}-#{Time.now.utc.to_s(:number)}.csv\"\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields\n @resource[:class].find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map { |f| record.send(f) }\n end\n end\n end\n\n send_file filename\n\n end", "def csv_export(config, patients, inner_batch_size)\n # Get all of the field data based on the config\n field_data = get_field_data(config, patients)\n\n # Determine selected data types for export\n data_types = CUSTOM_EXPORT_OPTIONS.keys.select { |data_type| field_data.dig(data_type, :checked).present? 
}\n\n files = []\n csvs = {}\n packages = {}\n\n data_types.each do |data_type|\n # Create CSV with column headers\n package = CSV.generate(headers: true) do |csv|\n csv << field_data.dig(data_type, :headers)\n csvs[data_type] = csv\n end\n packages[data_type] = package\n end\n\n # NOTE: in_batches appears to NOT sort within batches, so explicit ordering on ID is also done deeper down.\n # The reorder('') here allows this ordering done later on to work properly.\n patients.reorder('').in_batches(of: inner_batch_size).each do |batch_group|\n # Get export data in batches to decrease size of export data hash maintained in memory\n exported_data = get_export_data(batch_group.order(:id), config[:data], field_data)\n data_types.each do |data_type|\n exported_data[data_type]&.each { |record| csvs[data_type] << record }\n end\n end\n\n data_types.each do |data_type|\n files << { filename: build_export_filename(config, nil), content: StringIO.new(packages[data_type]) }\n end\n files\n end", "def export_to_csv(file)\n Keybox::Convert::CSV.to_file(@db.records, file)\n hsay \"Exported #{@db.records.size} records to #{file}.\", :information\n end", "def write_to_file(course)\n className = \"#{course[\"subject\"]} #{course[\"catalogNbr\"]}\"\n title = course[\"titleLong\"]\n credits = course[\"enrollGroups\"][0][\"unitsMaximum\"]\n CSV.open(\"results.csv\", \"a\") do |csv|\n csv << [className, title, credits]\n end\nend", "def export\n #@contacts = Contact.all\n send_data Contact.to_csv(@contacts),\n :filename => 'addressbook.csv',\n :type => 'text/csv; charset=utf-8',\n :disposition => 'attachment'\n end", "def to_csv(function_list, tests)\n function_list.map { |function_name|\n v = tests[\"#{function_name} function\"]\n if v\n positive_tests = count_tests_in(v['groups']['success'])\n negative_tests = count_tests_in(v['groups']['failure'])\n pending_tests =\n count_pending_tests_in(v['groups']['failure']) +\n count_pending_tests_in(v['groups']['failure'])\n else\n positive_tests = 0\n negative_tests = 0\n pending_tests = 0\n end\n '%-25s, %-9d, %-9d, %-9d' % [function_name, positive_tests, negative_tests, pending_tests]\n }.compact\nend", "def output_to_csv(rows, destination_file_path)\n logger.debug { \"Outputting to CSV File. '#{destination_file_path}'\" }\n total_rows = rows.length\n CSV.open(destination_file_path, 'w') { |writer|\n rows.each_with_index do |row, idx|\n logger.debug { \"Writing Row #{idx+1} of #{total_rows}\" }\n writer << row\n end\n }\n logger.info { \"Output Saved to CSV File. '#{destination_file_path}'\" }\n end", "def print_as_csv\n file_data = []\n file_data << headers\n @case_data_by_date_map.each do |_, elements|\n file_data += elements.shuffle\n end\n\n file_data.each do |element|\n puts element.join(',')\n end\n end", "def to_csv\n FileOutputService.to_csv(result, file_options)\n end", "def to_csv\n Redmine::Export::CSV.generate do |csv|\n # date range\n csv_min_date = [@ev.keys.min, @ac.keys.min, @pv.keys.min].min\n csv_max_date = [@ev.keys.max, @ac.keys.max, @pv.keys.max].max\n evm_date_range = (csv_min_date..csv_max_date).to_a\n # title\n csv << ['DATE', evm_date_range].flatten!\n # set evm values each date\n pv_csv = {}\n ev_csv = {}\n ac_csv = {}\n evm_date_range.each do |csv_date|\n pv_csv[csv_date] = @pv[csv_date].nil? ? nil : @pv[csv_date].round(2)\n ev_csv[csv_date] = @ev[csv_date].nil? ? nil : @ev[csv_date].round(2)\n ac_csv[csv_date] = @ac[csv_date].nil? ? 
nil : @ac[csv_date].round(2)\n end\n # evm values\n csv << ['PV', pv_csv.values.to_a].flatten!\n csv << ['EV', ev_csv.values.to_a].flatten!\n csv << ['AC', ac_csv.values.to_a].flatten!\n end\n end", "def write_csv filename, fields = nil\n\n # By default all fields present in every row of output_data will be incorporated.\n if fields.nil?\n # Transform each output struct into a list of its keys, then take the intersection of each Array of keys.\n # This ensures that only fields present for all rows will be incorporated.\n fields = output_data.map{|o| o.to_h.keys}.inject do |last_keys, this_keys|\n last_keys & this_keys\n end\n end\n\n CSV.open filename, \"wb\", row_sep: \"\\r\\n\" do |csv|\n # Header line\n csv << fields\n\n output_data.each do |out|\n output_row = []\n fields.each do |field|\n output_row << out[field]\n end\n csv << output_row\n end\n end\n end", "def create_scale_results_csv_file(scale_results_parent_dir)\n CSV.open(\"#{scale_results_parent_dir}/PERF_SCALE_#{@scale_timestamp}.csv\", \"wb\") do |csv|\n headings = [\"agents\",\n \"ok\",\n \"ko\",\n \"combined mean\",\n \"catalog mean\",\n \"filemeta plugins mean\",\n \"filemeta pluginfacts mean\",\n \"locales mean\",\n \"node mean\",\n \"report mean\",\n \"average CPU %\",\n \"average memory\"]\n\n csv << headings\n end\n end", "def to_csv\n attributes = %w{id email name course_ids major_title created_at}\n \n CSV.generate(headers: true) do |csv|\n csv << attributes\n csv << attributes.map{ |attr| self.send(attr) }\n end\n end", "def csv_download\n @stats = Hyrax::WorkUsage.new(params[:id])\n filename = params[:id] + \"_stats.csv\"\n #This is an example that worked\n #send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => 'attachment; filename=payments.csv'\n target = \"attachment`; filename=#{filename}\"\n send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => target\n end", "def index\n @goldmeasures = Goldmeasure.all\n\n respond_to do |format|\n format.html \n format.json\n format.csv { send_data @goldmeasures.to_csv }\n\n end\n end", "def to_csv_dash_data\n @grant_data = dash_data_create\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"all_dashboard_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n end", "def output_csv\n if @report.is_a?(Reports::Bundle)\n @report.to_csv_usr_file\n else\n path = ask_path('.csv')\n path ? @report.to_csv_file(path) : 'Annulé'\n end\n end", "def build_csv()\n\t\tconcat_path = \"#{@location}/#{@name}.csv\"\n\n\t\tCSV.open(concat_path, 'wb') do |csv_line|\n\t\t\t\t\t\n\t\t\theaders = ['movie_date', 'title', 'lifetime_gross_sales']\n\t\t\tcsv_line << headers\n\n\t\t\[email protected] do |cage|\n\t\t\t\tcsv_line << [cage[:movie_date], cage[:title], cage[:life_time_gross]]\n\t\t\tend\n\t\tend\n\tend", "def index\n if params[:csv]\n # generate the header line\n csv_string = 'Campers , Address ,'\n if @option.use_2nd_address?\n\tcsv_string << 'addr2,'\n end\n csv_string << 'City, State, Mail code,'\n if @option.use_country? 
&& Country.active.count > 0\n\tcsv_string << 'Country,'\n end\n case @option.no_phones\n\twhen 1\n\t csv_string << 'Phone, '\n\twhen 2\n\t csv_string << 'Phone, 2nd Phone, '\n end\n csv_string << 'email address, last activity'+\"\\n\"\n # now for the data\n Camper.all.each do |c|\n\tcsv_string << c.full_name + ',' + c.address + ','\n\tcsv_string << (c.address2 ? c.address2 : '') + ',' if @option.use_2nd_address?\n\tcsv_string << c.city + ',' + c.state + ',' + c.mail_code + ','\n\tif @option.use_country? && Country.active.count > 0\n\t if c.country_id?\n\t csv_string << (c.country.name? ? c.country.name : '') + ','\n\t else\n\t csv_string << ','\n\t end\n\tend\n\tcsv_string << (c.phone ? c.phone : '' ) + ',' if @option.no_phones > 0\n\tcsv_string << (c.phone_2 ? c.phone_2 : '' ) + ',' if @option.no_phones > 1\n\tcsv_string << (c.email ? c.email : '' ) + ',' + c.activity.to_s + \"\\n\"\n end\n # debug csv_string\n send_data(csv_string,\n\t\t:type => 'text/csv;charset=iso-8859-1;header=present',\n\t\t:disposition => 'attachment; filename=Campers.csv') if csv_string.length\n else\n @page_title = \"Camper Report\"\n @campers = Camper.all\n end\n end", "def to_csv\n value.to_csv\n end", "def save_as_csv\n temp = get_email_list.map{|hash| hash.map{|k, v| [k, v]}}\n temp = temp.map { |data| data.join(\",\") }.join(\"\\n\")\n File.open(\"db/emails.csv\", \"w\") do |csv|\n csv.write(temp)\n end\n end", "def lifetime_csv\n CSV.generate do |csv|\n report_details(period_covered: 'Lifetime').each { |a| csv.add_row(a) }\n csv.add_row [] # Blank row\n lifetime_table.each { |a| csv.add_row(a) }\n end\n end", "def index\n\n # parse params hash \n @fact_aggregation = FactAggregation.new\n options = {\n :where => params[:filters],\n :group_by => params[:dimensions].split(\",\"),\n :frequency => params[:frequency],\n :summarize => params[:summarize] ? 
params[:summarize].split(\",\") : [],\n :tz_offset => params[:tz_offset]\n }\n # aggregate facts\n begin\n params[:metrics].split(\",\").each do |metric|\n ActiveRecord.const_get(metric.classify).aggregate(@fact_aggregation, options)\n end\n rescue Interrupt, SystemExit\n raise\n rescue\n render :text => nil, :status => 422\n return \n end\n\n respond_to do |format|\n format.html\n format.csv do \n @csv_rows = @fact_aggregation.to_csv({\n :facts => params[:metrics].split(\",\"),\n :dimensions => params[:dimensions].split(\",\"),\n :frequency => params[:frequency]\n })\n render_csv :data => @csv_rows\n end\n end\n end", "def generate_csv\n\n fields = @resource.typus_fields_for(:csv)\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = Rails.root.join(\"tmp\", \"export-#{@resource.to_resource}-#{Time.now.utc.to_s(:number)}.csv\")\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields.keys\n @resource.find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map do |key, value|\n case value\n when :transversal\n a, b = key.split(\".\")\n record.send(a).send(b)\n when :belongs_to\n record.send(key).to_label\n else\n record.send(key)\n end\n end\n end\n end\n end\n\n send_file filename\n\n end", "def stats_promoted_tweets_csv(file_names)\n\t\tCSV.open('stats_pm.csv','wb',:force_quotes => true) do |csv|\n\t\t\tcsv << ['id','account_id','fact_name','fact_subnm','fact_value','activity_date']\n\t\t\tfile_names.each{ |file|\n\t\t\t\tdata_hash = JSON.parse(file)\n\n\t\t\t\tid = data_hash['request']['params']['promoted_tweet_id']\n\t\t\t\taccount_id = data_hash['request']['params']['account_id']\n\n\t\t\t\tstart_date = Time.parse(data_hash['request']['params']['start_time']).to_date.to_s\n\t\t\t\tend_date = Time.parse(data_hash['request']['params']['end_time']).to_date.to_s\n\n\t\t\t\t# This is used for calcualting difference betweens start & end date\n\t\t\t\tdiff_dates = ((Time.parse(end_date).to_date) - (Time.parse(start_date).to_date)).to_i\n\n\t\t\t\t#Store all the dates in an array so that we can populate them in the csv file against each correct records\n\t\t\t\tdates = Array.new\n\t\t\t\ttemp = Time.parse(start_date).to_date\n\t\t\t\tfor i in 0..diff_dates\n\t\t\t\t\tdates.push(temp.to_s)\n\t\t\t\t\ttemp = temp +1\n\t\t\t\tend\n\n\t\t\t\tcounter = 0 # This counter is used for resetting the dates whenever new fact is acuqired\n\n\t\t\t\tkeys = data_hash['data'].keys # Generate all keys so that we dont have to specify fact names explicitly\n\t\t\t\tkeys.each do |item|\n\t\t\t\t\tif data_hash['data'][item].is_a? Array\n\t\t\t\t\t\tdata_hash['data'][item].each { |nested|\n\t\t\t\t\t\t\tcsv << [id, account_id, item, nil, nested, dates[counter]]\n\t\t\t\t\t\t\tcounter = counter + 1\n\t\t\t\t\t\t\tcounter = 0 if(counter>=(diff_dates)) # Resetting the counter\n\t\t\t\t\t\t} unless data_hash['data'][item].nil?\n\n\t\t\t\t\telsif data_hash['data'][item].is_a? String\n\n\t\t\t\t\telsif data_hash['data'][item].is_a? 
Hash\n\t\t\t\t\t\tnested_keys = data_hash['data'][item].keys\n\t\t\t\t\t\tnested_keys.each { |f|\n\t\t\t\t\t\t\tdata_hash['data'][item][f].each do |t|\n\t\t\t\t\t\t\t\tcsv << [id, account_id, item, f, t, dates[counter]]\n\t\t\t\t\t\t\t\tcounter = counter + 1\n\t\t\t\t\t\t\t\tcounter = 0 if(counter>=(diff_dates)) # Resetting the counter\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\t} unless nested_keys.nil?\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t}unless file_names.nil?\n\t\tend\n\tend", "def to_csv\n self.data.to_csv\n end", "def print_csv_results\n FileUtils.mkdir_p(\"data\")\n File.open(\"data/results.csv\", \"w\") do |f|\n @data.sort_by{|id, values| values[\"sum_dose\"]}.each do |id, values|\n f.puts \"%s\\t%d\\t%.4f\" % [id, values[\"ddays\"], dose_unit(values[\"sum_dose\"])]\n end\n end\n end", "def export_to_csv\n puts 'Please enter a csv file name:'\n\n filename = get_filename\n filename = add_csv_suffix_if_needed filename\n \n write_to_csv_file filename\n \n end", "def to_csv(options={})\n CSV.generate(options) do |csv|\n csv << self.values.map{|v| v.to_csv}\n end\n end", "def export_csv\n\n group_array = []\n @page = 1\n @per_page = 50\n\n groups = @context.get(:groups, :page => @page, :per_page => @per_page, :access_token => ENV[\"API_TOKEN\"])\n group_array << groups\n group_array, group_hash = check_paging(groups, group_array, \"groups\", @context, true)\n\n group_array.each_with_index do |group, index|\n is_new = index == 0 ? true : false\n membership_array = []\n @page = 1\n\n group_model = Group.find(group['id'], :params => { :access_token => ENV[\"API_TOKEN\"] })\n memberships = group_model.get(:memberships, :page => @page, :per_page => @per_page, :access_token => ENV[\"API_TOKEN\"])\n membership_array << memberships\n membership_array, @membership_hash = check_paging(memberships, membership_array, \"memberships\", group_model, is_new)\n end\n\n export_data = [group_array, @membership_hash]\n perform_export(export_data) \n\n respond_to do |format|\n format.html { render :inline => \"<a href=<%= @download_url %>>Download CSV</a>\" }\n format.json { render :json => @download_url.to_json }\n end\n end", "def export\n @transactions = Transaction.find_all_by_user_id(current_user.id)\n csv = \"\"\n i = 0\n @transactions.each do |trans|\n if (i==0)\n csv += trans.to_csv(true)\n else\n csv += trans.to_csv(false)\n end\n i += 1\n end\n\n respond_to do |format|\n format.csv { send_data csv }\n end\n end", "def write_analyzed(dir)\n FileUtils.mkdir_p(dir)\n\n attributes.each do |a, v|\n path = File.join(dir, a.to_s)\n s = \"Count|Value\\n\" + v.map { |e| \"#{e[:count]}|#{e[:value]}\" }.join(\"\\n\")\n File.open(\"#{path}.csv\", 'w') { |f| f.write(s) }\n end\n end", "def to_csv\n CSV.generate { |csv| to_a.each { |r| csv << r } }\n end", "def generate_csv\n @project = Project.find(params[:id])\n \n content_type = if request.user_agent =~ /windows/i\n ' application/vnd.ms-excel '\n else\n ' text/csv '\n end\n \n project_net = @project.find_all_connections(friend = true, follower = false) \n \n CSV::Writer.generate(output = \"\") do |csv|\n csv << [\"DL n=\" + @project.persons.count.to_s ]\n csv << [\"format = edgelist1\"]\n csv << [\"labels embedded:\"]\n csv << [\"data:\"]\n project_net.each do |entry|\n csv << [entry[0], entry[1], \"1\"]\n end\n @project.persons.each do |person|\n csv << [person.username]\n end\n end\n send_data(output,\n :type => content_type,\n :filename => @project.name.to_s + \"_FF_SNA.csv\")\n end", "def scg_report\n @scg_csv_array = []\n get_resource_list('compute', 
'storage-connectivity-groups', 'storage_connectivity_groups', name = 'display_name', id = 'id')\n @resource_id_list.each do |scgid|\n scg = rest_get(\"#{@resource_url}/storage-connectivity-groups/#{scgid}\", @token_id)\n scg_array = JSON.parse(scg)['storage_connectivity_group']\n scg_name = scg_array['display_name']\n scg_auto_add_vios = scg_array['auto_add_vios']\n scg_fc_storage_access = scg_array['fc_storage_access']\n scg_ports_per_fabric_npiv = scg_array['ports_per_fabric_npiv']\n @scg_host_list = []\n @scg_host_array = scg_array['host_list']\n @scg_host_array.each do |host|\n @scg_host_list.push(host['name'])\n end\n @scg_vios_array = scg_array['host_list'][0]['vios_list']\n @scg_vios_names = []\n @scg_vios_array.each do |vios|\n @scg_vios_names.push(vios['name'])\n end\n @scg_csv_headers = %w(SCG_Name SCG_Auto_Add_VIOs SCG_FC_Storage_Access SCG_Ports_per_Fabric SCG_Host_List SCG_VIOs_List)\n @scg_csv_array << [scg_name, scg_auto_add_vios, scg_fc_storage_access, scg_ports_per_fabric_npiv, @scg_host_list, @scg_vios_names]\n end\n end", "def export_as_csv\n contacts = current_mpd_user.mpd_contacts\n csv_setup\n result = FasterCSV.generate do |csv|\n # Write Column Headers\n csv << [\"Full Name\", \"Salutation\", \"Address 1\", \"Address 2\", \"City\", \"State\", \"Zip\", \"Phone\", \"Email Address\", \"Gift Amount\", \"Letter Sent?\", \"Call Made?\", \"Thank-you Sent?\", \"Notes\"]\n \n # Write data rows\n contacts.each do |c|\n csv << [c.full_name, c.salutation, c.address_1, c.address_2, c.city, c.state, c.zip, number_to_phone(c.phone, :area_code => true), c.email_address, number_to_currency(number_with_delimiter(c.gift_amount(current_event.id))), c.letter_sent(current_event.id), c.contacted(current_event.id), c.thankyou_sent(current_event.id), c.notes]\n end\n end\n render :text => result\n end", "def write_results_to_array(parsed_results)\n # get the array of sprints for the project\n sprints = parsed_results['values']\n # open a CSV with the name created above\n sprints.each do |sprint|\n # output the each line that we're writing to the CSV\n puts \"Sprint: #{sprint['name']}\\n\"\n unless sprint['startDate'].nil?\n puts \"\\t started: #{DateTime.parse(sprint['startDate']).strftime(\"%m/%d/%Y\")}\" \n else\n puts \"\\tHas not started - will not be exported\"\n end\n unless sprint['endDate'].nil? 
\n puts \"\\t ended: #{DateTime.parse(sprint['endDate']).strftime(\"%m/%d/%Y\")}\" \n else\n puts \"\\tHas not ended - will not be exported\"\n end\n\n # set a default start and end date of 0-0-0 (easily identifiable so they can be removed later)\n startDate = \"0-0-0\"\n endDate = \"0-0-0\"\n\n # format start and end dates\n unless sprint['startDate'].nil?\n startDate = DateTime.parse(sprint['startDate']).strftime('%m-%d-%Y %l:%M:%S %p')\n end\n\n unless sprint['endDate'].nil?\n endDate = DateTime.parse(sprint['endDate']).strftime('%m-%d-%Y %l:%M:%S %p')\n end\n\n #push results to an array that we'll use when writing to the csv later...\n @cumulative_sprints_array.push([sprint['name'], startDate, endDate, sprint['state']])\n end\nend", "def write_to_csv (time, platform, browser_name, browser_version, build, counter, num_cases, delay, duration, rate, test_name)\n googledrive_path=\"Google Drive/CODAP @ Concord/Software Development/QA\"\n localdrive_path=\"Documents/CODAP data/\"\n\n if !File.exist?(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\") || $new_file\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"wb\") do |csv|\n csv<<[\"Time\", \"Platform\", \"Browser\", \"Browser Version\", \"CODAP directory\", \"CODAP Build Num\", \"Test Name\", \"Counter\", \"Num of Cases\", \"Delay (s)\", \"Time Result (ms)\", \"Rate (cases/sec)\"]\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n else\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"a\") do |csv|\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n end\nend", "def export(params={})\n columns = delimited_string_to_array(Settings.export.travel_fields)\n send_data Travel.export(columns), :filename => \"travel.csv\"\n end", "def as_csv(*)\n populate_carriers!\n\n CSV.generate do |csv|\n csv << [\n 'key', 'primary_co2_emission',\n *primary_carriers.map { |c| \"primary_demand_of_#{c} (MJ)\" },\n *final_carriers.map { |c| \"final_demand_of_#{c} (MJ)\" }\n ]\n\n @graph.group_nodes(:application_group).each do |node|\n csv << node_row(node)\n end\n end\n end", "def assign_csv_report\n unless @csv.nil?\n csv_header = ['Time', 'Req/s', 'Avg. resp. (ms)']\n @csvexport = Ralphttp::CsvExport.new(csv_header)\n end\n end", "def report_activities\n @project = Project.find params[:project_id]\n ###\n retrieve_query\n # sort_init(@query.sort_criteria.empty? ? 
[['id', 'desc']] : @query.sort_criteria)\n # sort_update(@query.sortable_columns)\n \n if @query.valid?\n @limit = Setting.issues_export_limit.to_i\n\n @issue_count = @query.issue_count\n @issue_pages = Paginator.new self, @issue_count, @limit, params['page']\n @offset ||= @issue_pages.current.offset\n @issues = @query.issues(:include => [:assigned_to, :tracker, :priority, :category, :fixed_version],\n # :order => sort_clause,\n :offset => @offset,\n :limit => @limit)\n\n @iss = []\n @issues.each{|i|\n issue = Issue.find i.id\n @iss << issue\n }\n respond_to do |format|\n format.html { send_data(statuses_to_csv_activities(@iss, @project), :type => 'text/csv; header=present', :filename => 'export.csv') }\n end \n end\n ###\n end", "def index\n session['goals_view'] = params[:view] if params[:view].present?\n\n @goals = Goal.all\n #Goal.gds_goals\n respond_to do |format|\n format.html\n format.csv { send_data @goals.to_csv}\n end\n\n end", "def export\n headers = JSON[params[:column_array]]\n rows = JSON[params[:row_array]]\n column_names = Array.new\n headers.each do |col|\n column_names << col\n end\n csv_string = CSV.generate do |csv|\n csv << column_names\n rows.each do |row|\n csv << row\n end\n end\n\n filename = params[:file_name] + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end", "def to_csv\n \"#{@key},#{@expected},#{@type}\"\n end", "def create_assignment_csv(args)\n [:path, :urls].each{|arg| args[arg] or raise Error::Argument, \"Missing arg '#{arg}'\" }\n headers = ['audio_url',\n 'project_id',\n 'unusual',\n 'chunk',\n 'chunk_hours',\n 'chunk_minutes',\n 'chunk_seconds',\n 'voices_count',\n (1 .. args[:voices].count).map{|n| [\"voice#{n}\", \"voice#{n}title\"]}\n ].flatten\n csv = args[:urls].map do |url|\n [url, \n local.id,\n args[:unusual].join(', '),\n interval_as_time_string,\n interval_as_hours_minutes_seconds.map{|n| (n == 0) ? 
nil : n },\n args[:voices].count,\n args[:voices].map{|v| [v[:name], v[:description]]}\n ].flatten\n end\n local.file(*args[:path]).as(:csv).write_arrays(csv, headers)\n local.file_path(*args[:path])\n end", "def csv\n @records = Reply.all\n csv_string = FasterCSV.generate do |csv|\n csv << %w{Code Name Email Engagement Engagement_Adults Engagement_Children Wedding Wedding_Adults Wedding_Children Camping Diet Notes Updated}\n @records.each do |line|\n csv << [\n line['code'],\n line['name'], \n line['email'],\n line['engagement'],\n line['engagement_adults'],\n line['engagement_children'],\n line['wedding'],\n line['wedding_adults'],\n line['wedding_children'],\n line['camping'],\n line['diet'],\n line['notes'],\n line['updated_at']\n ]\n end\n end\n filename = \"rsvp_download\" + Time.now.strftime(\"%d%m%y-%H%M\").to_s + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end", "def generate_csv\n csv_string = CSV.generate do |csv|\n csv << [\"Sl no.\",\"Scheme\",\"IP no.\", \"Patient name\", \"UHID no.\", \"Reg no.\", \"DOA\", \"DOS\", \"DOD\", \"Plan of treatment\", \"Approved Amount\",\"Claim Amount\", \"TDS\",\"Net Amount\",\"Hospital charges\",\"Medicine charges\",\"Implant charges\"]\n sl_no = 1\n @claims.each do |claim|\n csv <<[sl_no,\"#{claim.scheme rescue \"\"}\",\"#{claim.ip_no rescue \"\"}\",\"#{claim.patient_name rescue \"\"}\",\"#{claim.uhid_no rescue \"\"}\",\"#{claim.reg_no rescue \"\"}\",\"#{claim.date_of_admission}\",\"#{claim.date_of_surgery}\",\"#{claim.date_of_discharge}\",\"#{claim.plan_of_treatment rescue \"\"}\",(claim.approved_amount rescue 0.0),(claim.claim_amount rescue 0.0),(claim.tds_amount rescue 0.0),(claim.net_amount rescue 0.0),(claim.hospital_charge rescue 0.0),(claim.medicine_charge rescue 0.0),(claim.implant_charge rescue 0.0)]\n sl_no +=1\n end\n end\n csv_string\n end", "def process_outputs(campaigns, stats)\n @output_manager.process_data campaigns, stats\n end", "def to_csv_data( options={} )\n inject( [] ) do |arr,model_instance|\n arr.push( *model_instance.to_csv_data( options ) )\n end\n end", "def save_to_csv(players)\n CSV.open('../docs/collegeQBstats.csv', 'wb') do |csv|\n csv << [\"NAME\", \"POS\", \"CMP\", \"ATT\", \"CMP%\", \"YDS\", \"AVG\", \"LNG\", \"TD\", \"INT\", \"SACK\", \"RTG\"]\n for i in 0..players.length-1\n csv << [players[i].name, players[i].college, players[i].pos, players[i].cmp, players[i].att, players[i].cmp_pct, \n players[i].yds, players[i].lng, players[i].td, players[i].int, players[i].sack, players[i].rtg]\n end\n end\nend", "def out_file\n options.csv_matrix\n end" ]
[ "0.65281975", "0.6377574", "0.63710546", "0.63386273", "0.6303656", "0.618765", "0.618765", "0.60035276", "0.599142", "0.5974149", "0.59554225", "0.59412813", "0.5920252", "0.59174967", "0.5897788", "0.5889055", "0.58872867", "0.588151", "0.5874404", "0.58655864", "0.58565867", "0.5819552", "0.58164716", "0.5813754", "0.57902247", "0.57896876", "0.57819015", "0.57816046", "0.57630855", "0.57577807", "0.5756796", "0.57536954", "0.575075", "0.574375", "0.57424486", "0.5731822", "0.57166034", "0.570623", "0.5699935", "0.5688425", "0.5680634", "0.5675595", "0.567475", "0.56688994", "0.56584364", "0.56556356", "0.56526965", "0.56491226", "0.5646055", "0.56425744", "0.5639791", "0.56289905", "0.5623761", "0.5623002", "0.56205183", "0.56170493", "0.5616635", "0.5609956", "0.5609401", "0.5602811", "0.56021214", "0.55990505", "0.5597714", "0.55906606", "0.5572585", "0.5565863", "0.5556304", "0.55454046", "0.55420357", "0.5531394", "0.5523122", "0.55225796", "0.55201894", "0.551658", "0.55083704", "0.5502561", "0.5494511", "0.54938495", "0.5492761", "0.54911214", "0.5488899", "0.54884785", "0.5481885", "0.547912", "0.5477544", "0.54769677", "0.5476723", "0.5473464", "0.547061", "0.54692495", "0.5466989", "0.54669315", "0.5463366", "0.5450264", "0.54485446", "0.54483616", "0.5438112", "0.54360056", "0.5425041", "0.54232925" ]
0.58811414
18
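The record that ends here (its query and positive document begin before this excerpt) scores a batch of negative snippets that all follow one pattern: building a CSV string from a collection of records with Ruby's standard csv library. A minimal sketch of that shared pattern, for orientation only — the Metric struct, the column names, and the metrics_to_csv helper are hypothetical and not taken from any snippet above.

    # Illustrative only: the generic export pattern the negatives above share.
    # Metric and metrics_to_csv are hypothetical names.
    require "csv"

    def metrics_to_csv(metrics, columns = %w[name value recorded_at])
      CSV.generate(force_quotes: true) do |csv|
        csv << columns                                              # header row
        metrics.each { |m| csv << columns.map { |c| m.public_send(c) } }
      end
    end

    Metric = Struct.new(:name, :value, :recorded_at)
    puts metrics_to_csv([Metric.new("cpu", 0.42, "2024-01-01")])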
Wrap test runs in a transaction to replicate AR's transactional fixtures See:
def run(*args, &block) Sequel::Model.db.transaction(rollback: :always, auto_savepoint: true) { super } end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def spy_transaction!\n @within_transaction = nil\n allow(TestSqlCaller).to receive(:transaction).and_wrap_original do |meth, *args, &block|\n @within_transaction = true\n meth.call(*args, &block)\n ensure\n @within_transaction = false\n end\n end", "def with_transaction\n ActiveRecord::Base.transaction { yield }\n end", "def setup_with_fixtures\n ActiveRecord::Base.send :increment_open_transactions\n ActiveRecord::Base.connection.begin_db_transaction\n load_fixtures\n end", "def with_transaction(&block)\n base_model.transaction(&block)\n end", "def test_transactions(table=\"test_monetdb_transactions\", columndefs=['col1 INT', 'col2 VARCHAR(255)'])\n test_create_table(table, columndefs)\n \n data = [1, 'aa'] \n values = \"\"\n \n data.each do |d| values += '\\'' + d.to_s + '\\'' + ',' end\n values = values.chop # remove last ',' character \n \n insert = \"INSERT INTO \" + table + \" VALUES \" + \" ( \" + values + \" )\"\n \n @db.query('START TRANSACTION')\n @db.auto_commit(flag=false) # if @db.auto_commit?\n @db.query(insert)\n\n @db.query(\"COMMIT\") \n \n res = @db.query('SELECT * FROM ' + table)\n rows_committed = res.fetch_all\n res.free\n \n # create a save point\n @db.save\n @db.query(\"SAVEPOINT #{@db.transactions} ;\")\n \n @db.query(insert)\n \n # rollback to savepoint\n @db.query(\"ROLLBACK TO SAVEPOINT #{@db.transactions};\")\n @db.release\n \n res = @db.query('SELECT * FROM ' + table)\n rows_rolled_back = res.fetch_all\n res.free\n \n assert_equal(rows_committed, rows_rolled_back)\n \n # restore autocommit for remaining tests\n @db.auto_commit(flag=true) \n end", "def within_transaction; end", "def within_transaction; end", "def transaction; end", "def transaction; end", "def transaction; end", "def transaction\n start_transaction\n\n yield\n ensure\n end_transaction if transaction_started?\n end", "def transaction(&block)\n ActiveRecord::Base.transaction(&block)\n end", "def transaction(&block); end", "def use_transactions; end", "def use_transactions; end", "def transaction\n start\n yield self\n rescue Object => ex\n rollback\n debug \"#{ex.class}: #{ex.message}\"\n ex.backtrace.each { |line| debug line }\n else\n commit\n end", "def transaction(&block)\n yield\n commit\n end", "def scaffold_transaction(&block)\n transaction(&block)\n end", "def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end", "def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end", "def within_transaction(object); end", "def transaction(&block)\n yield\n end", "def transaction(&block)\n yield\n end", "def transaction(&block)\n block.call\n end", "def run_steps_in_transaction(steps, errors=[])\n return run_steps(steps, errors) unless defined?(ActiveRecord)\n ActiveRecord::Base.transaction do\n yield if block_given?\n r = run_steps(steps, errors)\n fail ActiveRecord::Rollback unless r\n r\n end\n end", "def create_data_set_outside_tx(options = {})\n Rails.logger.info '** Creating data set outside transaction **'\n puts \"#{Time.now} ** Creating data set outside transaction **\"\n Thread.new do\n ActiveRecord::Base.connection_pool.with_connection do\n SeedSupport.setup\n create_data_set options\n end\n end.join\n end", "def transaction\n @database.transaction { yield self }\n end", "def transaction(start_db_transaction=true)\n yield\n end", "def transaction\n start_transaction!\n\n result = yield\n\n query 'COMMIT'\n\n result\n rescue\n query 
'ROLLBACK'\n raise\n\n ensure\n end_transaction!\n end", "def within_transaction\n if use_transaction\n first.within_transaction do\n yield\n success?\n end\n else\n yield\n end\n end", "def transaction(model_class)\n raise \"you must override #transaction in an adapter subclass, it must yield\"\n end", "def transaction(&blk)\n tap(&blk)\n end", "def transaction(object)\n object.db.transaction {raise ::Sequel::Error::Rollback unless yield}\n end", "def transaction(&block)\n self['AutoCommit'] = false\n self.do_transaction(&block)\n self['AutoCommit'] = true\n end", "def transaction( &block )\n connect do | conn |\n conn.transaction do | conn |\n yield SqlRunner.new(SingleConnectionPool.new( conn ))\n end\n end\n end", "def restart_transaction\n ActiveRecord::Base.connection.execute(\"COMMIT\")\n ActiveRecord::Base.connection.execute(\"BEGIN\")\n end", "def transaction(&block)\n @in_transaction += 1\n begin\n yield self\n self.commit if @in_transaction > 0\n rescue => e\n self.rollback\n raise e\n ensure\n @in_transaction -= 1 unless @in_transaction == 0\n end\n end", "def begin\n db.transaction do\n yield\n end\n end", "def in_transaction(opts = {})\n yield\n end", "def transaction\n use do |connection|\n connection.transaction do |conn|\n begin\n yield conn\n rescue Rollback\n return\n end\n end\n end\n end", "def transaction(mode = :deferred, &block)\n @db.transaction(mode, &block)\n end", "def begin_db_transaction() end", "def begin_db_transaction() end", "def begin_transaction\n return System.begin_transaction\n end", "def transaction(options = {}, &block)\n run_on_shard { @klass = klass.transaction(options, &block) }\n end", "def auto_transaction(run_with_auto_tx)\n @auto_transaction = run_with_auto_tx\n end", "def transaction(options = {}, &block)\n @klass.connection.run_queries_on_shard(@shard) do\n @klass = @klass.connection.transaction(options, &block)\n end\n end", "def testTransaction1()\n t = Scalaris::Transaction.new()\n t.close_connection()\n end", "def testTransaction1()\n t = Scalaris::Transaction.new()\n t.close_connection()\n end", "def create_or_update_with_transaction\n Assessment.transaction do\n create_or_update_without_transaction\n end\n end", "def transaction(opts={}, &blk)\n Toshi.db.transaction(opts, &blk)\n end", "def transaction(&block)\n db\n persister\n\n result = nil\n start_time = Time.now\n begin\n db.transaction(:rollback => :reraise, :isolation => :repeatable,\n :retry_on => @retry_on_error, :num_retries => 3) do\n result = yield block\n end\n total = Time.now.to_ms - start_time.to_ms\n debug \"Transaction committed (#{total} ms)\"\n result\n rescue StandardError => e\n total = Time.now.to_ms - start_time.to_ms\n warn \"Transaction failed (#{total} ms)\"\n raise e\n ensure\n GC.start\n end\n end", "def rollback_db_transaction() end", "def rollback_db_transaction() end", "def transaction\n sanity_check\n raise InterfaceError, \"No block given\" unless block_given?\n\n commit\n begin\n yield self\n commit\n rescue Exception\n rollback\n raise\n end\n end", "def test_transaction1()\n t = Scalaroid::Transaction.new()\n t.close_connection()\n end", "def transaction(&block)\n begin\n @store.transaction\n block.call(@store)\n @store.commit\n rescue SQLite3::Exception => exception\n raise \"SQLite exception: #{exception}\"\n end\n end", "def within_transaction\n if use_transactions && !empty?\n first.within_transaction do\n yield\n success?\n end\n else\n yield\n end\n end", "def ddl_transaction(migration)\n if use_transaction?(migration)\n 
ActiveRecord::Base.transaction { yield }\n else\n yield\n end\n end", "def transaction\n begin\n if block_given?\n begin_db_transaction\n result = yield\n commit_db_transaction\n result\n end\n rescue Exception => database_transaction_rollback\n rollback_db_transaction\n raise\n end\n end", "def on_prepare_transaction_commit(unit, transaction); end", "def redo_in_transaction\n self.in_transaction = true\n ActiveRecord::Base.transaction { src_obj.replicate(self) }\n end", "def transaction(opts = {})\n commit, rollback = start_transaction! opts\n tx_depth = nil\n begin\n if Pacer.verbose == :very\n tx_depth = threadlocal_graph_info[:dx_depth]\n puts \"--#{self.class.name} transaction #{ tx_depth } --> \"\n puts caller[0,3]\n end\n r = yield commit, rollback\n commit.call(false)\n r\n rescue Exception => e\n rollback.call e.message\n raise\n ensure\n puts \"--#{self.class.name} #{ tx_depth } <-- \" if Pacer.verbose == :very\n finish_transaction!\n end\n end", "def transaction\n raise ArgumentError, 'No block was given' unless block_given?\n\n with_client do |client|\n begin\n client.query('BEGIN')\n yield client\n client.query('COMMIT')\n rescue StandardError => e\n client.query('ROLLBACK')\n raise e\n end\n end\n end", "def exec_rollback_db_transaction\n @connection.rollback\n @connection.autocommit = true\n end", "def transaction(start_db_transaction = true)\n transaction_open = false\n begin\n if block_given?\n if start_db_transaction\n begin_db_transaction\n transaction_open = true\n end\n yield\n end\n rescue Exception => database_transaction_rollback\n if transaction_open\n transaction_open = false\n rollback_db_transaction\n end\n raise unless database_transaction_rollback.is_a? ActiveRecord::Rollback\n end\n ensure\n if transaction_open\n begin\n commit_db_transaction\n rescue Exception => database_transaction_rollback\n rollback_db_transaction\n raise\n end\n end\n end", "def fork_with_new_connection(config, klass = ActiveRecord::Base)\n raise StandardError, \"can't use forks with transactional fixtures\" if self.use_transactional_fixtures?\n fork do\n begin\n klass.establish_connection(config)\n yield\n ensure\n klass.remove_connection\n end\n end\n end", "def ar_default_transaction_handler\n ar_model.transaction do\n yield\n end\n end", "def testTransaction3()\n t = Scalaris::Transaction.new(conn = Scalaris::JSONConnection.new(url = Scalaris::DEFAULT_URL))\n t.close_connection()\n end", "def testTransaction3()\n t = Scalaris::Transaction.new(conn = Scalaris::JSONConnection.new(url = Scalaris::DEFAULT_URL))\n t.close_connection()\n end", "def begin_db_transaction\n @transaction = @connection.transaction('READ COMMITTED')\n end", "def transaction(options={}, &block)\n connection.transaction(options.update(:requires_new => true), &block)\n end", "def test_supports_transaction_isolation\n assert ActiveRecord::Base.connection.supports_transaction_isolation?\n\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:read_uncommitted)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:read_committed)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:repeatable_read)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:serializable)\n end", "def begin_db_transaction\n log('begin transaction', nil) do\n begin_isolated_db_transaction(default_transaction_isolation)\n end\n end", "def transaction(&block)\n raise InvalidDbError if @stale\n\n return transaction_in_staging(true, &block) if 
self.staging?\n\n begin\n transaction_in_staging(false, &block)\n ensure\n self.unstage\n end\n end", "def commit_db_transaction() end", "def commit_db_transaction() end", "def lint_transaction_support\n result = gateway_instance.transaction { 1 }\n\n complain \"#{gateway_instance} must return the result of a transaction block\" if result != 1\n\n gateway_instance.transaction do |t|\n t.rollback!\n\n complain \"#{gateway_instance} must interrupt a transaction on rollback\"\n end\n end", "def perform(&block)\n within_transaction do\n if before\n persist\n run_actions(&block)\n after\n rollback unless success?\n end\n end\n \n success?\n end", "def run(result, &progress_block) # :nodoc:\n rollback_transaction do\n mack_test_case_run(result, &progress_block)\n end\n end", "def checked_transaction(opts=OPTS)\n use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield\n end", "def checked_transaction(opts=OPTS)\n use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield\n end", "def test_transaction3()\n t = Scalaroid::Transaction.new(conn = Scalaroid::JSONConnection.new(url = Scalaroid::DEFAULT_URL))\n t.close_connection()\n end", "def start_transaction!\n fail DbMod::Exceptions::AlreadyInTransaction if @in_transaction\n @in_transaction = true\n\n query 'BEGIN'\n end", "def transactional\n @transactional ||= getTransactionalData()\n end", "def testTransactionSingleOp1()\n conn = Scalaris::TransactionSingleOp.new()\n conn.close_connection()\n end", "def testTransactionSingleOp1()\n conn = Scalaris::TransactionSingleOp.new()\n conn.close_connection()\n end", "def transaction(key, settings = {}, &block)\n t = init_transaction(key, settings)\n block.call\n t.finish\n rescue\n t.rollback\n raise\n ensure\n t.clean_publisher\n end", "def transaction\n raise Mysql2::Error, 2002 if @my.nil?\n\n if block_given?\n begin\n @my.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')\n yield # Start executing the query black.\n @my.query('COMMIT')\n rescue Mysql2::Error => e\n @my.query('ROLLBACK')\n raise e\n end\n end\n end", "def test_duplicate_on_first_insert_with_locking\n reset_transaction do\n with_locking do\n duped_env = @env.dup\n base_session = SessionHash.new(SmartSessionApp, duped_env)\n base_session.send :load!\n\n setup_base_session {|s| s[:name] = 'fred'}\n\n base_session[:foo] = 'bar'\n SmartSessionApp.send :set_session, duped_env, '123456', base_session.to_hash, {}\n\n assert_final_session 'foo' => 'bar', 'name' => 'fred'\n\n end\n end\n end", "def run_active_record_migrations!\n ActiveRecord::Migration.verbose = false\n ActiveRecord::Migrator.migrate([\"test/fixtures/migrate\"])\n end", "def execute\n Trade.transaction do\n execute_without_transaction!\n end\n end", "def test_transaction_single_op1()\n conn = Scalaroid::TransactionSingleOp.new()\n conn.close_connection()\n end", "def destroy_with_transaction\n Assessment.transaction do\n destroy_without_transaction\n end\n end", "def transaction\n raise Mysql::Error, 'Not Connected' if @my.nil?\n\n if block_given?\n begin\n @my.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')\n yield # Start executing the query black.\n @my.query('COMMIT')\n rescue Mysql::Error => e\n @my.query('ROLLBACK')\n raise e\n end\n end\n end", "def invoke(source, &block)\n if source.parent\n block.()\n else\n ActiveRecord::Base.transaction(&block)\n end\n end", "def in rails_env = Bj.rails_env, &block\n transaction(:rails_env => rails_env.to_s, &block)\n end", "def transactions\n 
@connection.savepoint\n end", "def after_commit(unit); end", "def test_affiliate_transactions \n\n aff_customer = customers(:bob)\n aff_customer.ssn = '987654321'\n aff_customer.save\n\n trans1 = AffiliateTransaction.create(:transaction_type => 'C',\n :affiliate_customer_id => aff_customer.id,\n :referred_customer_id => 1,\n :amount => 5.0,\n :date => Date.today)\n\n assert_equal(1, aff_customer.affiliate_transactions.size)\n assert_equal(0, aff_customer.affiliate_payment_transactions.size)\n assert_equal(5.0, aff_customer.affiliate_balance)\n\n trans2 = AffiliateTransaction.create(:transaction_type => 'C',\n :affiliate_customer_id => aff_customer.id,\n :referred_customer_id => 2,\n :amount => 5.0,\n :date => Date.today)\n\n # Reload, because transactions are cached\n aff_customer.reload\n\n assert_equal(2, aff_customer.affiliate_transactions.size)\n assert_equal(0, aff_customer.affiliate_payment_transactions.size)\n assert_equal(10.0, aff_customer.affiliate_balance)\n\n trans3 = AffiliateTransaction.create(:transaction_type => 'P',\n :affiliate_customer_id => aff_customer.id,\n :referred_customer_id => nil,\n :amount => -5.0,\n :date => Date.today)\n\n aff_customer.reload\n\n assert_equal(3, aff_customer.affiliate_transactions.size)\n assert_equal(1, aff_customer.affiliate_payment_transactions.size)\n assert_equal(5.0, aff_customer.affiliate_balance)\n end" ]
[ "0.7172905", "0.7034204", "0.70049345", "0.69483507", "0.692453", "0.67865217", "0.67865217", "0.67567015", "0.67567015", "0.67567015", "0.6741999", "0.67283875", "0.6690304", "0.66793776", "0.66793776", "0.66781414", "0.6628899", "0.6626953", "0.658819", "0.658819", "0.65201247", "0.6517306", "0.6517306", "0.64562917", "0.64365774", "0.6434454", "0.64317596", "0.63824475", "0.6378403", "0.63360053", "0.63169557", "0.63072926", "0.6288193", "0.62452674", "0.6241302", "0.62397444", "0.62345594", "0.62181413", "0.62052673", "0.6181831", "0.6175977", "0.6116602", "0.6116602", "0.6105604", "0.6099039", "0.6092586", "0.6082704", "0.60636836", "0.60636836", "0.6063567", "0.605067", "0.60404646", "0.6039809", "0.6039809", "0.6038264", "0.6019762", "0.6013311", "0.60049075", "0.59965223", "0.5978063", "0.594608", "0.5944972", "0.5894996", "0.58758384", "0.5864772", "0.58466685", "0.5822307", "0.58175385", "0.5797623", "0.5797623", "0.5791434", "0.5785423", "0.5780532", "0.57793605", "0.57765937", "0.57738674", "0.57738674", "0.57629913", "0.5762106", "0.5752322", "0.573719", "0.573719", "0.57303697", "0.57303524", "0.5716219", "0.5715935", "0.5715935", "0.57073367", "0.5696355", "0.5690469", "0.5688029", "0.56839573", "0.56518877", "0.56396264", "0.5638557", "0.56228644", "0.56203115", "0.56142795", "0.5613711", "0.5593612" ]
0.6446898
24
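The record above pairs the description "wrap test runs in a transaction to replicate AR's transactional fixtures" with a Sequel-based override of Minitest's run. A minimal sketch of how that override is typically used, assuming an in-memory SQLite database held in a DB constant (needs the sqlite3 gem); the table, test class, and DB constant are assumptions — only the run override mirrors the positive document.

    # Sketch: every test body runs inside a transaction that is always rolled
    # back, so database state is restored after each test.
    require "sequel"
    require "minitest/autorun"

    DB = Sequel.sqlite                                         # in-memory database
    DB.create_table(:items) { primary_key :id; String :name }

    class TransactionalTest < Minitest::Test
      # Same idea as the positive document, against the DB constant
      # rather than Sequel::Model.db.
      def run(*args, &block)
        DB.transaction(rollback: :always, auto_savepoint: true) { super }
      end

      def test_insert_is_rolled_back
        DB[:items].insert(name: "widget")
        assert_equal 1, DB[:items].count   # visible inside the transaction only
      end
    end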
true when value was already assigned
def filled? @value.positive? end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def assign?\n\t\ttrue\n\tend", "def explicitly_set_value?\n @explicitly_set_value == true\n end", "def value_constructed_by_mass_assignment?(_value)\n false\n end", "def value_set?\n @value_set == true\n end", "def assigning?\n !assign_stack.empty?\n end", "def setnx(value)\n if exists?\n false\n else\n set value\n true\n end\n end", "def value_set?\n @value_set\n end", "def has_value?\n false\n end", "def try_put(new_value)\n return false if @tvar.value != EMPTY\n\n @tvar.value = new_value\n true\n end", "def has_value?\n true\n end", "def set?\n return @set == true\n end", "def has_value?(p0) end", "def is_equal_value?\n return true unless value_to_compare?\n assigns[@name] == @options[:with]\n end", "def set_present\n set_value( true )\n end", "def value?(value)\n\t\treturn self.value == value\n\tend", "def value?(value)\n\t\treturn self.value == value\n\tend", "def is_assigned=(value)\n @is_assigned = value\n end", "def is_assigned=(value)\n @is_assigned = value\n end", "def value_if_true=(value)\n @value_if_true = value\n end", "def value?(value) true end", "def assignment?\n selector.to_s[-1] == ASSIGN_SUFFIX\n end", "def value?(value); end", "def value?(value); end", "def value_present?\n [email protected]?\n end", "def assigned?(name)\n ivar_defined?(name)\n end", "def has_value?(value); end", "def single_value?\n return false\n end", "def has_value? value; value? value; end", "def is_value?\n true\n end", "def modified?\n getvalue() != @original_value\n end", "def modified?\n getvalue() != @original_value\n end", "def set?\n self == :set\n end", "def hasValue\n @valueCreator.hasValue\n end", "def assignment?\n Types::ASSIGNABLE_VARIABLES.include?(node.type)\n end", "def assignment_method?\n false\n end", "def single_value?\n @single_value\n end", "def set?\n !value.nil? or file? or baclava?\n end", "def value_if_false=(value)\n @value_if_false = value\n end", "def emit_value?\n !mlhs? && !no_value_parent?\n end", "def value=(value)\n p assigned: value\nend", "def requires_assignment\n _assign if !@_assigned\n end", "def assignment?(node); end", "def assignment?(node); end", "def assignment?(node); end", "def set(value)\n super(value == :true)\n end", "def try_put!(value)\n @mutex.synchronize do\n if unlocked_empty?\n @value = value\n @full_condition.signal\n true\n else\n false\n end\n end\n end", "def has_value?(value)\n super(convert_value(value))\n end", "def is_assigned\n return @is_assigned\n end", "def is_assigned\n return @is_assigned\n end", "def can_have_value?\n return false\n end", "def can_have_value?()\n return true\n end", "def value?(p0) end", "def changed?\n @value_was != value\n end", "def value\n true\n end", "def is_set?(*key)\n synchronize do\n val = @data.get(*key)\n return false if val.nil?\n return false if val.is_a?(MonitorMixin::ConditionVariable)\n true\n end\n end", "def delivered?\n\t\tinstance_variable_defined? 
:@value\n\tend", "def is?(val)\n return true if @val == val\n end", "def value?\n return !self.flag?\n end", "def set?\n\t\t\tbegin\n\n\t\t\t\tvalue = @lookup.inject(@obj_with_keys) { |deep_obj, this_key|\n\t\t\t\t\t# Has to be an object that can have keys\n\t\t\t\t\treturn false unless deep_obj.respond_to?(:[])\n\n\t\t\t\t\tif deep_obj.respond_to?(:fetch)\n\t\t\t\t\t\t# Hash, Array and Struct all respond to fetch\n\t\t\t\t\t\t# We've monkeypatched fetch to Struct\n\t\t\t\t\t\tif deep_obj.is_a?(Array)\n\t\t\t\t\t\t\t# Check array separately as must fetch numeric key\n\t\t\t\t\t\t\treturn false unless Keys.index?(this_key)\n\t\t\t\t\t\tend\n\t\t\t\t\t\tnext_obj = deep_obj.fetch(this_key, Keys::MISSING)\n\t\t\t\t\telse\n\t\t\t\t\t\treturn false\n\t\t\t\t\tend\n\n\t\t\t\t\t# No need to go any further\n\t\t\t\t\treturn false if Keys::MISSING == next_obj\n\n\t\t\t\t\t# Reinject value to next loop\n\t\t\t\t\tnext_obj\n\t\t\t\t}\n\n\t\t\trescue\n\t\t\t\t# If fetch throws a wobbly at any point, fail gracefully\n\t\t\t\treturn false\n\t\t\tend\n\t\t\t# No errors - yield the value if desired\n\t\t\tif block_given?\n\t\t\t\tyield(value)\n\t\t\tend\n\t\t\t# Return true\n\t\t\treturn true\n\t\tend", "def value?\n @count > 0\n end", "def just_u_value?()\n @construction.check_keyword?(\"U-VALUE\")\n end", "def must_have_value?\n return false\n end", "def valid?\n @value ? true : false\n end", "def can_have_value?\n return true\n end", "def value?(key)\n !!value(key) || @object.value?(key)\n end", "def valide?\n !association_invalide\n end", "def just_u_value?()\n @construction.check_keyword?(\"U-VALUE\")\n end", "def set\n false\n end", "def __empty?\n return false unless super\n __empty(__value)\n end", "def defined?(value_key)\n true\n end", "def set?(name)\n @values.key?(name.to_sym)\n end", "def delivered?\n\t\[email protected] {\n\t\t\tinstance_variable_defined? :@value\n\t\t}\n\tend", "def valid?\n value\n end", "def assignment_method?\n !comparison_method? && method_name.to_s.end_with?('=')\n end", "def duplicate?\n @duplicate == true\n end", "def is_set?\n @is_set\n end", "def has_value?(value)\n raise NotImplementedError\n end", "def saved_already?\n @id != \"\" && @id != nil\n end", "def run(_value)\n false\n end", "def func1 val\n if val = 1 # should be conditional assignment ==\n return true\n else\n return false\n end\nend", "def new_record?\n key_value.nil?\n end", "def value=(value)\n @changed = true if value != @value\n @value = value\n end", "def set?\n [email protected]?\n end", "def set?\n [email protected]?\n end", "def unique_val?\n @lower == @upper\n end", "def complete?(v = nil)\n super(v || value)\n end", "def invariable?\n false\n end", "def value?(value)\n values.include? value\n end", "def value!\n\t\[email protected] {\n\t\t\t@old = true\n\n\t\t\t@value unless @exception\n\t\t}\n\tend", "def value_if_true\n return @value_if_true\n end", "def changed_in_place?(raw_old_value, new_value)\n false\n end", "def value_valid?\n return true\n end", "def _compare_and_set(old_value, new_value)\n return false unless @mutex.try_lock\n begin\n return false unless @value.equal? 
old_value\n @value = new_value\n ensure\n @mutex.unlock\n end\n true\n end", "def single_value?\n raise NotImplementedError\n end", "def unknown?\n @value.nil?\n end", "def value?(name)\n name = name.to_s\n return false unless key?(name)\n return false if self[name].nil?\n return true\n end", "def nil?\n self == LAZY_VALUE\n end", "def unchecked_value?\n self[:value] ||= {}\n is_checked?(/unchecked/)\n end", "def must_have_value?()\n return @df_int == nil\n end", "def []=(_key, _value)\n false\n end", "def value_if_false\n return @value_if_false\n end" ]
[ "0.78393275", "0.73788786", "0.7273575", "0.7266546", "0.70290065", "0.7000641", "0.6976678", "0.6906508", "0.67959327", "0.6760182", "0.6749322", "0.6699247", "0.66718113", "0.6659968", "0.66290164", "0.66290164", "0.6603629", "0.6603629", "0.6565545", "0.6535013", "0.65138364", "0.6485874", "0.6485874", "0.64777094", "0.6458714", "0.6443997", "0.6417934", "0.64163584", "0.641323", "0.6411128", "0.6411128", "0.64094335", "0.63925374", "0.63924426", "0.63693184", "0.6357167", "0.63569355", "0.6352429", "0.62626344", "0.6234234", "0.6227037", "0.6220864", "0.6220864", "0.6220864", "0.6209907", "0.62081957", "0.61959946", "0.61764246", "0.61764246", "0.61758596", "0.6146439", "0.6142231", "0.6141725", "0.6140206", "0.61358094", "0.61341536", "0.61208457", "0.6116593", "0.6107848", "0.6107594", "0.60619885", "0.60615915", "0.60588443", "0.6045666", "0.6043581", "0.60354495", "0.60268587", "0.60260564", "0.599866", "0.5997759", "0.5994489", "0.5990037", "0.5973097", "0.59668154", "0.5953365", "0.59456336", "0.59235543", "0.59207714", "0.59146863", "0.5910829", "0.5907853", "0.59022343", "0.58805376", "0.58805376", "0.5874655", "0.5861264", "0.58466846", "0.5844869", "0.5834447", "0.58331156", "0.58226055", "0.5821143", "0.58172256", "0.5813006", "0.58113986", "0.5809737", "0.5800685", "0.5797914", "0.5783855", "0.57806456", "0.5764081" ]
0.0
-1
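The query "true when value was already assigned" and the one-line filled? predicate above imply a cell whose @value starts at 0 and becomes a positive digit once assigned. A hedged sketch of that minimal surrounding state — the Cell class and its set method are assumptions used only to show the predicate flipping.

    # Hypothetical context for the filled? predicate: 0 means "not assigned yet".
    class Cell
      def initialize
        @value = 0
      end

      def set(digit)
        @value = digit
      end

      def filled?
        @value.positive?          # true once a digit has been assigned
      end
    end

    cell = Cell.new
    cell.filled?                  # => false
    cell.set(5)
    cell.filled?                  # => true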
number of possible values at this position
def num_possible return -1 if filled? @possible.size end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def count\n values.inject(0){|m, v| m + v.length}\n end", "def num_possible\n return -1 if filled?\n @possible.size\n end", "def value_count\n values.count\n end", "def count; @value.size; end", "def num_possible\n @possible\n end", "def length\n values.length\n end", "def size\n values.flatten.size\n end", "def size\n values.flatten.size\n end", "def size\n values.flatten.size\n end", "def count\n @valueset.length\n end", "def size\n values.flatten.size\n end", "def length\n @values.length\n end", "def possibilities\n @dictionary.length\n end", "def size\n @value.inject(0) {|acc, it| acc + it.size}\n end", "def size\n value.map(&:size).inject(0, :+)\n end", "def size\n BitCounter.count(@val)\n end", "def number_of_values\n Integer((@upper - @lower) / @step) + 1\n end", "def getCount\n return @nodeValues.size\n end", "def count()\n @list_of_positions.length\n end", "def columns_count\n @values.empty? ? 0 : @values.first.count\n end", "def n\n x.size\n end", "def cardinality\n tuples.size\n end", "def variable_count\n variables.size\n end", "def bc_count\n \[email protected] + @initial.size + @final.size + @cyclic.size\n end", "def num_potential_pairs\n potential_pairs.size\n end", "def rows_count\n @values.count\n end", "def value_count\n $capitals.values.length\nend", "def tuple_count\n target.count\n end", "def num_parameters\n @parameters.size\n end", "def size\n value.size\n end", "def n\n points.size\n end", "def size\n val.size\n end", "def countNbVides()\n @nbVides = 0\n\n @grille.each do |ligne|\n ligne.each do |c|\n if( c[\"value\"] == nil)\n @nbVides += 1\n end\n end\n end\n end", "def size\n @pairs.size\n end", "def length\n value.length\n end", "def size\n data.values.inject(0){|m,v| m+=v.size}\n end", "def item_count\n item_values.values.compact.sum { |v| v.is_a?(Array) ? v.size : 1 }\n end", "def n_support\n @model[:sv_indices].size\n end", "def length; count end", "def qv_count\n self.qv_mappings.size\n end", "def num_vars\n constraint.lhs.size\n end", "def getNofInitParamSet()\n return @maxCombination ;\n end", "def legal_values n\n return [] if @pz[n]\n (((1..@size).to_a - rows[row_id n]) - columns[column_id n]) - boxes[box_id n]\n end", "def size\n count = 0\n @properties.each do |gid, values|\n if ! 
values.empty?\n count += 1\n end\n end\n return count\n end", "def one_rv\n count = 0\n for x in [email protected]\n if @elements[x].remaining_vals.length == 1\n count+=1\n end\n end\n count\n end", "def nitems\n count = 0\n i = 0\n lim = self.__size\n while i < lim\n count += 1 unless self.__at(i)._equal?(nil)\n i += 1\n end\n count\n end", "def size\n @components.values.inject(0) { |component_count, attribute| component_count + attribute.size }\n end", "def num_points\n @size * @size\n end", "def appearances\n positions.size\n end", "def count\n @type_variants.values.reduce(0) { |m, o| m + o.size }\n end", "def size\n @n\n end", "def size\n @n\n end", "def n\n @clensing_data.size\n end", "def cardinality\n @rhs.size\n end", "def length\n @poss.length\n end", "def size!\n @elements.values.map(&:size!).inject(0) {|total,size| total + size}\n end", "def val_count(cards)\n\t\t\"123456789TJQK\".chars.collect{|char| cards.count {|card| card.val == char}}\n\tend", "def count\n underlying_array.length\n end", "def num_states\n @state.size\n end", "def num_states\n @state.size\n end", "def num_states\[email protected]\nend", "def length\n @val.length\n end", "def total_poss\n @nrow * @ncol * @nbox\n end", "def size()\n @VAR_STACK.length()\n end", "def nnz; @elements.size end", "def params_size\n accum = 0\n @params.each do |p|\n accum += 1 if p.needs_word?\n end\n accum\n end", "def countingValleys(n, s)\n valleys = 0\n pre = 0\n ac = 0\n for i in 0..n\n pre = ac\n ac += val(s[i]) \n valleys +=1 if pre == -1 && ac == 0\n end\n return valleys\nend", "def count value=false\n @vector.count value\n end", "def num_keys\n end", "def size\n C.LLVMCountParams(@fun)\n end", "def length\n @driver_instance.count_list_value(@key)\n end", "def size\n @sequence.size\n end", "def count value=false, &block\n if block_given?\n @data.select(&block).count\n elsif value\n count { |val| val == value }\n else\n size - indexes(*Daru::MISSING_VALUES).size\n end\n end", "def length\n count(:up)\n end", "def size \n if @map.empty?\n return 0\n else \n _max_arity\n end\n end", "def size\n\t\tlengths.reduce(&:*)\n\tend", "def size\n @rules.inject(0) { |acc, (lhs, rhs)| acc + rhs.size }\n end", "def size() end", "def size() end", "def size() end", "def size() end", "def size() end", "def size() end", "def numbers_of_planes\n\t\t@landed_planes.count \n\tend", "def num_queens\n\t\tboard.nil? ? 0 : board.values.select {|a| a == 1}.count\n\tend", "def size()\n #This is a stub, used for indexing\n end", "def size()\n #This is a stub, used for indexing\n end", "def size\n variables.size\n end", "def number_of_fields(params = self.triggers)\n\t\tif params.present?\n\t\t\tparams.values.inject(0) do |sum, value|\n\t\t\t\tif value.is_a?(Hash)\n\t\t\t\t\t# puts value.keys.inspect\n\t\t\t\t\tsum + (value.keys.include?(\"o\") ? 1 : number_of_fields(value) )\n\t\t\t\telse\n\t\t\t\t\tsum + 1\n\t\t\t\tend\n\t\t\tend\n\t\telse\n\t\t\t0\n\t\tend\n\tend", "def state_length\r\n @state.length\r\n end", "def get_number_of_states\n return @dfa.get_number_of_states\n end", "def length\n count\n end", "def size\n if @size.nil?\n @size = @mset.inject(0) do |total, pair|\n value = pair[1]\n if value.is_a? 
Array\n total + value.size\n else\n total + 1\n end\n end\n end\n\n @size\n end", "def size\n @sequence.size\n end", "def count\n if pair?\n if @value2.pair?\n countsofar = @value2.count\n 1 + countsofar\n elsif @value2.nil?\n 1\n else\n 2\n end\n elsif nil?\n 0\n else\n 1\n end\n end", "def number_of_verts\n\t\t@number_of_verts ||= begin\n\t\t\tsize = 0\n\t\t\[email protected] do |primitive|\n\t\t\t\tprimitive[:verts].each do |index|\n\t\t\t\t\tvert = @verts[index]\n\t\t\t\t\tsize += vert[:vector].length\n\t\t\t\tend\n\t\t\tend\n\t\t\tsize\n\t\tend\n\tend", "def length\n @count\n end", "def length()\n #This is a stub, used for indexing\n end", "def length\n len = 0\n @locations.each do |x|\n if x.sequence\n len += x.sequence.size\n else\n len += (x.to - x.from + 1)\n end\n end\n len\n end", "def size\n Integer.new.tap do |count|\n each { |element| count += 1 }\n end\n end" ]
[ "0.75349104", "0.74319696", "0.7428594", "0.7411988", "0.72394055", "0.72371763", "0.7222108", "0.7222108", "0.7222108", "0.71478707", "0.708246", "0.7055507", "0.7026769", "0.6987042", "0.67609555", "0.6747905", "0.6729629", "0.6700157", "0.66934097", "0.6627457", "0.6586765", "0.6571627", "0.6563214", "0.6541461", "0.65238136", "0.6509288", "0.6500768", "0.6465176", "0.6454385", "0.64393175", "0.64221036", "0.64058465", "0.6400655", "0.6397848", "0.63725126", "0.6370082", "0.6366354", "0.6354686", "0.6351393", "0.63458514", "0.6334126", "0.63222593", "0.6305346", "0.63019603", "0.6290052", "0.6280475", "0.6246519", "0.62376577", "0.62121713", "0.6206229", "0.61772823", "0.61772823", "0.6176586", "0.6173909", "0.61659336", "0.615744", "0.61488354", "0.6129973", "0.61238873", "0.61238873", "0.6112488", "0.6098301", "0.6093726", "0.6089873", "0.6066111", "0.6060029", "0.60497844", "0.6049327", "0.6041907", "0.6038151", "0.6032627", "0.6032482", "0.60286975", "0.60113543", "0.59953094", "0.5994633", "0.598894", "0.5986954", "0.5986954", "0.5986954", "0.5986954", "0.5986954", "0.5986954", "0.5986248", "0.5979063", "0.5965348", "0.5965348", "0.5960127", "0.5954684", "0.59527904", "0.5942495", "0.594035", "0.59384555", "0.5935525", "0.5931418", "0.5919109", "0.5918123", "0.591231", "0.59104747", "0.5908767" ]
0.74891293
1
exclude possibility return true if number was deleted
def exclude(num) return true if !filled? && @possible.delete(num) false end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exclude(num)\n return true if !filled? && @possible.delete(num)\n false\n end", "def cannot_be_deleted\n return false if self.id == 1\n end", "def test_ok_to_delete\n assert_equal(false, Intensity.ok_to_delete?(2))\n assert_equal(false, Intensity.ok_to_delete?(1))\n #at this time, I have no intensity of type 3, but there are only 3 intensity ids in my current program\n # assert_equal(true, Intensity.ok_to_delete?(3))\n assert_equal(true, Intensity.ok_to_delete?(0))\n end", "def deleted?\n false\n end", "def valid_for_removal \n if Librarytransaction.all.map(&:accession_id).include?(id)\n return false\n else\n return true\n end\n end", "def deleted?\n end", "def deleted?\n end", "def exclude(num)\n @arr_possible.delete(num)\n if @arr_possible.length == 1\n set(@arr_possible[0])\n return true\n end\n return false if @possible == @arr_possible.length\n @possible = @arr_possible.length\n true\n end", "def is_deletable\n not is_default_cash_gift\n end", "def deleted?\n deleted == true\n end", "def deleted?\n deleted == true\n end", "def valid_for_deletion?\n return false if id.nil? || sync_token.nil?\n id.value.to_i > 0 && !sync_token.to_s.empty? && sync_token.to_i >= 0\n end", "def skip_deletion\n @mark_for_delete = false\n end", "def test_ok_to_delete\n assert_equal(false, Studio.ok_to_delete?(11))\n end", "def deleted?\n return (withdrawn || record.nil?)\n end", "def deleted?\n return true if !@data and !@id\n return false\n end", "def valid_for_deletion?\n return false if(id.nil? || sync_token.nil?)\n id.value.to_i > 0 && !sync_token.to_s.empty? && sync_token.to_i >= 0\n end", "def can_delete?\n\n entries = TimesheetEntry.count_by_sql(\"select count(*) from timesheet_entries where rate_id = #{id}\")\n \n return entries == 0\n \n end", "def valid_for_deletion?\n return false if(id.nil? || sync_token.nil?)\n id.to_i > 0 && !sync_token.to_s.empty? && sync_token.to_i >= 0\n end", "def valid_for_deletion?\n return false if(id.nil? || sync_token.nil?)\n id.to_i > 0 && !sync_token.to_s.empty? && sync_token.to_i >= 0\n end", "def valid_for_deletion?\n return false if(id.nil? || sync_token.nil?)\n id.to_i > 0 && !sync_token.to_s.empty? && sync_token.to_i >= 0\n end", "def valid_for_deletion?\n return false if(id.nil? || sync_token.nil?)\n id.to_i > 0 && !sync_token.to_s.empty? && sync_token.to_i >= 0\n end", "def valid_for_deletion?\n return false if(id.nil? || sync_token.nil?)\n id.to_i > 0 && !sync_token.to_s.empty? && sync_token.to_i >= 0\n end", "def valid_for_deletion?\n return false if(id.nil? || sync_token.nil?)\n id.to_i > 0 && !sync_token.to_s.empty? && sync_token.to_i >= 0\n end", "def valid_for_deletion?\n return false if(id.nil? || sync_token.nil?)\n id.to_i > 0 && !sync_token.to_s.empty? 
&& sync_token.to_i >= 0\n end", "def delete\n return false\n end", "def test_ok_to_delete\n assert_equal(false, ExerciseType.ok_to_delete?(1))\n assert_equal(true, ExerciseType.ok_to_delete?(0))\n assert_equal(true, ExerciseType.ok_to_delete?(ExerciseType.all.last.id + 10))\n end", "def ok_to_delete?(id)\n true\n end", "def RevisionDelete(type_id, listado_productos)\n val = true\n listado_productos.each do |productos|\n if productos.tipo.to_i == type_id\n val = false\n break\n end\n end\n return val\n end", "def isRemoved?\n is_removed?\n end", "def check_deleted(newrev)\n if newrev == \"0000000000000000000000000000000000000000\"\n puts \"[POLICY} You can not delete objects\"\n exit 1\n end\nend", "def test_ok_to_delete\n assert_equal(false, Rating.ok_to_delete?(3))\n end", "def valid_for_deletion?\n return false if(id.nil? || sync_token.nil?)\n id.to_i > 0 && !sync_token.to_s.empty? && sync_token.to_i >= 0\n end", "def should_destroy?\n should_destroy.to_i == 1\n end", "def delete\n\t\tfalse\n\tend", "def deleted?\n !!deleted\n end", "def deletable_by?(user)\n false\n end", "def erase(num)\n i = @array.bsearch_index { |ele| ele >= num }\n !!i && @array[i] == num && [email protected]_at(i) \n end", "def deleted?(name, options = T.unsafe(nil)); end", "def deleted?\n @deleted == true\n end", "def can_be_deleted?\n return can_be_modified?\n end", "def reason_not_to_delete\n d = @data_object\n if d.order_amount_paid > 0.0\n return \"Cannot delete an order after it has been partially paid\"\n end\n\n pays = Pays.for_order(self)\n unless pays.empty?\n return \"Cannot delete an order after payments have been applied\"\n end\n\n each_line do |line|\n if line.li_date_shipped\n return \"Cannot delete: '#{line.li_desc}' has been shipped\"\n end\n end\n\n nil\n end", "def pending_deletable?\n !pending_delete_prohibited?\n end", "def deleted?\n !(%w(deleted removed) & flags).empty?\n end", "def deleted?\n @deleted ||= new_record? ? false : @data[0, 1] == '*'\n end", "def remove(val)\n return false unless @number_index_map.key?(val)\n index = @number_index_map[val]\n last_element = @number_list[-1]\n @number_list[index] = last_element\n\n @number_list.pop\n @number_index_map[last_element] = index\n @number_index_map.delete(val)\n true\n end", "def test_deleted\n assert_difference('User.count', -1 , \"Delete False\") do\n user = User.find(980190962);\n user.delete\n end\n end", "def deletion_method(hash)\r\n\r\n hash.delete_if{|spell,number| number<4}\r\n\r\nend", "def delete?(entry)\n not(entry.exist?) or model[:older].nil? or model[:older] >= entry.mtime.to_date\n end", "def contains_number!\n return unless self.regular?\n self.status = :contains_number\n self.store\n end", "def valid_for_removal\n if timetable_in_use.include?(id)\n return false\n else\n return true\n end\n end", "def RevisionDelete(brand_id, listado_productos)\n val = true\n listado_productos.each do |productos|\n if productos.marca.to_i == brand_id\n val = false\n break\n end\n end\n return val\n end", "def deletable?\n parent.access?(:delete)\n end", "def can_delete?\n can?('d')\n end", "def delete\n if empty?\n @perforce.run(\"change\", \"-d\", @number)\n end\n end", "def user_can_delete?\n false\n end", "def deletable_by?(user)\n resource.orders_count.zero? && (user.is_admin? 
|| is_mill?(user))\n end", "def eliminated?\n\t\t@eliminated\n\tend", "def validate_not_deleted(target)\n @v2_data_criteria_to_delete[target] = false\n end", "def deleted?\n !(%w(deleted removed) & flags).empty?\n end", "def deletable?\n votes.each do |v|\n return false if (v.user_id != user_id) && v.value.positive? && v.favorite\n end\n true\n end", "def is_deleted?(participant)\n return false if participant.nil?\n return receipt_for(participant).first.deleted\n end", "def deleted?\n (status == DELETED)\n end", "def deleted?\n (status == DELETED)\n end", "def clean number\n clean! number && number.dup\n end", "def deleteable?\n if smo_code == 'SPECIAL'\n # Can't delete the special code\n false\n elsif signs.exists?\n # Can't delete if there are assets using the code\n false\n else\n true\n end\n end", "def delete_index(display_number)\n @list.each_with_index do |display, i|\n if display_number == display.number\n @list[i..i] = []\n return true\n end\n end\n false\n end", "def delete(name)\n return super(name) if name.is_a?(Fixnum)\n remove(name)\n end", "def deletable?\n is_regular_page?\n end", "def can_remove?( record, type = '*' )\n return false unless can_change?( record, type )\n\n return false if !remove_records?\n\n true\n end", "def remove(num)\n self[num].delete(num) if include?(num)\n end", "def check_if_deleted\n raise ActiveRecord::ReadOnlyRecord unless deleted?\n end", "def removed?\n (status == REMOVED)\n end", "def removed?\n (status == REMOVED)\n end", "def removed?\n (status == REMOVED)\n end", "def removed?\n (status == REMOVED)\n end", "def removed?\n (status == REMOVED)\n end", "def deleted?\n values.length == 0 && values(true).length > 0\n end", "def delete_dirty?\n false\n end", "def deleted?(obj, index)\n deletes_for(index).include? obj.id\n end", "def delete_if\n [email protected] {|item| yield item }.values\n remove_prob=(remove_values==[]) ? 0 : remove_values.inject(:+)\n if (remove_prob<1)\n @d.delete_if {|item| yield item }\n @d=(self.mult(Rational(1,1-remove_prob))).d\n else\n # TODO\n end\n end", "def is_removed?\n self.has_current_status_of?(ArticleSubmission::ADMIN_REMOVED) || self.has_current_status_of?(ArticleSubmission::REMOVED)\n end", "def can_delete?\n order.can_delete?\n end", "def locked_candidates_2_from_col col, number\n\t\tcol_cells = get_col_cells col\n\t\tcol_cells_with_number = col_cells.select { |i| @candidates[i].include?(number) }\n\t\tif col_cells_with_number.empty?\n\t\t\treturn false\n\t\tend\n\n\t\tcol_cell_blocks = col_cells_with_number.map { |i| get_block i }\n\n\t\tonly_in_block = col_cell_blocks.count(col_cell_blocks[0]) == col_cell_blocks.length ? true : false\n\n\t\tif only_in_block\n\t\t\tnumber_of_deleted_items = 0\n\t\t\tget_block_cells(col_cell_blocks[0]).select { |i| !col_cells_with_number.include?(i) }.each do |i|\n\t\t\t\tdeleted_item = @candidates[i].delete number\n\t\t\t\tnumber_of_deleted_items += 1 if !deleted_item.nil?\n\t\t\tend\n\t\t\treturn true if number_of_deleted_items > 0\n\t\tend\n\n\t\treturn false\n\tend", "def locked_candidates_2_from_row row, number\n\t\trow_cells = get_row_cells row\n\t\trow_cells_with_number = row_cells.select { |i| @candidates[i].include?(number) }\n\t\tif row_cells_with_number.empty?\n\t\t\treturn false\n\t\tend\n\n\t\trow_cell_blocks = row_cells_with_number.map { |i| get_block i }\n\n\t\tonly_in_block = row_cell_blocks.count(row_cell_blocks[0]) == row_cell_blocks.length ? 
true : false\n\n\t\tif only_in_block\n\t\t\tnumber_of_deleted_items = 0\n\t\t\tget_block_cells(row_cell_blocks[0]).select { |i| !row_cells_with_number.include?(i) }.each do |i|\n\t\t\t\tdeleted_item = @candidates[i].delete number\n\t\t\t\tnumber_of_deleted_items += 1 if !deleted_item.nil?\n\t\t\tend\n\t\t\treturn true if number_of_deleted_items > 0\n\t\tend\n\n\t\treturn false\t\n\tend", "def delete(value)\n end", "def delete(value)\n end", "def remove(found, number)\n if found\n $ballers.remove('number' => number)\n 'You were removed from the database.'\n else\n 'You are not in the database.'\n end\nend", "def destroy?\n user.rank > UNIT_RANK\n end", "def destroy?\n user.rank > UNIT_RANK\n end", "def markToDelete\n #N Without this it won't be marked for deletion\n @toBeDeleted = true\n end", "def should_index?\n\t\t!deleted?\n \tend", "def flag_as_deleted!\n set(:deleted, true)\n end", "def is_deleted?(participant)\n return false unless participant\n return receipts_for(participant).deleted.count == receipts_for(participant).count\n end", "def deleted?\n status == :deleted\n end", "def deleted?\n @deleted\n end", "def orphan?\n !target_type || !notes.match?(/\\A\\d{14}/)\n end", "def delete?(o)\n delete(o) if include?(o)\n end", "def destroy?\n index? && user.id != record.id\n end", "def deleted?\n @deleted\n end" ]
[ "0.6699805", "0.6688743", "0.65604806", "0.63156945", "0.62685853", "0.62591946", "0.62591946", "0.6227051", "0.61972034", "0.61782956", "0.61782956", "0.6139152", "0.6083896", "0.6063883", "0.60551536", "0.6054716", "0.60399336", "0.60314626", "0.60247487", "0.60247487", "0.60247487", "0.60247487", "0.60247487", "0.60247487", "0.60247487", "0.6005695", "0.59877825", "0.5980918", "0.5968367", "0.5962415", "0.59442234", "0.59406203", "0.59179986", "0.5887908", "0.5884879", "0.58746076", "0.58680797", "0.5867558", "0.5851651", "0.5847462", "0.58255893", "0.5822006", "0.5810243", "0.5810138", "0.5803489", "0.580263", "0.57975197", "0.57880974", "0.57862836", "0.5771881", "0.5771787", "0.57714945", "0.57694465", "0.5748292", "0.574287", "0.57217705", "0.57166004", "0.57067096", "0.56943464", "0.56859285", "0.5677402", "0.5638072", "0.5628558", "0.5628558", "0.56240946", "0.561802", "0.56173825", "0.56100845", "0.55990285", "0.5596277", "0.55956995", "0.55903625", "0.55882466", "0.55882466", "0.55882466", "0.55882466", "0.55882466", "0.55844563", "0.55799586", "0.5573315", "0.55722046", "0.5570293", "0.5556984", "0.5554406", "0.5538264", "0.55268323", "0.55268323", "0.5526122", "0.55232376", "0.55232376", "0.55231786", "0.552273", "0.55167437", "0.5515716", "0.55153376", "0.5507801", "0.5498383", "0.5498375", "0.5497843", "0.5497408" ]
0.67659307
0
include possibility return true if number was included
def include(num) return true if !filled? && @possible.insert(num - 1, num) false end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def include?(num)\n self[num].include?(num)\n end", "def number_included(number)\n arr = [1, 3, 5, 7, 9, 11]\n puts \"number\": number\n puts arr.include? number\nend", "def include?(arr, num)\n arr.each{ |element| return true if element == num}\n return false\nend", "def include?(arr, number)\n arr.any? { |num| num == number }\nend", "def include?(ary, number)\n return false if ary.size == 0\n result = ary.detect do |item|\n item == number\n end\n !!(result == number)\nend", "def check_include(value)\n (1..10).include?(value)\nend", "def include?(p0) end", "def include?(p0) end", "def include?(p0) end", "def include?(p0) end", "def include?(p0) end", "def include?(p0) end", "def include?(array, integer)\r\n array.each do |element|\r\n return true if element == integer\r\n end\r\n\r\nfalse\r\nend", "def numeral(section, digit)\n section.include? digit \nend", "def contains?(number)\n @numbers.include?(number)\n end", "def include?(position)\r\n to_expanded_i.include?(position.to_i)\r\n end", "def include? item\n @succ.include? item\n end", "def include?(arr, search)\n result = false\n arr.each { |num| result = true if num == search }\n result\nend", "def include?(array, value)\n array.each do |integer|\n return true if integer == value\n end\n return false\nend", "def include?(arg0)\n end", "def using_include(array, element)\n\tarray.include?(element)\nend", "def include?(i)\n @range.include?(i)\n end", "def contains_number?\n self.status == :contains_number\n end", "def include?(x)\n inf <= x && x <= sup\n end", "def include?(*args)\n args.inject(true) {|val, x| val = self.single_include?(x)}\n end", "def include?(value)\n element.include? value\n end", "def include?(arg)\n arg.is_a?(Module) ? !!included_modules.detect{ |m| m === arg } : store.include?(arg)\n end", "def include?(something); end", "def include?(arr, search_value)\n arr.each do |num|\n if num == search_value\n return true\n end\n end\n false\nend", "def include?(arr, value)\n includes = false\n arr.each { |n| includes = true if n == value }\n includes\nend", "def include?(item)\n return true\n end", "def include?(position); end", "def element(num)\n a = ['10', '2', '30', '5']\n if (a.include?(num) == true)\n puts \"#{num} is Present in the array\"\n else\n puts \"#{num} is not present in the array\"\n end\nend", "def include?(item)\n end", "def contains_number!\n return unless self.regular?\n self.status = :contains_number\n self.store\n end", "def validate_may_include(_record, attribute, value)\n return unless options[:may_include]\n surplus = value - options[:may_include]\n errors.add(attribute, \"has invalid values: #{surplus}\") if surplus.any?\n end", "def include?(arr, include_item)\n arr.each { |item| return true if item == include_item }\n false\nend", "def array_42(z)\n if z.include? 42\n return true\n else\n return false\n end\n\nend", "def include?(name)\n includes?(name)\n end", "def include?(value)\n each do |index, list_value|\n return true if list_value == value\n end\n return false\n end", "def include?(list,tst)\n list.each {|itm| return true if itm == tst}\n false\nend", "def include?(v)\n case v\n when Range\n include_range?(v)\n when RangeSet\n include_rangeset?(v)\n else\n include_scalar?(v)\n end\n end", "def include inc\n @includes << inc\n end", "def include?\n @options[:include]\n end", "def show_number_valid?\n (1..3).include? @show_no \n end", "def include_with_range?(value)\n if value.is_a?(::Range)\n operator = exclude_end? ? :< : :<=\n end_value = value.exclude_end? ? 
last.succ : last\n include?(value.first) && (value.last <=> end_value).send(operator, 0)\n else\n include_without_range?(value)\n end\n end", "def include(x)\n REPLSupport.inc(x)\nend", "def include?(val)\n self.each {|item|\n if item.equal?(val)\n return true\n end\n }\n return false\n end", "def insert(num)\n false if include?(num)\n self[num] << num\n true\n end", "def included?(range, number)\n if range.exclude_end?\n number >= range.begin && number < range.end\n else\n number >= range.begin && number <= range.end\n end\nend", "def has_number?\n number.present?\n end", "def include?(array, value)\r\n array.count(value) > 0\r\nend", "def check_num(number)\n if (1..10).include? number\n puts \"Valid\"\n else\n puts \"Invalid\"\n end\nend", "def include?(name); end", "def include?(key)\n value.include?(key)\n end", "def include?(*args_)\n _offset_for_args(args_) ? true : false\n end", "def include?(element)\n @ary.include? element\n end", "def include?(other)\n `return self.indexOf(other) == -1 ? Qfalse : Qtrue;`\n end", "def include?(other)\n `return self.indexOf(other) == -1 ? Qfalse : Qtrue;`\n end", "def includes() return @includes end", "def test_8_accepts_includes_numbers\n result = includes_number?(\"Hat123!!\")\n assert(result, \"Hat123!! has non alphanumeric, should be valid\")\n end", "def include?(something)\n output&.include?(something) || content.include?(something)\n end", "def include?(other)\n cover?(other) && precision == other.precision\n end", "def include?(item)\r\n $game_party.usable?(item)\r\n end", "def include?(value)\n return super if value.is_a?(Module)\n\n !self[value].nil?\n end", "def compare(num1, num2, num3, num4, num5, num6)\n num_array = [num1, num2, num3, num4, num5]\n if num_array.include?(num6)\n p \"The number #{num6} appears in #{num_array}\"\n else\n p \"The number #{num6} does not appear in #{num_array}\"\n end\nend", "def include_variable?\n return VariableTable.check_include_variable(@content)\n end", "def include?(o)\n return false unless valid_member?(o)\n @val[o] != 0\n end", "def rinclude\n -> v, x { x.include?(v) }.curry\n end", "def include?(value)\n !get(value).nil?\n end", "def includes item\n msg = \"#{self} did not include #{item}\"\n check_if self.include?(item), msg\n end", "def include\n -> x, v { x.include?(v) }.curry\n end", "def test_0210_includeq\n @@log.debug \"test_0210_includeq starts\" if @@log.debug?\n assert_respond_to(@list, :include?, \"test_0210_includeq_respond\")\n # Test does include\n assert(@list.include?(@bsb),\"test_0210_includeq_basic\")\n # Test does not include\n ta = Person.new(\"A\", \"B\", \"C\", 456)\n assert(@list.include?(ta) == false,\"test_0210_includeq_backwards\")\n\n @@log.debug \"test_0210_includeq ends\" if @@log.debug?\n end", "def include_variable?\n VariableTable.check_include_variable(@value)\n end", "def include?(element)\n @element_list.include? element\n end", "def include? v\n v = @period.value_of(v) unless v.is_a? Integer\n @values.include? v\n end", "def include?(value)\n !!connection.zrank(key_label, value)\n end", "def include?(low, high = nil)\n return @data.include?(low) if high.nil?\n (low..high).each { |i| return false if get(i).nil? }\n return true\n end", "def include?(array, value)\n array.each do |elem|\n return true if elem == value\n end\n false\nend", "def include?(filename)\n \n end", "def includes?(data)\n list.includes?(data)\n end", "def is_present_in_row?(number, cords)\n row = get_row(cords)\n row.include? 
number\n end", "def include?\n return false if eval_shop_condition(@hidden_condition)\n return true\n end", "def my_include?(array, target)\n return false if array.empty?\n return true if array.first == target\n my_include?(array.drop(1), target)\nend", "def include?(item)\n self.__contains__(item).rubify\n end", "def include?(el)\n list.include?(el)\n end", "def include?(array, search_value)\n # array.each do |element|\n # return true if element == search_value\n # end\n # false\n array.count(search_value) > 0\nend", "def include?(id)\n return false if id < @min_id || @max_id < id\n\n page.include?(id)\n end", "def include?(item)\n return false if item.nil?\n return true\n end", "def appears?(numbers, last_num)\n return 'appears' if numbers.include?(last_num)\n \"doesn't appear\"\nend", "def test_Range_InstanceMethods_include?\n\t\tr = 1..10\n\t\tassert_equal(true, r.include?(5))\n\t\tassert_equal(true, r.include?(5.5))\n\t\tassert_equal(true, r.include?(10))\n\n\t\tr = 1...10\n\t\tassert_equal(false, r.include?(10))\n\n\t\tr = 'a'..'z'\n\t\tassert_equal(true, r.include?('b'))\n\t\tassert_equal(false, r.include?('ruby'))\n\tend", "def featured_num?(num)\n num.odd? && num % 7 == 0 && num.digits.uniq == num.digits\nend", "def includes\n end", "def includes\n end", "def last_include\n result = []\n\n puts \"Enter your first number\"\n first = gets.chomp\n result << first\n\n puts \"Enter your second number\"\n second = gets.chomp\n result << second\n\n puts \"Enter your third number\"\n third = gets.chomp\n result << third\n\n puts \"Enter your fourth number\"\n fourth = gets.chomp\n result << fourth\n\n puts \"Enter your fifth number\"\n fifth = gets.chomp\n result << fifth\n\n puts \"Enter your sixth number\"\n sixth = gets.chomp\n\n if result.include?(sixth)\n puts \"The number #{sixth} appears in #{result}.\"\n else\n puts \"The number #{sixth} doesn't appear in #{result}\"\n end\nend", "def contains_int?\r\n \treturn false if self.empty?\r\n \treturn self.match(/^[0-9]+$/)\r\n end", "def include?(array, query)\n array.each do |el|\n return true if el == query\n end\n false\nend", "def include?(key); end", "def include?(key); end", "def includes?(data)\n node = @head\n include_array = []\n until node.nil?\n include_array << node.data\n node = node.next_node\n end\n include_array.include?(data)\n end" ]
[ "0.76985776", "0.7622094", "0.74142003", "0.7167877", "0.71227515", "0.70524013", "0.7003844", "0.700377", "0.700377", "0.700377", "0.700377", "0.700377", "0.68770146", "0.6713544", "0.66451263", "0.66409093", "0.6621245", "0.65714586", "0.65215695", "0.6519412", "0.65112996", "0.64790416", "0.64709127", "0.6462968", "0.64621675", "0.6461071", "0.64451", "0.64200854", "0.63642734", "0.63579345", "0.63304573", "0.62918043", "0.62655056", "0.62614757", "0.6243634", "0.62291217", "0.61953646", "0.61675286", "0.6088134", "0.6086503", "0.60749364", "0.6045749", "0.6040343", "0.6035529", "0.60257447", "0.6000736", "0.5990337", "0.5960609", "0.5941082", "0.59362787", "0.5935374", "0.59308976", "0.59292173", "0.5912521", "0.5907764", "0.58977693", "0.5892096", "0.58855164", "0.58855164", "0.58815026", "0.5880214", "0.5878392", "0.58778757", "0.58690625", "0.5868946", "0.58604836", "0.5844536", "0.5843202", "0.5841539", "0.5836117", "0.5832105", "0.58268154", "0.5825063", "0.58219403", "0.5820919", "0.58140767", "0.5806348", "0.57970756", "0.5795014", "0.57940215", "0.57829225", "0.57818574", "0.5779975", "0.5779338", "0.57790124", "0.5775523", "0.57708186", "0.5764929", "0.57648325", "0.5750444", "0.57404983", "0.5738262", "0.5734507", "0.5734507", "0.57336456", "0.57325625", "0.5725581", "0.5714002", "0.5714002", "0.57121885" ]
0.7425215
2
For getters and setters
def initialize char1 @p=char1 end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getters; end", "def name\n return @name\nend\ndef age\n return @age\nend\n#setters, allows you to change or set something\ndef name=(name)\n @name = name\nend", "def to_setter\n\t\t\t\t(to_getter.to_s+\"=\").to_sym\n\t\t\tend", "def setter_to_getter(node); end", "def setter_method\n :\"#{self[:name]}=\"\n end", "def _setter_method\n :\"_#{self[:name]}=\"\n end", "def attribute_to_set; end", "def to_getter\n\t\t\t\tis_setter? ? self.to_s[0...-1].to_sym : self\n\t\t\tend", "def set; end", "def set; end", "def setter_method\n :\"#{self[:name]}=\"\n end", "def get()\n \n end", "def method_missing(meth, *args, &blk)\n match = meth.to_s.match(/^([a-zA-Z\\_]+)(=|$)$/)\n if match\n attribute, setter = match[1], !match[2].blank?\n if setter\n write_attribute(attribute, args.first)\n else\n read_attribute(attribute)\n end\n else\n super(meth, *args, &blk)\n end\n end", "def setter\r\n @setter ||= Field.setter(@name)\r\n end", "def _setter_method\n :\"_#{self[:name]}=\"\n end", "def attribute; end", "def attribute; end", "def attribute; end", "def attribute; end", "def attribute; end", "def attribute; end", "def attribute; end", "def setter_methods\n return @setter_methods if @setter_methods\n @setter_methods = get_setter_methods\n end", "def setter_methods\n return @setter_methods if @setter_methods\n @setter_methods = get_setter_methods\n end", "def attr_reader(*)\n end", "def age=(value)\n @age = value\nend", "def age=(value)\n @age = value\nend", "def getter_method_names; end", "def method_missing(meth, *args)\n return if not method_missing_hook(meth, args)\n\n d = self.class.structure_field_names\n m = meth.to_s\n\n setter = (m[-1].chr == \"=\") ? true : false\n m = m[0..-2] if setter\n\n puts \"WARNING: assignment to @value as struct field\" if setter and m == \"value\"\n\n if (i = d[m.intern])\n if setter\n self[m.intern] = args[0]\n else\n self[m.intern]\n end\n else\n super(meth, *args)\n end\n end", "def inject_acl_object_getter_setter(method_name)\n inject_acl_object_check(method_name, :read)\n inject_acl_object_check(\"#{method_name}=\",:write)\n end", "def get(object); end", "def method_missing(meth,*args,&block)\n\n method = meth.to_s # convert method name to a string\n setter = method.end_with?('=') # determine if this is a setter method (which would have the last character \"=\" in the method name)\n attribute = setter ? method.chop : method # the attribute name needs to have the \"=\" removed if it is a setter\n multivalued_field = attribute.end_with?(self.class.multivalued_field_marker) # in place editing fields can end with the special character marker, which will join arrays when return; and split them when setting\n attribute.gsub!(self.class.multivalued_field_marker,'') if multivalued_field\n\n solr_field_config=self.class.field_mappings[attribute.downcase.to_sym] # lookup the solr field for this accessor\n if solr_field_config\n solr_field_name=solr_field_config[:field].downcase\n default_value=solr_field_config[:default] || ''\n if setter # if it is a setter, cache the edit if it has changed\n old_values=self[solr_field_name]\n new_values=args.first\n if !self.class.is_equal?(old_values,new_values,multivalued_field) # we should only cache the edit if it actually changed\n value = (multivalued_field ? 
new_values.split(\"|\") : new_values) # split the values when setting if this is an in place edit field\n cache_edit({solr_field_name.to_sym=>value})\n return value\n else\n return old_values\n end\n else # if it is a getter, return the value\n value = unsaved_edits[solr_field_name.to_sym] || self[solr_field_name] # get the field value, either from unsaved edits or from solr document\n value = default_value if value.blank?\n return (multivalued_field && value.class == Array ? value.join(\" | \") : value) # return a joined value if this is an in place edit field, otherwise just return the value\n end\n else\n super # we couldn't find any solr fields configured, so just send it to super\n end\n\n end", "def attr_accessor_sybling(method)\n attr_reader?(method) ? to_attr_writer(method) : to_attr_reader(method)\n end", "def __setter__\n \"#{self}=\"\n end", "def method_missing(method, *args)\n is_set_method = (method.to_s =~ /(.*)=$/)\n key = is_set_method ? $1 : method.to_s\n # No such key\n super unless @data.has_key?(key)\n # Set\n return @data[key] = args[0] if is_set_method\n # Get\n return @data[key]\n end", "def attr; end", "def method_missing(method_name, *args, &block)\n if method_name.to_s.end_with?('=')\n set_attribute(method_name, *args)\n elsif has_attribute?(method_name)\n get_attribute(method_name)\n else\n super\n end\n end", "def method_missing(method, *args, &block)\n \n # The getter\n if @metadata.has_key?(method)\n return attribute_get(method)\n else\n # The setter\n \n if (attribute=method.to_s.match(/(.+)=$/).to_a.last)\n attribute_set(attribute, args.first)\n return\n end\n end\n \n super\n \n end", "def method_missing(method,val=nil)\n if val.nil?\n get(method)\n else\n method = method.to_s[0..-2].to_sym # get rid of = sign\n set(method,val)\n end\n end", "def attr_accessor( * )\n fail \"Remember, an Entity is immutable. Use a Services::Service to mutate the underlying data.\"\n end", "def get\n raise NotImplementedError\n end", "def get_setter_methods\n meths = instance_methods.map(&:to_s).select{|l| l.end_with?('=')} - RESTRICTED_SETTER_METHODS\n meths -= Array(primary_key).map{|x| \"#{x}=\"} if primary_key && restrict_primary_key?\n meths\n end", "def method_missing(method, *args)\n # Give OpenStruct a chance to create getters and setters for the\n # corresponding field\n super method, *args\n\n if field = setter?(method)\n # override setter logic to apply any existing sanitization rules before\n # assigning the new value to the field\n override_setter_for(field) if sanitize?(field)\n # uses the newly created setter to set the field's value and apply any\n # existing sanitization rules\n send(method, args[0])\n end\n end", "def method_missing(method, *args, &block)\n meth, setter = /^(.*?)(=?)$/.match(method).values_at(1,2)\n if valid_temporal_attribute?(meth.to_sym) \n setter.empty? ? get(meth, *args) : set(meth, *args)\n else super\n end\n end", "def setter\n @setter ||= :\"#{@name}=\"\n end", "def setter( name ) (name.to_s + '=').to_sym end", "def get_value\n @value \n end", "def attr_writer( * )\n fail \"Remember, an Entity is immutable. 
Use a Services::Service to mutate the underlying data.\"\n end", "def properties\n super\n end", "def create_accessors(attribute_name)\n class_eval do\n define_method(attribute_name) do\n odata_entity[property_map[attribute_name]]\n end\n\n define_method(\"#{attribute_name}=\") do |value|\n # unless entity[property_map[attribute_name]] == value\n # send(\"#{attribute_name}_will_change!\") if defined?(::ActiveModel)\n # end\n\n odata_entity[property_map[attribute_name]] = value\n end\n end\n\n nil\n end", "def properties; end", "def properties; end", "def properties; end", "def properties; end", "def properties; end", "def properties; end", "def properties; end", "def properties; end", "def property(name); end", "def get\n val\n end", "def get\n val\n end", "def type=(v)\n case v\n when :get then super('get')\n when :set then super('set')\n when :result then super('result')\n when :error then super('error')\n else super(nil)\n end\n end", "def value=(val); end", "def accessors\n self.class.accessors\n end", "def values_for_properties; end", "def value=(_); end", "def setter\n @setter ||= \"#{name}=\"\n end", "def get(key)\n \n end", "def value!\n raise NotImplementedError\n end", "def method_missing(setter, value)\n case setter.to_s\n when /^set_(.+)/\n variable_name = \"@#{$1}\"\n if instance_variable_defined?(variable_name)\n instance_variable_set(variable_name, value)\n else\n raise NoMethodError.new(\"Undefined setter '#{setter.to_s}' for #{self.class}.\")\n end\n else\n super\n end\n end", "def method_missing(method, *args)\n if method.to_s[-1,1] == '='\n self.put(method.to_s.tr('=',''), *args)\n else\n self.get(method)\n end\n end", "def accessor\n @@accessor ||= nil\n @@accessor\n end", "def value() end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def value; end", "def method_missing(method, *args, &block)\n attribute = method.to_s\n\n if attribute =~ /=$/ # Define property -- does not have to exist\n attribute = attribute.chop\n self.changed_attributes[attribute] = args[0]\n self.attributes[attribute] = args[0]\n else\n return super unless self.attributes.include?(attribute)\n self.attributes[attribute]\n end \n \n end", "def getter\r\n @getter ||= Field.getter(@name)\r\n end", "def create_setter\n @model.class_eval <<-EOS, __FILE__, __LINE__\n #{writer_visibility}\n def #{name}=(value)\n self[#{name.inspect}] = value\n end\n EOS\n end", "def old_value; end", "def method_missing(method, *args)\n if self.respond_to?(method)\n super\n else\n method_name = method.to_s\n \n #set a value for a variable\n if method_name =~ /=$/\n var_name = method_name.gsub('=', '')\n value = args.first\n self[var_name] = value\n \n #retrieve a value\n else\n self[method_name]\n end\n end\n end", "def value\n \n end" ]
[ "0.8170303", "0.7327501", "0.7088328", "0.6928124", "0.68670124", "0.67980784", "0.67834765", "0.67760384", "0.6766563", "0.6766563", "0.67421234", "0.67149687", "0.66916895", "0.6628344", "0.66181093", "0.65703", "0.65703", "0.65703", "0.65703", "0.65703", "0.65703", "0.65703", "0.65027785", "0.65027785", "0.6478357", "0.645235", "0.645235", "0.6424369", "0.6400204", "0.63941234", "0.6376939", "0.63679767", "0.6357665", "0.6340591", "0.63124025", "0.6311787", "0.6294526", "0.6294278", "0.6278388", "0.6242464", "0.6211134", "0.62029225", "0.6200061", "0.61934304", "0.61800736", "0.6166084", "0.6138083", "0.61279225", "0.61107475", "0.61057436", "0.6105091", "0.6105091", "0.6105091", "0.6105091", "0.6105091", "0.6105091", "0.6105091", "0.6105091", "0.610277", "0.6095888", "0.6095888", "0.6089905", "0.6076925", "0.6076843", "0.60740906", "0.60733837", "0.60692793", "0.60647297", "0.60626847", "0.6060202", "0.60581", "0.60357744", "0.603145", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.6031372", "0.60304457", "0.6020966", "0.60178345", "0.601458", "0.5985749", "0.5984909" ]
0.0
-1
Looks to see if device is statically configured, if not then call the set methods
def initialize # API support utilizes excon, if it isn't found an error will be raised require 'excon' # rXg production environments should ALWAYS have a valid certificate # If no valid certificate for device uncomment line below # Excon.defaults[:ssl_verify_peer] = false # Configure a static device address and API key here # Device address format example: https://google.com @device_address = set_device_address @device_api_key = set_api_key(@device_address) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setupDevice(devtype,param) \n case devtype\n when 'gtk', :gtk\n @device = MyCanvasGtk.new(param) ;\n## when 'gtk2'\n## @device = MyCanvasGtk2.new(param) ;\n when 'tgif', :tgif\n @device = MyCanvasTgif.new(param) ;\n when 'tk', :tk\n @device = MyCanvasTk.new(param) ;\n else\n $stderr.printf(\"Error:unknown device type : %s\\n\",devtype.to_s) ;\n fail ;\n end\n end", "def device?\n type == :device\n end", "def set_device_type\n @device_type = DeviceType.find(params[:id])\n check_internal_device\n end", "def check_platforms\n default_platform_attrs = ProductModel.platforms.stringify_keys\n self.design_platform = default_platform_attrs.merge(booleanize_hashs(design_platform))\n self.customize_platform = default_platform_attrs.merge(booleanize_hashs(customize_platform))\n end", "def has_physical_device=(value)\n @has_physical_device = value\n end", "def initialize(devtype,param)\n setupDevice(devtype,param) ;\n end", "def thridDeviceLaunch\n\tthirdRealDeviceLaunch \nend", "def set_defaults\n #self.required_by_manufacturer ||= true\n end", "def has_physical_device=(value)\n @has_physical_device = value\n end", "def set_device\n # if HTTP_USER_AGENT is blank/nil defaults to blank, i.e. desktop \n agent = request.env[\"HTTP_USER_AGENT\"].blank? ? \"\" : request.env[\"HTTP_USER_AGENT\"].downcase \n if agent =~ tablet_agents\n \"tablet\"\n elsif (agent =~ mobile_agents_one) || (agent[0..3] =~ mobile_agents_two)\n \"mobile\"\n else\n \"desktop\"\n end \n end", "def devices; end", "def check_devices\n\n\n end", "def set_devices(storage_config)\n storage_config = deep_copy(storage_config)\n Builtins.y2milestone(\"entering set_devices with %1\", storage_config)\n first_set = false\n failed = false\n auto_targetmap = Builtins.listmap(storage_config) do |drive|\n device = \"\"\n Builtins.y2milestone(\"Working on drive: %1\", drive)\n # FIXME: Check if physical drives > 1\n if Ops.get_string(drive, \"device\", \"\") == \"ask\"\n dev = DiskSelectionDialog()\n if dev != nil\n first_set = true\n device = dev\n end\n\n next { device => drive }\n end\n if !first_set &&\n (Ops.get_string(drive, \"device\", \"\") == \"\" ||\n Ops.get_string(drive, \"device\", \"\") == \"ask\")\n device = Storage.GetPartDisk\n Builtins.y2milestone(\"device: %1\", device)\n first_set = true\n next { device => drive }\n elsif Ops.get_string(drive, \"device\", \"\") != \"\"\n dev = Ops.get_string(drive, \"device\", \"\")\n if dev == \"\"\n dev = \"error\"\n Builtins.y2error(\"Missing device name in partitioning plan\")\n failed = true\n end\n\n next { dev => drive }\n end\n end\n\n return nil if failed\n\n auto_targetmap = Builtins.mapmap(auto_targetmap) do |device, d|\n # Convert from Old Style\n if Builtins.haskey(d, \"use\")\n Builtins.y2milestone(\n \"converting from \\\"use\\\" to new style: %1\",\n device\n )\n if Ops.get_string(d, \"use\", \"\") == \"free\"\n Ops.set(d, \"prefer_remove\", false)\n elsif Ops.get_string(d, \"use\", \"\") == \"all\"\n Ops.set(d, \"prefer_remove\", true)\n elsif Ops.get_string(d, \"use\", \"\") == \"linux\"\n Ops.set(d, \"keep_partition_num\", GetNoneLinuxPartitions(device))\n Ops.set(d, \"prefer_remove\", true)\n else\n uselist = Builtins.filter(\n Builtins.splitstring(Ops.get_string(d, \"use\", \"\"), \",\")\n ) { |s| s != \"\" }\n Builtins.y2milestone(\"uselist: %1\", uselist)\n keeplist = []\n all = GetAllPartitions(device)\n Builtins.y2milestone(\"all list: %1\", all)\n Builtins.foreach(all) do |i|\n if !Builtins.contains(uselist, Builtins.sformat(\"%1\", i))\n keeplist = 
Builtins.add(keeplist, i)\n end\n end\n Builtins.y2milestone(\"keeplist: %1\", keeplist)\n Ops.set(d, \"keep_partition_num\", keeplist)\n\n if Ops.greater_than(Builtins.size(keeplist), 0)\n Ops.set(d, \"prefer_remove\", true)\n end\n end\n else\n Ops.set(d, \"use\", \"all\")\n end\n # see if <usepart> is used and add the partitions to <keep_partition_num>\n Builtins.foreach(Ops.get_list(d, \"partitions\", [])) do |p|\n if Ops.get_integer(p, \"usepart\", -1) != -1\n Ops.set(\n d,\n \"keep_partition_num\",\n Builtins.add(\n Ops.get_list(d, \"keep_partition_num\", []),\n Ops.get_integer(p, \"usepart\", -1)\n )\n )\n end\n end\n Ops.set(\n d,\n \"keep_partition_num\",\n Builtins.toset(Ops.get_list(d, \"keep_partition_num\", []))\n )\n { device => d }\n end\n\n Builtins.y2milestone(\n \"processed autoyast partition plan: %1\",\n auto_targetmap\n )\n deep_copy(auto_targetmap)\n end", "def do_adapter_specific_setup; end", "def devices=(value)\n @devices = value\n end", "def set_device\n @device = Device.find(params[:device_id])\n end", "def device_type=(s)\n self[:type] = s\n end", "def device_type=(s)\n self[:type] = s\n end", "def device_type\n self[:type]\n end", "def set_devicetype\n @devicetype = Devicetype.find(params[:id])\n end", "def device_type\n self[:type]\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def set_device\n @device = Device.find(params[:id])\n end", "def simulatorDeviceLaunch\n\tsimulatorLaunch \nend", "def device_configuration=(value)\n @device_configuration = value\n end", "def check_devices\n\traise \"No connected device was found.\" if no_device?\nend", "def applicable_device_types=(value)\n @applicable_device_types = value\n end", "def set_current_device\n return false if ENV['create_device'].nil? && ENV['device_id'].nil?\n device_id = fetch_variable('device_id')\n\n TestChamber::Device.new.tap do |device|\n device.udid = device_id if device_id\n TestChamber.current_device = device\n puts \"Using device with ID: #{device.udid}\"\n end\nend", "def set_device\n @device = Device.find(params[:id])\n end", "def handle_missing_devices(xml_base, changes)\n ['InternalSdCard', 'IntegratedRaid'].each do |dev_attr|\n #Check if Attribute name exists in the xml, and if it doesn't, check if we're trying to set to disabled. 
If so, delete from the list of changes.\n if xml_base.at_xpath(\"//Attribute[@Name='#{dev_attr}']\").nil?\n value = changes['partial']['BIOS.Setup.1-1'][dev_attr]\n if ['Off', 'Disabled'].include?(value)\n Puppet.debug(\"Trying to set #{dev_attr} to #{value}, but the relevant device does not exist on the server. The attribute will be ignored.\")\n changes['partial']['BIOS.Setup.1-1'].delete(dev_attr)\n end\n end\n end\n end", "def set_setting\n end", "def allowed_types\n\t\t[Device]\n\tend", "def check_platform\n mobile_override = params[:mobile] && params[:mobile] == \"1\"\n desktop_override = params[:mobile] && params[:mobile] == \"0\"\n if ( (browser.mobile? and !browser.ipad?) or mobile_override ) and !request.xhr? and !desktop_override\n @platform = 'mobile'\n request.format = :mobile\n else\n @platform = 'desktop'\n end\n end", "def simulator?\n !physical_device?\n end", "def no_device?\n\ttrue if devices.size < 1\nend", "def set_device_type\n @device_type = DeviceType.find(params[:id])\n end", "def set_device_type\n @device_type = DeviceType.find(params[:id])\n end", "def device_type=(value)\n @device_type = value\n end", "def app_configured?; end", "def secondDeviceLaunch\n\tsecondRealDeviceLaunch \nend", "def device_type\n self[:type]\n end", "def configured?; false; end", "def initialize()\n super\n @odata_type = \"#microsoft.graph.androidGeneralDeviceConfiguration\"\n end", "def multiple_devices?\n\ttrue if devices.size > 1\nend", "def device_type=(s)\n self[:type] = s\n end", "def initialize()\n super\n @odata_type = \"#microsoft.graph.iosGeneralDeviceConfiguration\"\n end", "def set_device\n @device = current_user.devices.find(params[:id])\n end", "def physical_device?\n if udid.nil?\n stack = Kernel.caller(0, 6)[0..-1].join(\"\\n\")\n raise RuntimeError,\n %Q[udid is nil\n\n#{stack}\n\n name: #{name}\nversion: #{version}\n]\n end\n !udid[DEVICE_UDID_REGEX, 0].nil?\n end", "def physical_device?\n arches.any? 
do |arch|\n arch[/arm/, 0]\n end\n end", "def initialize_configuration\n\n #Pulse the Program pin via JTAG.\n self.instruction = :jprogram\n\n #Put the device into configuration mode, and give it 14,000 cycles to start up.\n self.instruction = :cfg_in\n run_test(ConfigurationStartup)\n\n end", "def use_camera=(setting)\n end", "def set_device\n @device = current_user.devices.where(id:params[:id]).first\n end", "def dev() end", "def set_system_setting\n @system_setting = System.settings\n end", "def use_device_licensing=(value)\n @use_device_licensing = value\n end", "def set_admin_device\n @admin_device = AdminDevice.find(params[:id])\n end", "def set_device\n @device = ::Pushar::Core::Device.includes(:app).find(params[:id])\n end", "def update_types\n\t\t[Device]\n\tend", "def update_types\n\t\t[Device]\n\tend", "def use\n Cumo::CUDA::Runtime.cudaSetDevice(@id)\n end", "def set_devis_configuration\n @devis_configuration = DevisConfiguration.find(1)\n end", "def initialize devref #:nodoc:\n @device = devref\n \n @hardware_pwm_enabled = :unknown\n @hardware_pwm_prescale = :unknown\n @hardware_pwm = [0, 0]\n @software_pwm_enabled = :unknown\n @software_pwm = [0, 0, 0]\n \n # shut everything down, trying to setup littlewire in consistent initial state in case previous programs\n # messed with it's state\n self.software_pwm_enabled = false\n self.hardware_pwm_enabled = false\n self.pin_mode(pin1: :input, pin2: :input, pin3: :input, pin4: :input)\n self.digital_write(pin1: :gnd, pin2: :gnd, pin3: :gnd, pin4: :gnd)\n end", "def set_device_manufacturer\n @device_manufacturer = DeviceManufacturer.find(params[:id])\n end", "def set_bootable_devices(boot_source_type, options)\n bootable_devices = options[:bootable_devices]\n logger.info(\"bootable devices are %s\" % [bootable_devices.to_s])\n unless bootable_devices.empty?\n bootable_devices_instance_ids = []\n bootable_devices.each do |bootable_device|\n bootable_device = find_boot_device(bootable_device)\n if bootable_device\n logger.info(\"parsed boot device is %s\" % [bootable_device.to_s])\n bootable_devices_instance_ids << bootable_device[:instance_id]\n end\n end\n instance_ids = boot_source_settings.map {|e| e[:instance_id]}\n logger.info(\"instance ids are %s\" % [instance_ids.to_s])\n bootable_devices_to_disable = instance_ids.reject {|instance_id| bootable_devices_instance_ids.include? instance_id}\n logger.info(\"instance ids to disable are %s \" % [bootable_devices_to_disable.to_s])\n bootable_devices_to_disable.each do |bootable_device_to_disable|\n logger.info(\"Disabling boot device %s\" % [bootable_device_to_disable])\n change_boot_source_state(:instance_id => boot_source_type, :enabled_state => \"0\",\n :source => bootable_device_to_disable)\n end\n end\n end", "def config\n\n status=true\n\n # Set the audio carrier, but only if it's not false (so other\n # items can be updated without worrying about a little bit of\n # drift due to AFC).\n if @carrier\n @[email protected]_i\n if @carrier!=self.get_carrier().to_i\n self.sendcmd(\"modem.set_carrier\", @carrier)\n end\n end\n \n # Set the modem. Also, take a stab at setting the timeouts for\n # that modem. 
ToDo: Add to these as additional modems are checked\n # out, and save the user some hassle.\n if @modem!=@modem_old\n case @modem\n when \"BPSK31\"\n @start_wait=5\n @char_wait=1\n when \"BPSK63\"\n @start_wait=5\n @char_wait=0.5\n when \"BPSK125\"\n @start_wait=5\n @char_wait=0.5\n when \"BPSK250\"\n @start_wait=5\n @char_wait=0.5\n else\n @start_wait=10\n @char_wait=2\n end\n \n if @modem==self.get_modem()\n @modem_old=@modem\n else\n self.sendcmd(\"modem.set_by_name\", @modem)\n if @modem==self.get_modem()\n @modem_old=@modem\n else\n self.error(\"modem.set_by_name failed with value #{@modem}\")\n puts \"modem.set_name failed\" if @debug\n status=false\n end\n end\n end\n \n # Turn spot on/off (true/false).\n if @spot!=@spot_old\n if torf(self.sendcmd(\"spot.get_auto\"))==@spot\n @spot_old=@spot\n else\n self.sendcmd(\"spot.set_auto\", @spot)\n if torf(self.sendcmd(\"spot.get_auto\"))==@spot\n @spot_old=@spot\n else\n self.error(\"spot.set_auto failed with value #{@spot}\")\n puts \"spot.set_auto failed\" if @debug\n status=false\n end\n end\n end\n\n # Turn AFC on/off (true/false). Some modes don't work with\n # AFC. There seems to be a great deal of inconsistency (a bug,\n # maybe?) in reading the AFC value back from FLDigi. Every test I\n # can come up with points to a bug in their code, not mine. Until\n # we can get this sorted out, don't consider failure to set AFC as\n # fatal. Just unset it, and continue on. ToDo: Verify bug in\n # FLDigi, then fix.\n if (@afc!=@afc_old)\n if torf(self.sendcmd(\"main.get_afc\"))==@afc\n @afc_old=@afc\n else\n self.sendcmd(\"main.set_afc\", @afc)\n sleep 0.25\n if torf(self.sendcmd(\"main.get_afc\"))==@afc\n @afc_old=@afc\n else\n @afc=false\n puts \"main.set_afc failed, so leaving turned off\" if @debug\n end\n end\n end\n\n # Set the sideband (\"USB\"/\"LSB\"). 
ToDo: make sure this is\n # correct.\n if @sideband!=@sideband_old\n if @sideband==self.sendcmd(\"main.get_sideband\")\n @sideband_old=@sideband\n else\n self.sendcmd(\"main.set_sideband\", @sideband)\n if @sideband==self.sendcmd(\"main.get_sideband\")\n @sideband_old=@sideband\n else\n self.error(\"main.set_sideband failed with value #{@sideband}\")\n puts \"main.set_sideband failed\" if @debug\n status=false\n end\n end\n end\n\n # Turn RSID receive on/off (true/false).\n if @rsid!=@rsid_old\n if torf(self.sendcmd(\"main.get_rsid\"))==@rsid\n @rsid_old=@rsid\n else\n self.sendcmd(\"main.set_rsid\", @rsid)\n if torf(self.sendcmd(\"main.get_rsid\"))==@rsid\n @rsid_old=@rsid\n else\n self.error(\"main.set_rsid failed with value #{@rsid}\")\n puts \"main.set_rsid failed\" if @debug\n status=false\n end\n end\n end\n \n # Turn squelch on/off (true/false).\n if @squelch!=@squelch_old\n if torf(self.sendcmd(\"main.get_squelch\"))==@squelch\n @squelch_old=@squelch\n else\n self.sendcmd(\"main.set_squelch\", @squelch)\n if torf(self.sendcmd(\"main.get_squelch\"))==@squelch\n @squelch_old=@squelch\n else\n self.error(\"main.set_squelch failed with value #{@squelch}\")\n puts \"main.set_squelch failed\" if @debug\n status=false\n end\n end\n end\n \n # Set the squelch value (3.0 seems to work well).\n if @slevel!=@slevel_old\n @slevel_old=@slevel\n if @slevel.to_f==self.sendcmd(\"main.get_squelch_level\").to_f\n @[email protected]_f\n else\n self.sendcmd(\"main.set_squelch_level\", @slevel)\n if @slevel==self.sendcmd(\"main.get_squelch_level\")\n @[email protected]_f\n else\n self.error(\"main.set_squelch_level failed with value #{@slevel}\")\n puts \"main.set_squelch_level failed\" if @debug\n status=false\n end\n end\n end\n\n # Set the radio frequency (in hz). If the user has specified no\n # rig control, it simply returns true and ignores the provided\n # value (this is so people who don't have rig control can still\n # use the other features of the library, they just can't set the\n # radio frequency). Otherwise, it returns true if successful in\n # setting the frequency, false if it fails. The sleep here gives\n # the radio time to change frequencies before checking. 0.5\n # seconds work with all of my radios, but it's possible this will\n # need to be tweaked. Send me an e-mail if this value is not\n # adequate for your radio, and I'll figure out a plan. So far, it\n # works on my IC-706MkII, my IC-756Pro, and my FT-817. The value\n # for @offset is added to the desired frequency. This is for use\n # when you want all of your radios to be on a very specific given\n # frequency. You must choose one as \"the standard\", then figure\n # out the offset for each rig from that standard. For example, my\n # FT-817 transmits 180hz lower (for a given equal temperature).\n # Assuming I've chosen my IC-706MkII as my standard (of course,\n # you could use WWV or some such, as well), I need to set @offset\n # to -180 whenever using my FT-817 if I want them to be on the\n # exact same frequency. This value could be added to either the\n # dial frequency or the carrier. I chose the dial frequency,\n # since a lot of people reference the carrier more often than the\n # dial. That way, when one person says he's at \"1000\", it'll be\n # \"1000\" on the other radio, as well. There's no good, clean,\n # all-purpose solution to this one, but at least it allows for\n # consistent and automated use of the library without having to do\n # the conversions in your own code. 
We give ourselves two tries to\n # get the freq right, since some rigs seem to act a bit odd when\n # changing bands.\n @dial_freq=@dial_freq.to_i\n if (@dial_freq!=@dial_freq_old or @offset!=@offset_old) and @rigctl\n @dial_freq_old=@dial_freq\n @offset_old=@offset\n if @[email protected]_i!=self.sendcmd(\"main.get_frequency\").to_f\n self.sendcmd(\"main.set_frequency\", @[email protected]_f)\n sleep 0.5\n if @[email protected]_i!=self.sendcmd(\"main.get_frequency\").to_f\n self.sendcmd(\"main.set_frequency\", @[email protected]_f)\n sleep 0.5\n if @[email protected]_i!=self.sendcmd(\"main.get_frequency\").to_f\n self.error(\"main.set_frequency failed with value #{@dial_freq}\")\n puts \"main.set_frequency failed\" if @debug\n status=false\n end\n end\n end\n end\n\n return status\n end", "def used?(device)\n sync do\n dev = devices.detect { |v| v == device }\n next(false) unless dev\n !dev.inherited?\n end\n end", "def is_device?(type)\n\t\trequest.user_agent.to_s.downcase.include?(type.to_s.downcase)\n\tend", "def set_catalog_device\n @catalog_device = Catalog::Device.find(params[:id])\n end", "def update!(**args)\n @local_default_devices = args[:local_default_devices] if args.key?(:local_default_devices)\n @nearby_default_devices = args[:nearby_default_devices] if args.key?(:nearby_default_devices)\n end", "def _init_configuration\n\t\t# Set defaults\n\t\t@setup = { \"create\" => false, \"environment\" => false, \"test\" => true, \"destroy\" => false }\n\t\t@config = false\n\tend", "def has_physical_device\n return @has_physical_device\n end", "def type=(value)\n if value == @defaults['ai.device.type']\n @values.delete 'ai.device.type' if @values.key? 'ai.device.type'\n else\n @values['ai.device.type'] = value\n end\n end", "def update!(**args)\n @is_on_device_smart_home_supported = args[:is_on_device_smart_home_supported] if args.key?(:is_on_device_smart_home_supported)\n end", "def device_id=(value)\n if value == @defaults['deviceId']\n @values.delete 'deviceId' if @values.key? 'deviceId'\n else\n @values['deviceId'] = value\n end\n end", "def set_admin_device_brand\n @device_brand = DeviceBrand.find(params[:id])\n end", "def adb_device options\n id = options[:id] || 1\n dev = DeviceAPI::Android::Device.new(\n serial: \"serial#{id}\",\n state: 'device',\n version: options[:os_version] || '1.2.3'\n )\n allow(dev).to receive(:wifi_mac_address) { options[:wifi_mac_addresss] || '00:11:22:33:44:%02d' % id }\n allow(dev).to receive(:ip_address) { options[:ip_address] || '192.168.100.%d' % id }\n allow(dev).to receive(:manufacturer) { options[:brand] || 'Test Brand' }\n allow(dev).to receive(:model) { options[:model] || 'Test Model' }\n allow(dev).to receive(:imei) { options[:imei] || '123456%d' % id }\n allow(dev).to receive(:version) { options[:os_version] || '1.2.3' }\n allow(dev).to receive(:get_device_type) { :default }\n dev.instance_variable_set(:@remote, options[:remote] ? true : false)\n dev\nend", "def valid_for_platform?; true; end", "def is_dev?\n @dev\n end", "def physical_device_id=(value)\n @physical_device_id = value\n end", "def platform; end", "def platform; end", "def platform; end", "def ensure_all\n sync do\n devices.each { |dev| parent.devices.provide(dev) }\n end\n end" ]
[ "0.6319103", "0.6297391", "0.61364865", "0.6117838", "0.61052877", "0.6014249", "0.59481823", "0.59150475", "0.58944905", "0.58941984", "0.5853575", "0.5830868", "0.57593066", "0.5757208", "0.5737723", "0.5730444", "0.570976", "0.56861484", "0.5684176", "0.567845", "0.5640153", "0.56394726", "0.56394726", "0.56394726", "0.56394726", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.56234723", "0.5616917", "0.55811447", "0.5559769", "0.5546136", "0.55344886", "0.55344605", "0.55237544", "0.5513242", "0.55072504", "0.54988444", "0.5498027", "0.5492024", "0.5491722", "0.5491722", "0.54790974", "0.54649365", "0.5435017", "0.5434317", "0.5411936", "0.5393826", "0.53936744", "0.53929126", "0.5386405", "0.5375113", "0.5365847", "0.5353048", "0.53474116", "0.5341303", "0.5338334", "0.53367335", "0.5331976", "0.5326686", "0.53228563", "0.53194195", "0.53144175", "0.53144175", "0.5312994", "0.53085834", "0.5298937", "0.528758", "0.52851295", "0.52849287", "0.52831703", "0.5282427", "0.52693075", "0.52500725", "0.5249555", "0.5248303", "0.52444077", "0.5237037", "0.5235462", "0.5231289", "0.52297866", "0.5220554", "0.5219916", "0.52087593", "0.5207116", "0.5207116", "0.5207116", "0.52057284" ]
0.0
-1
Invoked if device address is not defined, validates connection.
def set_device_address address_input = nil while address_input.nil? puts 'Enter device address (e.g.: https://google.com):' begin Excon.get($stdin.gets.chomp!, connect_timeout: 15) rescue StandardError puts 'Unable to connect to device, please check the address.' else address_input = $_ end end address_input end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_devices\n\traise \"No connected device was found.\" if no_device?\nend", "def check_address(_)\n raise NotImplementedError\n end", "def validate_devices\n [gateway_serial, transmitter_serial].each do |_serial|\n errors.add_to_base(\"Device #{_serial} is not available. Please verify the serial number.\") unless (_serial.blank? || Device.available?( _serial, senior))\n end\n end", "def eaddrnotavail?() EADDRNOTAVAIL == @error_code; end", "def check_host\n # rubocop:disable Style/GuardClause\n if config[:host] == '0.0.0.0'\n logger.warn 'Will listen on all interfaces (0.0.0.0).' \\\n ' Consider using 127.0.0.1 (--host option).'\n end\n # rubocop:enable Style/GuardClause\n end", "def check_host\n # rubocop:disable Style/GuardClause\n if config[:host] == '0.0.0.0'\n logger.warn 'Will listen on all interfaces (0.0.0.0).' \\\n ' Consider using 127.0.0.1 (--host option).'\n end\n # rubocop:enable Style/GuardClause\n end", "def address_present\n if address.blank?\n errors.add(:address, \"Can't be empty\")\n end\n end", "def got_address(_)\n self.getting = false\n connect!\n end", "def device_ipaddress=(_arg0); end", "def device_ipaddress=(_arg0); end", "def cross_check_mac_address_with_ouis\n oui = self.mac_address.to_s.upcase.gsub(/[^A-F0-9]/,'')[0,6]\n if Oui.where(:value => oui).first == nil or Oui.where(:value => oui).first.manufacturer != self.phone_model.manufacturer\n errors.add( :mac_address, \"The given mac address doesn't match to the OUIs of the manufacturer #{self.phone_model.manufacturer.name}.\" )\n end\n end", "def no_device?\n\ttrue if devices.size < 1\nend", "def validateaddress(coinaddress)\n coind.validateaddress\n end", "def cross_check_mac_address_with_ouis\n\t\toui_str = self.mac_address.to_s().upcase().gsub( /[^A-F0-9]/, '' )[0,6]\n\t\toui_obj = Oui.where( :value => oui_str ).first\n\t\tif oui_obj == nil \\\n\t\t|| (self.phone_model && self.phone_model.try(:manufacturer) != oui_obj.manufacturer)\n\t\t\terrors.add( :mac_address, I18n.t(:mac_address_not_matching_oui, :manufacturer => self.phone_model.try(:manufacturer).try(:name) ))\n\t\tend\n\tend", "def validate_from_input\n if outbound_request? 
&& !account.phone_numbers.where(number: from).exists?\n errors.add(:from, I18n.t('errors.messages.parameter_not_found'))\n end\n end", "def device?\n type == :device\n end", "def server_exists_at(address)\n DNSSD.register!(address.name, TYPE, nil, address.port)\n end", "def physical_device?\n if udid.nil?\n stack = Kernel.caller(0, 6)[0..-1].join(\"\\n\")\n raise RuntimeError,\n %Q[udid is nil\n\n#{stack}\n\n name: #{name}\nversion: #{version}\n]\n end\n !udid[DEVICE_UDID_REGEX, 0].nil?\n end", "def error?\n\t\tEventMachine::report_connection_error_status(@signature) != 0\n\tend", "def validate!\n logger.debug \"Starting validation for #{description}\"\n raise NotFound.new name, connection unless exists?\n logger.info \"Successfully validated #{description}\"\n self\n end", "def validate_connection\n request_token = request.params[\"token\"] || request.env[\"HTTP_AUTHORIZATION\"]\n unless request_token\n render \"missing token\", event: :error\n finish\n return false\n end\n\n service = Localhook::EndpointService.new\n @endpoint = service.endpoint_with_token(request_token)\n unless @endpoint\n render \"invalid token #{request_token}\", event: :error\n finish\n return false\n end\n\n render @endpoint.name, event: :endpoint\n return true\n end", "def test_port(port)\n !EventMachine::Synchrony::TCPSocket.new('127.0.0.1', port).nil?\nrescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH\n false\nend", "def manage_address\n enable_dhcp if new_resource.bootproto == 'dhcp' && current_resource.bootproto != 'dhcp'\n return unless new_resource.bootproto == 'static'\n\n config_static unless new_resource.address.nil? || (current_resource.bootproto == 'static' && ip_subnet_exist?)\n config_gateway unless new_resource.gateway.nil? || (current_resource.bootproto == 'static' && current_resource.gateway == new_resource.gateway)\n end", "def valid_connection?\n @connection && @connection.valid?\n end", "def validate_ip_address(item)\n error(msg: 'Invalid IP address string', item: __method__.to_s) if (begin\n IPAddr.new(item)\n rescue StandardError\n nil\n end).nil?\n end", "def socket?() end", "def addr_valid?(res)\n res << \"Provided IP addr isn't valid!\" unless ip_validation_lib.valid? addr\n end", "def initialize(address)\n @address = address\n handle_socket_errors { super address.unix }\n end", "def validate_from\n @from = validate_address(FROM)\n end", "def validate_to_address\n raise StanzaErrors::FeatureNotImplemented.new(self, 'cancel') unless pubsub\n end", "def check_devices\n\n\n end", "def address(format = :full)\n fail\n end", "def connect_mock(_, addr)\n port, = Socket.unpack_sockaddr_in(addr)\n if [1234, 4321].include?(port)\n raise Errno::EISCONN\n end\n raise Errno::ECONNREFUSED\n end", "def validate_datasource!\n return if datasource[:access] == 'proxy' && datasource[:type] == 'prometheus'\n\n raise_error 'Only Prometheus datasources with proxy access in Grafana are supported.'\n end", "def can_activate?\n if custom?\n unless server.blank? || port.blank? || username.blank? || password.blank? 
|| from_address.blank?\n true\n else\n false\n end\n elsif wundercoach_standard?\n unless replyto_address.blank?\n true\n else\n false\n end\n end\n end", "def connection_valid?\n begin\n result = client.call(:fe_dummy).body[:fe_dummy_response][:fe_dummy_result]\n @observations << \"app_server: #{result[:app_server]}, db_server: #{result[:db_server]}, auth_server: #{result[:auth_server]}\"\n result[:app_server] == \"OK\" and result[:db_server] == \"OK\" and result[:auth_server] == \"OK\"\n rescue => e\n @errors << e.message\n @backtrace = e.backtrace\n false\n end\n end", "def validate_network_configuration!(network_name, root_options, network_options, driver)\n if root_options[:ip] &&\n driver.network_containing_address(root_options[:ip]) != network_name\n raise Errors::NetworkAddressInvalid,\n address: root_options[:ip],\n network_name: network_name\n end\n if network_options[:subnet] &&\n driver.network_containing_address(network_options[:subnet]) != network_name\n raise Errors::NetworkSubnetInvalid,\n subnet: network_options[:subnet],\n network_name: network_name\n end\n true\n end", "def validate\n unless address =~ /^(\\d{1,3}\\.){3}\\d{1,3}$/\n raise ValidationError, \"Invalid address\"\n end\n end", "def validate\n notify_devs and return false if @client_id < 1 ||\n Event.sources.keys.exclude?(@event_source) ||\n Event.names.keys.exclude?(@event_name) ||\n @event_data.blank? ||\n @event_timestamp == 0 ||\n Event::NAME_CONFIG[@event_name.to_s][:inavlid_source].include?(@event_source)\n true\n end", "def check_options\n unless @options[:stub]\n STDERR.puts \"Please specify a host to connect to using --host\" unless @options[:host]\n STDERR.puts \"Please specify a model to check using --model\" unless @options[:model]\n return false unless @options[:host] && @options[:model]\n end\n\n true\n end", "def allow_net_connect?\n allow_net_connect\n end", "def has_required_host?\n true\n end", "def checkConnection\n unless connected?\n raise DictError.new(), \"Not connected.\"\n end\n end", "def checkConnection\n unless connected?\n raise DictError.new(), \"Not connected.\"\n end\n end", "def validate_address( address, symbol )\n get(\"/validateAddress/#{address}/#{symbol}\")\n end", "def connect(address)\n attach(:connect, address)\n true\n end", "def parse_address\n address = get_element('//t:RequestSecurityTokenResponse/wsp:AppliesTo/addr:EndpointReference/addr:Address')\n @validation_errors << \"Address field is empty.\" and return if address.nil?\n @validation_errors << \"Address field is incorrect.\" unless address == self.class.realm\n end", "def address?\n if self.address\n return false\n else\n return true\n end\n end", "def sms_adapter_is_valid\n errors.add(:default_outgoing_sms_adapter, :is_invalid) unless default_outgoing_sms_adapter.blank? ||\n Sms::Adapters::Factory.name_is_valid?(default_outgoing_sms_adapter)\n end", "def post_init\n get_ip_address_and_port_or_close_connection\n end", "def rescue_sock_error\n print \"--- Enter Enotify host [localhost:5000]: \".yellow\n host_and_port = STDIN.gets.strip\n if blank_string?(host_and_port)\n @enotify_host, @enotify_port = ['localhost', @default_options[:enotify_port]]\n else\n @enotify_host, @enotify_port = host_and_port.split(/\\s:\\s/)\n @enotify_port = @enotify_port.to_i\n end\n enotify_connect\n end", "def gateway_check\n raise \"A gateway connection is necessary to call this method! You'll have to do it inside any event (e.g. 
`ready`) or after `bot.run :async`.\" unless connected?\n end", "def check_server\n raise Exception.new(\"Could not reach Apocalypse server please check the address and port and try again.\") unless server_reachable?\n end", "def device_exists?(domain,input_name,device_ip)\n Chef::Log.debug(\"Loggly/#{domain}: Checking to see if device IP #{device_ip} exists on input #{input_name}...\")\n answer = false\n devices = get_devices(domain,input_name)\n unless devices.nil?\n devices.each do |device|\n if device.has_value?(device_ip)\n Chef::Log.debug(\"Loggly/#{domain}: Found existing device for IP #{device_ip} on input #{input_name}.\")\n answer = true\n else\n Chef::Log.debug(\"Loggly/#{domain}: Did not find existing device for IP #{device_ip} on input #{input_name}.\")\n end\n end\n end\n return answer\n end", "def connect()\n\n\t\t# if already connected\n\t\treturn nil if self.connected?\n\n\t\tiPort = @mPortOptions[:ethernetPort]\n\n\t\tsIP = @mPortOptions[:ethernetIPbroadcast]\n\t\tsIPme = @mPortOptions[:ethernetIP]\n\t\taIPignore = SssSEMapp.get(:ethernetIPsIgnore, [sIPme])\n\n\t\tbegin\n\n\t\t\t@oUDPsocketBroadcast = EM::open_datagram_socket(sIP, iPort, SssSEMServer, sIP, aIPignore)\n\n\t\trescue Exception => e\n\n\t\t\t@oUDPsocketBroadcast = nil\n\t\t\tputs 'KO:error when binding to ' << sIP << ':' << iPort.to_s\n\t\t\t#raise e\n\n\t\tensure\n\n\t\tend # try, catch binding broadcast\n\n\t\tbegin\n\n\t\t\t@oUDPsocketToMe = EM::open_datagram_socket(sIPme, iPort, SssSEMServer, sIPme, aIPignore)\n\n\t\trescue Exception => e\n\n\t\t\t@oUDPsocketToMe = nil\n\t\t\tputs 'KO:error when binding to ' << sIPme << ':' << iPort.to_s\n\t\t\t#raise e\n\n\t\tensure;\n\n\t\tend # try, catch binding self\n\n\t\treturn YES\n\n\t\t# TODO: start settings synchronizer, event manager? We need to read or at least write settings to Arduinos and provide information to SkyTab\n\t\tputs 'TODO: settings synchronizer'\n\n\tend", "def arguments_valid?\n if @arguments.length == 1\n @options.port = @arguments[0].to_i\n return true\n end\n @arguments.length == 0\n end", "def test_noneuclideon\n\t\tassert_raise( RuntimeError ) { @room.connect(1, Direction::NORTH, Door.new(0)) }\n\tend", "def check_ip_any_alias\n case @options[:ipaddr]\n when nil, '', 'any', /^\\s*$/\n @options[:ipaddr] = '0.0.0.0'\n @options[:netmask] = 0\n end\n end", "def valid?\n ping\n end", "def sip_descriptor_valid?\n validate_sip_descriptor unless @sip_descriptor_errors\n @sip_descriptor_errors.empty?\n end", "def valid_attendee\n #puts \"USER DEVICE TOKENS #{@device_tokens}\"\n return (!@phone_number.nil? && !@phone_number.empty?)\n # && !@first_name.nil? && !@first_name.empty? && !@last_name.nil? && !@last_name.empty?)\n end", "def validate_client_ip_address\n return unless Rails.configuration.valid_client_ips\n\n client_ip = request.remote_ip\n fail_if_invalid_client_ip(client_ip, Rails.configuration.valid_client_ips)\n end", "def emergency_contact_for?(host)\n self.is_emergency_contact_for? 
host\n end", "def validate\n r = super\n return r unless r.success?\n\n r = fetch_and_validate_client\n return r unless r.success?\n\n return error_with_identifier(\"no_configurator_access\", \"am_cc_gpd_v_1\") if [email protected]_web_host_setup_done?\n\n success\n end", "def configure(conf)\n super\n log.trace \"configure(conf)\"\n check_port(conf)\n check_buffer(conf)\n end", "def lint_adapter_reader\n if gateway_instance.adapter != identifier\n complain \"#{gateway_instance} must have the adapter identifier set to #{identifier.inspect}\"\n end\n rescue MissingAdapterIdentifierError\n complain \"#{gateway_instance} is missing the adapter identifier\"\n end", "def valid?\n (ip.nil? || name.nil?) ? false : true\n end", "def address_bound?(ip, port)\n out, err, rc = shellCmd(\"/usr/sbin/lsof -i @#{ip}:#{port}\")\n return rc != 0\n end", "def create\n if Device.find_by :macaddress => params[:macaddress]\n redirect_to new_device_url, :notice => 'Device already registered, If there is any issue please contact admin.'\n else\n @device = Device.new(device_params)\n @device.user = current_user\n\n respond_to do |format|\n if @device.save\n format.html { redirect_to @device, notice: 'Device was successfully created.' }\n format.json { render action: 'show', status: :created, location: @device }\n else\n format.html { render action: 'new' }\n format.json { render json: @device.errors, status: :unprocessable_entity }\n end\n end\n end\n end", "def eaddrinuse?() EADDRINUSE == @error_code; end", "def valid_address? address\n code = call_blockchain_api(\"checkaddress/#{address}\")\n\n !['X5', 'SZ', 'CK'].include?(code)\n end", "def validate_pos_invoice\n if self.customer_id && !self.pos_invoice_addresses\n raise_error('Billing address Required!!!!')\n end\nend", "def valid_endpoint(ep)\n if ep == nil\n raise ArgumentError, \"Request: An endpoint must be set\", caller\n elsif not ep.is_a? String\n raise TypeError, \"Request: An endpoint must be a string\", caller\n elsif ep.length == 0 # If endpoint was set to \"\"\n raise TypeError, \"Request: Endpoint cannot be empty\", caller\n elsif ep[0] != '/'\n raise ArgumentError, \"Request: Endpoint must begin with a '/'\", caller\n end\n\n # If we make it to here, we're good - so just return the endpoint\n ep\n end", "def validate!\n super\n if admin_mysql_ifaces\n # Check for .sock or IP:PORT\n unless admin_mysql_ifaces =~ /\\.sock|\\:\\d+/\n raise \"Provide admin_variables['mysql_ifaces'] in a form of \"\\\n \"'127.0.0.1:6032' or '/var/lib/mysql/mysql.sock'\"\n end\n else\n raise \"Provide admin_variables['mysql_ifaces'] attribute\"\n end\n if admin_credentials\n if admin_credentials.split(':').size != 2\n raise \"Provide admin_variables['admin_credentials'] in a \"\\\n \"form of 'admin:admin'\"\n end\n else\n raise \"Provide admin_variables['admin_credentials'] attribute\"\n end\n\n output = Mixlib::ShellOut.new('which mysql').run_command\n unless output.status.success?\n raise 'Install MySQL client not found for loading '\\\n 'config to RUNTIME'\n end\n end", "def validateaddress(namecoin_address)\n request :validateaddress, namecoin_address\n end", "def validate_config!\n raise NotImplementedError, \"Implement #{__callee__} in #{self.class.to_s}\"\n end", "def valid?\n (internal_ip.nil? || name.nil?) ? 
false : true\n end", "def attempt_to_connect(mac_address, sleep_for = SLEEP_BETWEEN)\n connected = false\n response = cmd('connect', mac_address)\n response_lines = response.split(\"\\n\")\n if response_lines.first != MSGS.attempting % mac_address\n abort MSGS.unexpected_connection_msg % response_lines.join(\"\\n\")\n end\n case response_lines.last\n when BLUETOOTHCTL.successful_connect\n connected = true\n puts MSGS.connected_to % mac_address\n when BLUETOOTHCTL.failed_to_connect\n puts MSGS.failed_connected_to % mac_address...\n unless connected\n puts MSGS.sleeping % sleep_for\n sleep sleep_for\n end\n else\n abort MSGS.unrecognized_response % response_lines.join(\"\\n\")\n end\n connected\nend", "def has_address(interface)\n ip = Facter::Util::IP.get_interface_value(interface, 'ipaddress')\n if ip.nil?\n false\n else\n true\n end\nend", "def gateway_check\n return if connected?\n\n raise \"A gateway connection is necessary to call this method! You'll have to do it inside any event (e.g. `ready`) or after `bot.run :async`.\"\n end", "def optional_port; end", "def should_validate?(adapter)\n # settings for the default outgoing adapter should always be validated\n return true if default_outgoing_sms_adapter == adapter\n\n # settings for an adapter should be validated if any settings for that adapter are present\n case adapter\n when \"Twilio\"\n twilio_phone_number.present? || twilio_account_sid.present? || twilio_auth_token1.present?\n when \"FrontlineCloud\"\n frontlinecloud_api_key.present?\n end\n end", "def invalid_host?(host)\n !host.class.to_s.eql?(\"String\") || host.empty?\n end", "def nagios_check\n create_and_parse_cli_options\n return 1 unless check_options\n\n exit_code = Nagios::UNKNOWN\n\n begin\n # Instantiate the proper device\n device = create_device(options[:vendor], options[:model])\n # check if snmp community is correct or not for authentication\n if device.is_snmp_community_correct?(options[:community], options[:host])\n status = device.query_snmp(options)\n else\n status = Status.new(Nagios::WARNING, \"Invalid SNMP community string specified\")\n end\n exit_code = status.code\n message = status.message\n rescue StandardError => e\n message = \"%s\" % e\n exit_code = Nagios::UNKNOWN\n ensure\n puts message\n end\n exit_code\n end", "def check_context(device)\n\n dev = device\n\n dev\n end", "def host_allowed?(arg)\n true\n end", "def verify_connection!(uri); end", "def use_default_address\n return unless self.address\n self.address = client.addresses.first\n end", "def device_ipaddress; end", "def device_ipaddress; end", "def check_ip; end", "def validate(address)\n result = fetch_result(address)\n return result.verified? || result.unknown?\n end", "def validate_to_address\n to = validate_to\n unless to.nil? 
|| to.bare == stream.user.jid.bare\n raise StanzaErrors::Forbidden.new(self, 'auth')\n end\n end", "def ip_valid?\n return if ip.blank?\n\n IPAddr.new(ip.strip, Socket::AF_INET)\n rescue IPAddr::InvalidAddressError, IPAddr::AddressFamilyError\n errors.add(:ip, :invalid)\n end", "def test_nil_connparms\n @conn.disconnect\n #\n assert_nothing_raised do\n @conn = Stomp::Connection.open(nil, nil, host, port, false, 5, nil)\n end\n checkEmsg(@conn)\n end", "def dhcp?\n config[\"dhcp\"]\n end", "def iphone_required(udid = params[:udid])\n udid = udid.to_s.strip\n \n raise \"IPhone UDID isn't given\" if udid.blank?\n raise \"IPhone UDID is invalid\" unless /^[0-9a-f]{40}$/i =~ udid\n \n @iphone = Iphone.find_or_create_by_udid(udid)\n\n raise \"IPhone is not allowed to send free messages\" unless @iphone.can_send_free_messages?\n end", "def check_params #:doc:\n if params[:username] !~ /.{1,}/ or params[:password] !~ /.{1,}/ or\n params[:devicename] !~ /.{1,}/ or params[:dev_type] !~ /.{1,}/ or\n (params[:port] != nil and params[:port] !~ /\\d{1,10}/)\n return false\n else\n return true\n end\n end", "def handles_usage_errors?\n !usage_error_handler.nil?\n end", "def connect\n require 'littlewire' unless defined?(::LittleWire)\n @littlewire = ::LittleWire.new(connect_to_usb)\n super\n return true\n end" ]
[ "0.6138281", "0.5802578", "0.56302357", "0.5494217", "0.54739004", "0.54739004", "0.5449092", "0.5441649", "0.5281649", "0.5281649", "0.5233416", "0.5214443", "0.5205222", "0.51662564", "0.5141159", "0.51387495", "0.51349705", "0.51312983", "0.5108274", "0.5100574", "0.5093327", "0.5085419", "0.50756484", "0.50739837", "0.5043163", "0.50426817", "0.5039068", "0.5037485", "0.50302994", "0.5030088", "0.5008026", "0.50079834", "0.50067043", "0.500081", "0.4998775", "0.49850073", "0.49808574", "0.49766564", "0.49617764", "0.4958673", "0.49549952", "0.49522635", "0.49472123", "0.49472123", "0.4934134", "0.49217325", "0.49082768", "0.49029464", "0.48995513", "0.4896931", "0.48950195", "0.489385", "0.48936588", "0.4891502", "0.48881724", "0.48842233", "0.48813453", "0.4874069", "0.48727372", "0.4868042", "0.4859493", "0.48578566", "0.4856674", "0.48555735", "0.4855372", "0.48527893", "0.4850172", "0.484332", "0.48432305", "0.4836919", "0.48356706", "0.4833417", "0.48238793", "0.4822241", "0.48216897", "0.48203394", "0.48135957", "0.48128843", "0.48118058", "0.4801335", "0.48002458", "0.4799753", "0.47862965", "0.47821885", "0.47819373", "0.47778302", "0.47723457", "0.4772284", "0.47649622", "0.47649622", "0.4764501", "0.47588268", "0.47569287", "0.47564206", "0.47442997", "0.47384042", "0.4734718", "0.4734137", "0.47331235", "0.47296605" ]
0.5636548
2
Invoked if the API key is not defined; checks whether the response status is 200 for a successful request
def set_api_key(address)
  api_key_input = nil
  while api_key_input.nil?
    puts 'Enter your API key.'
    puts 'API key can be found at System > Admin, select your user and click Show.'
    get_response = Excon.get("#{address}/admin/scaffolds/switch_devices/index.json?api_key=#{$stdin.gets.chomp!}")
    if get_response.status === 200
      api_key_input = $_
    else
      puts 'Invalid key.'
    end
  end
  api_key_input
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_api_key\n return if api_key\n @logger.display('api_key_missing')\n exit 1\n end", "def check_api_key\n # Return 403 Forbidden if there is no api key in the request headers\n head :forbidden unless self.current_api_key\n end", "def verify_api_key\n # Confirm that it's a json request. This is irrelevant otherwise.\n if request.format && request.format.symbol && request.format.symbol == :json\n # We must have a key, either way. If no key, pass forbidden response.\n if params[:key].nil? && (request.env['HTTP_REFERER'] =~ Regexp.new(request.env['HTTP_HOST'])).nil?\n render :json => { :errors => \"Invalid API key.\" }, :status => :forbidden\n else\n if (request.env['HTTP_REFERER'] =~ Regexp.new(request.env['HTTP_HOST'])).nil?\n # Find by key\n @key = ApiKey.find_by_key(params[:key])\n if @key.nil?\n # Throw error if no key found.\n render :json => { :errors => \"Invalid API key.\" }, :status => :forbidden\n end\n end\n end\n end\n end", "def verify_key\n @user = get_api_user\n if @user.nil?\n render :nothing => true, :status => 503\n else\n render :layout => false\n end\n end", "def verify_key\n\t\t\tunless (Happyfunrun::app_id==params[:app_id] and Happyfunrun::api_key==params[:api_key])\n\t\t\t\trender :json=>{:status=>'300', :error=>'Access Denied'}\n\t\t\t\treturn\n\t\t\tend\n\t\tend", "def verify_api_key\n api_key = request.headers['HB-APIKey']\n if (api_key.nil?)\n api_key = params[:api_key]\n if api_key.nil?\n render :status => 403, :json => {:message => \"Api key missing\"}\n return\n end\n end\n \n api_key_object = ApiKey.find_by_key(api_key)\n if api_key_object.nil?\n render :status => 403, :json => {:message => 'Invalid API Key'}\n return\n end\n end", "def verify_access_with_api_key\n api_key = request.headers[\"HTTP_API_KEY\"] || params[:api_key]\n andrew_id = request.headers[\"HTTP_ANDREW_ID\"] || params[:andrew_id]\n if (api_key.nil? || andrew_id.nil?)\n render json: {error: \"Error, bad request\"}, status: 400\n elsif !(key_matches?(api_key, andrew_id))\n render json: {error: \"Error, unauthorized user or API key\"}, status: 401\n # Inactive users are not allowed to use their keys for any reason.\n elsif !@cur_user.active\n render json: {error: \"Error, the account associated with this andrew ID has been suspended\"}, status: 401\n end\n end", "def validate_api_key\n unless self.api_key #&& (api_key[\"-\"] || self.api_endpoint)\n raise KeyyoError, \"You must set an api_key prior to making a call\"\n end\n end", "def require_api_key\n api_key = params[:api_key]\n return if api_key == ENV['ADMIN_API_KEY'] && !ENV['ADMIN_API_KEY'].nil?\n response = {\n status: 'error',\n version: 1,\n error: 'Invalid API key'\n }\n respond_with response, status: :forbidden\n end", "def authenticate_api_key!\n return if current_auth_api_key.present?\n\n render nothing: true, status: :unauthorized\n end", "def check_api_key_auth\n User.current = User.find_by_api_key(params[:key])\n unless User.current\n render :text => \"Not Authorized\", :status => 403\n return\n end\n end", "def verify_api_key\n return :false if invalid_options?\n value = http_post http_instance, 'verify-key', :key => options[:api_key], :blog => options[:blog]\n self.verified_key = (value == \"valid\") ? 
true : :false\n end", "def have_valid_api_key?\n @have_valid_api_key\n end", "def api_authenticate\n unless params[:api_key] == \"oz\" || api_user\n render :json => {:error => \"API key not found\"}, :status => :unauthorized\n end\n end", "def verify_api_key\n http = Net::HTTP.new(@@detectors[@detector], 80, @proxy_host, @proxy_port)\n resp, data = http.post('/1.1/verify-key', \"key=#{@api_key}&blog=#{@blog}\", STANDARD_HEADERS)\n @verified_key = (data == \"valid\") ? true : :false\n end", "def api_key\n api_key = request.headers['X-ApiKey']\n return true\n end", "def api_key?\n !api_key.nil?\n end", "def handler_check(event)\n if not @enabled\n event.respond \"Missing API Key\"\n false\n else\n true\n end\n end", "def authenticate\n @apikey = request.headers[:apikey]\n if @apikey==nil || @apikey!= APIKEY\n json_response={\n error: 'autorization error'\n }\n respond_with json_response, location: nil\n end\n end", "def show_api_key\n if @current_user.api_key != nil\n # If the user already has an api key, return it\n render json: { api_key: @current_user.api_key }, status: :ok\n else\n head :not_found\n end\n end", "def check_api_key(key_value)\n\n if Key.find_by_key_value(key_value).nil?\n\n redirect_to unauthorized_key_path\n\n return false\n\n end\n\n return true\n\n end", "def api_key_check(api_key)\n if Java::HarbingerSdkData::AppAuthenticationToken.firstWith({\".authToken\" => api_key},@entity_manager)\n true\n else\n false\n end\n end", "def key_check\n logger.debug \"checking authorization key \"\n unauthorized \"No authorization key provided. Please pass the API token as GET parameter named 'key'. Example: ?key=abcdef\" if params[:key].nil?\n logger.debug \"token is \" + params[:key] unless params[:key].nil?\n end", "def api_key\r\n user_application = UserApplication.find_by_api_key(params[:api_key])\r\n unless user_application\r\n error_message = ErrorMessage.new('401','Applikationen saknar giltig api-nyckel',\r\n \"Serverfel. Kontakta utvecklare.\")\r\n render json: error_message, status: :unauthorized\r\n end\r\n end", "def restrict_access\n # check if the request has an API key as part of it...\n end", "def valid_api_key?(api_header_val, api_key_lookup_val, env)\n !api_header_val.nil? && api_header_val != '' &&\n !api_key_lookup_val.nil? && api_key_lookup_val != ''\n end", "def api_key?\n !!(api_key)\n end", "def api_auth\n api_response(403, \"Invalid Authorization header\") unless api_user\n end", "def valid_api_key?\n !api_key.nil? && api_key =~ API_KEY_REGEX\n end", "def allow_api_key\n @api_key_allowed = true\n end", "def authenticate_with_api_key(api_key)\n self.api_key == api_key\n end", "def api_available?\n begin\n response = self.api_status\n if response.is_a?(Hash) && response['ok']\n true\n else\n false\n end\n rescue => e\n false\n end\n end", "def api_key?\n key = CoreExtensions::String.new api_key.to_s\n key.present?\n end", "def check_authorization\n return head :unauthorized if request.env['HTTP_CAPKEY'].nil?\n\n head :forbidden unless request.env['HTTP_CAPKEY'] == Settings.API_KEY\n end", "def verify(api_key = nil)\n raise ConfigurationError, \"You must provide an API key to verify\" if api_key.nil? 
&& self.api_key.nil?\n perform(:verify, { :providerkey => provider_key, :apikey => api_key || self.api_key }, :get, Success)\n end", "def developer_key_authentication\n authenticate_or_request_with_http_token do |token|\n Api.exists?(key: token)\n end\n end", "def unauthorized_api_key(env)\n body_text = 'The API key provided is not authorized.'\n [401, {'Content-Type' => 'text/plain; charset=utf-8',\n 'Content-Length' => body_text.size.to_s}, [body_text]]\n end", "def check_key_valid\n @key_warning = false unless defined?(@key_warning)\n if !configuration.valid_api_key? && !@key_warning\n configuration.warn(\"No valid API key has been set, notifications will not be sent\")\n @key_warning = true\n end\n end", "def check_api_key!\n self.mirror_urls\n rescue\n raise \"Please check your TVDB API Key!\"\n end", "def api_request?\n false\n end", "def ensure_configured\n raise ApiKeyError unless config.api_key\n raise AppKeyError unless config.app_key\n end", "def get_api_key\r\n\t\t#we search for the keys if existed\r\n\t\t#binding.pry\r\n\t\tif request.headers[\"XAPIKEY\"]\r\n\t if request.headers[\"ENVAPIKEY\"] && request.headers[\"ENVAPIKEY\"] == APP_CONFIG['API_Key'] \r\n\t \tapi_key =request.headers[\"XAPIKEY\"]\r\n\t \tenv_api_key = request.headers[\"ENVAPIKEY\"]\r\n\t \tif api_key.empty?\r\n\t\t \treset_session #kill session ite hacker attemted\r\n\t\t \ttoken_error\r\n\t \telse\r\n\t\t \t@user=User.where({authentication_token: api_key}).first\r\n\t\t\t if @user.nil?\r\n\t\t\t \treset_session #kill session ite hacker attemted\r\n\t\t\t \tlogin_token_error\r\n\t\t\t else\r\n\t\t\t \tsession[:user] = @user\r\n\t\t\t \tparams[:user] = @user\r\n\t\t\t \tparams[:env_api_key] = env_api_key\r\n\t\t\t \tparams[:api_key] = api_key #we on development so we do not need make sure that header has X-API-KEY in requests\r\n\t\t\t end\r\n\t \tend\r\n \telse \r\n\t\t \treset_session #kill session ite hacker attemted\r\n\t\t \ttoken_error\r\n \tend\r\n else\r\n \ttoken_error\r\n end\r\n\tend", "def authorize!\n api_key = ApiKey.find_by_access_token(params[:access_token])\n head :unauthorized unless api_key\n return false\n end", "def check_tenant_api_key\n\n # If key is not present or is not valid, return 401: Not Authorized\n if !check_key || !Tenant.exists?(api_key: params[:tenantkey])\n \n head(401)\n #redirect_to '/404.html'\n\n # If Tenant API Key exists\n elsif Tenant.exists?(api_key: params[:tenantkey])\n\n # Getting Tenant by API Key\n tenant = Tenant.where(['tenants.api_key = ?', params[:tenantkey]]).first\n\n ######################\n # Throttle Middleware\n ######################\n\n # Total requests made for current date and tenant (count_requests: defined in model)\n ttlrequests = tenant.count_requests\n \n # Get last tenant request\n tenantrequest = \n Tenantsrequest.where(['tenant_id = ?', tenant.id])\n .order('created_at desc').first\n\n # If request counter is greater than 100, throttle to 1 request per 10 seconds.\n if ttlrequests > 100\n \n # Time now UTC lower than last request saved + 10 seconds\n if Time.current.utc <= (tenantrequest.created_at + 10.seconds)\n head(503) # return Service Unavailable\n else\n # Create a new Tenantsrequest\n trequest = Tenantsrequest.new(:tenant_id => tenant.id)\n trequest.save\n end\n\n else\n # Create a new Tenantsrequest\n trequest = Tenantsrequest.new(:tenant_id => tenant.id)\n trequest.save\n end\n\n end\n end", "def authorize_api_key\n # If it's blank, send a 403\n if params[:api_key].blank?\n head :forbidden\n return\n end\n\n # Search for the 
key\n key = ApiKey.where(key: params[:api_key])\n\n # If there's no key, it's not authorized, 403 it\n if key.blank?\n head :forbidden\n return\n end\n\n # Update the key with the current time so we know it's being used\n key.first.update last_used: DateTime.now\n session[:current_key_id] = key.first.id\n end", "def authenticate_json_request\n #return true unless Rails.env.production?\n\n # TODO Turn this back after making it correctly check for API requests\n if false && APIKeysActive == true && Rails.env.production?\n # Is it safe to suppose that ALL JSON requests will be API requests?? -SR\n #we'll check the mime types once 1.0 is deprecated, and 2.0 servers both html and json - RJ\n\n #case request.format\n #when Mime::JSON\n #/^Token token=\"(.+?)\"$/ - This should be integrated in the near future because we want\n # to check for the Token token portion of the header value.\n regex = /^.*\\\"([\\w]+)\\\"$/.match(request.authorization)\n regex ||= Array.new #guarantees the array accessor works on the next line\n key = regex[1]\n render :json => OldApi.error(403, \"Invalid Api Key\"), :status => 403 and return unless ApiKey.exists?(key: key)\n #end\n end\n end", "def master_api_key?\n params[:master_api_key] == \"thisisatest\"\n end", "def authenticate_client_access!\n return if api_key_from_params.nil?\n\n return require_api_client if access_from_localhost?\n return require_api_client if access_from_preview_hosting? && restricted_api_access_mode?\n return require_api_client if access_from_production_hosting?\n return require_api_client if access_from_public_hosting?\n\n true # Do not halt otherwise\n end", "def require_api_token\n end", "def get_evn_api_key\r\n\t\t#binding.pry\r\n\t\tsession[:user] = nil\r\n if request.headers[\"ENVAPIKEY\"] && request.headers[\"ENVAPIKEY\"] == APP_CONFIG['API_Key'] \r\n \tparams[:env_api_key] = request.headers[\"ENVAPIKEY\"]\r\n \telse \r\n\t \ttoken_error\r\n \tend\r\n\tend", "def validate_code\n \n # Don't validate in dev mode\n return true if Rails.env==\"development\"\n \n begin\n \n id = params[:key].to_s.split(\"-\")[0]\n code = params[:key].to_s.split(\"-\")[1]\n \n @version = params[:version].to_i\n \n @api_key = ApiKey.find( id )\n if ( @api_key && @api_key.enabled? 
&& @api_key.code==code )\n return true\n else\n reject and return false\n end\n \n rescue\n reject and return false\n end\n \n end", "def exists?\n username && api_key\n end", "def validate_input(values)\n return true unless values['api_key'].strip.empty?\n\n CommonDialogs.showWarning(\"Please provide a #{NAME} API Key.\")\n false\n end", "def test_if_valid_api_key_is_accepted\n real_key_value = \"abcdefgh-ijkl-mnop-qrst-uvwxyz012345\"\n\n begin\n Yo.api_key = real_key_value\n api_key = Yo.api_key\n rescue YoException => e\n api_key = \"\"\n end\n\n assert_equal api_key, real_key_value, \"Yo.api_key does not accept a valid Yo API key\"\n end", "def authenticate_manual \n api_key = request.headers['X-Api-Key']\n @app = App.where(api_key: api_key).first if api_key\n\n unless @app\n head status: :unauthorized\n return false\n end\n end", "def validate_api_filter\n if params[:token].nil?\n token = params[params.keys.reject { |k| [:action, :controller].include?(k) }.first][:token]\n unless token.nil?\n params[:token] = token\n params[params.keys.reject { |k| [:action, :controller].include?(k) }.first].delete(:token)\n end\n end\n @user = User.api_key_authentication(params[:token]) if params[:token]\n unless @user\n render :xml => \"<xml><error><response>Invalid API Key</response></error></xml>\", :status => :forbidden\n false\n end\n end", "def api_key?\n ENV.has_key?(\"#{sub_klass.upcase}_URL_KEY\")\n end", "def api_key\n '' \nend", "def store_api_key\n if !params[:key][:api_key] || params[:key][:api_key] == \"\"\n render :json => {:status => 'failure', :error => \"Key cannot be empty\"}\n else\n session[:api_key] = params[:key][:api_key]\n session[:user] = nil\n render :json => {:status => 'success', :value => {\"api_key\" => session[:api_key]}}\n end\n end", "def custom_authenticate_member(current_member)\n if current_member == nil && params[:master_api_key] != \"thisisatest\" \n render :json => { :errors => [\"Authorized users only.\"] }, status: 401\t\n end\n end", "def authenticate_knox_api_key!\n authenticate_for ApiKey\n end", "def initialize(api_key)\n @api_key = api_key\n check_api_key!\n end", "def is_api?\n current_user\n @env[API_KEY]\n end", "def raise_if_non_authed\n raise 'No access key to make request' unless [email protected]?\n end", "def api_has_param?(key)\n @api[:optional].has_key?(key)\n end", "def cannot_access_api?\n !request.env[\"REQUEST_METHOD\"].eql?(\"GET\") &&\n !request.headers['mw-token'].eql?(ENV[\"api_access_token\"])\n end", "def call_ok?(response = {})\n response['showapi_res_code'] == 1 && response['showapi_res_error'] == 'ok'\n rescue StandardError\n false\n end", "def restrict_access\n unless App.exists?(key: params[:access_token])\n render json: { error: \"API-key invalid. Access denied.\" }, status: :unauthorized\n end\n end", "def api_key; end", "def api_key; end", "def api_error; end", "def check_enabled\n User.current = nil\n parse_request\n unless @api_key.present? and @api_key == Setting.mail_handler_api_key\n render :text => 'Access denied. 
Redmine API is disabled or key is invalid.', :status => 403\n false\n end\n end", "def get_api_key\n @api = Hash.new\n if authenticate!\n if current_user.api_key.nil?\n current_user.generate_new_api_key!\n end\n @api = {:api_key => current_user.api_key}\n else\n @api = {:error => \"The combination of login and password is invalid.\"}\n end\n respond_to do |format|\n format.json do\n render :json => @api.as_json, :callback => params[:jsoncallback]\n end\n end\n end", "def perform_get_request\n # Validate preventing request error\n\n # setup params, like API Key if needed\n\n # Perform the request\n get_request\n end", "def key_based?\n @username && [email protected]? && @api_key && !@api_key.empty?\n end", "def a_pi_key_enable_with_http_info(api_key_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: APIKeyApi.a_pi_key_enable ...\"\n end\n # verify the required parameter 'api_key_id' is set\n fail ArgumentError, \"Missing the required parameter 'api_key_id' when calling APIKeyApi.a_pi_key_enable\" if api_key_id.nil?\n # resource path\n local_var_path = \"/apiKey/enable\".sub('{format}','json')\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n\n # HTTP header 'Accept' (if needed)\n local_header_accept = ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']\n local_header_accept_result = @api_client.select_header_accept(local_header_accept) and header_params['Accept'] = local_header_accept_result\n\n # HTTP header 'Content-Type'\n local_header_content_type = ['application/json', 'application/x-www-form-urlencoded']\n header_params['Content-Type'] = @api_client.select_header_content_type(local_header_content_type)\n\n # form parameters\n form_params = {}\n form_params[\"apiKeyID\"] = api_key_id\n\n # http body (model)\n post_body = nil\n auth_names = []\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'APIKey')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: APIKeyApi#a_pi_key_enable\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def find_user_by_api_key\n unless Rails.env.test?\n @user = User.find_by_api_key(params[:key])\n unless @user || user_signed_in?\n redirect_to root_path, notice: \"Please append your api key to your request\"\n end\n end\n end", "def authenticate_api!\n Rails.logger.info(\"Enter Authenticate Api\")\n \n # just to test we are using HTTP_HOST in test mode as HTTP_ORIGIN cant be set\n Rails.env == \"test\" ? origin = request.env['HTTP_HOST'] : origin = request.env['HTTP_ORIGIN']\n\n if !params[\"token\"].blank? and origin.blank? 
# API Access\n\n account_id = AccountsCache.access_token(params[\"token\"])\n\n raise et(\"application.unauthorized\") if account_id.blank?\n \n # set account_id in params\n if params[:controller] == \"accounts\" and current_account\n params[:id] = current_account._id.to_s if params[:id].blank?\n else\n params[:account_id] = current_account._id.to_s if params[:account_id].blank?\n end\n\n # set the request type\n params[:request_type] = AppConstants.request_type_api\n\n # mark already authenticated\n set_authenticated\n\n # make api request synchronous as of now\n make_sync_request\n end\n rescue => e \n Rails.logger.error(\"**** ERROR **** #{er(e)}\")\n head :unauthorized\n end", "def restrict_access\n api_key = ApiKey.find_by_access_token(request.headers[\"token\"])\n head :unauthorized unless api_key \n end", "def authorize_application\n render json: { ok: false, message: 'unauthorized' }, status: 401 unless request.headers['App-Key'] == 'R141p7THbd5vRdH20xb5SaOsN6bJ5J5S'\n end", "def authenticate_manual \n api_key = request.headers['X-Api-Key']\n @member = Member.where(api_key: api_key).first if api_key\n\n unless @member\n head status: :unauthorized\n return false\n end\n end", "def check_response\n if @res.blank? || @res.error\n flash[:error] = t \"errors.api\"\n end\n end", "def verified?\n (@verified_key ||= verify_api_key) != :false\n end", "def restrict_access\n authenticate_or_request_with_http_token do |token, options|\n ApiKey.exists?(access_token: token)\n end\nend", "def valid_key(token)\n @api_client && ActiveSupport::SecurityUtils.secure_compare(@api_client&.key, Digest::SHA512.hexdigest(token))\n end", "def verify_akismet_key()\n http = Net::HTTP.new('rest.akismet.com', 80, @proxyHost, @proxyPort)\n path = '/1.1/verify-key'\n\n data=\"key=#{@akismetKey}&blog=#{@akismetBlog}\"\n\n resp, data = http.post(path, data, STANDARD_HEADERS)\n @verifiedKey = (data == \"valid\")\n end", "def test_invalid_api_key\n Prowl.any_instance.expects(:perform).returns(666)\n assert_equal 666, Prowl.add(:apikey => \"my api key\", :application => \"Fishes\", :event => \"Silly\", :description => \"Blah\")\n end", "def api_keys?\n !!(Filepreviews.api_key && Filepreviews.secret_key)\n end", "def fetch_api?\n @id && !@data\n end", "def authenticate\n token = params['api_key']\n return if token.nil?\n\n @visitor = Visitor.find_by_api_key(token)\n return if @visitor.present?\n\n response_json = { status: Visitor::INVALID_API_KEY }\n respond_with :api, :v1, response_json, status: :unauthorized\n end", "def check_access_token\r\n\t\t\t\ttoken = request.headers[\"X-Access-Token\"] || nil\r\n\t\t\t\t\r\n\t\t\t\tif token\r\n\t\t\t\t\tfind_token = ApiKey.find_by_access_token(token)\r\n\r\n\t\t\t\t\tif find_token.nil?\r\n\t\t\t\t\t\tinvalid_access_token\r\n\t\t\t\t\tend\r\n\t\t\t\telse\r\n\t\t\t\t\tinvalid_access_token\r\n\t\t\t\tend\r\n\t\t\tend", "def any_key_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: UsageApi.any_key ...'\n end\n # resource path\n local_var_path = '/any'\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'Object'\n\n # auth_names\n auth_names = 
opts[:debug_auth_names] || ['api_key', 'api_key_query']\n\n new_options = opts.merge(\n :operation => :\"UsageApi.any_key\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: UsageApi#any_key\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def initialize(api_key)\n @api_key = api_key\n @last_error = \"\"\n @last_error_code = \"\" \n end", "def valid?\n core_client.api_valid?\n end", "def check_response\n errors.add(:base, :invalid) unless response.present? && response[\"res\"] == 1\n end", "def facebook_api_key\n raise APIKeyNeededException\n end", "def facebook_api_key\n raise APIKeyNeededException\n end", "def initialize(api_key)\n raise ArgumentError, 'api_key is required' if api_key == nil || api_key.empty?\n @api_key = api_key\n\t\tend", "def api_key=(_arg0); end", "def api_key=(_arg0); end", "def api_key=(_arg0); end" ]
[ "0.8188738", "0.7993975", "0.7755387", "0.7746557", "0.77318454", "0.76907986", "0.75247633", "0.74387705", "0.742795", "0.7313595", "0.730159", "0.7284073", "0.7218826", "0.72139716", "0.71080416", "0.7087966", "0.7086078", "0.7060449", "0.70391995", "0.70286494", "0.69969946", "0.69726515", "0.69409305", "0.692355", "0.69048166", "0.6891998", "0.68887234", "0.6882557", "0.67926145", "0.6790239", "0.67316544", "0.6691804", "0.6664476", "0.66419894", "0.6640102", "0.662783", "0.6620256", "0.65686077", "0.6547047", "0.6546423", "0.6530792", "0.65036196", "0.6478564", "0.6396345", "0.6394443", "0.63905674", "0.6383104", "0.63825554", "0.6376058", "0.636632", "0.6356729", "0.63503313", "0.63404053", "0.6336944", "0.6333402", "0.63117874", "0.6310466", "0.6307798", "0.6302357", "0.62743616", "0.6274073", "0.62683797", "0.62468016", "0.6240384", "0.62246853", "0.62185127", "0.6179757", "0.6164951", "0.6148092", "0.6148092", "0.61397815", "0.6135973", "0.6130128", "0.61080885", "0.6105316", "0.60839236", "0.60792494", "0.60707706", "0.606784", "0.6060703", "0.6050401", "0.6021149", "0.6006979", "0.5998695", "0.5997991", "0.5987341", "0.5984767", "0.59777176", "0.5975707", "0.5970951", "0.5963052", "0.59567213", "0.59489805", "0.5947266", "0.5933188", "0.5932952", "0.5932952", "0.59250027", "0.5909015", "0.5909015", "0.5909015" ]
0.0
-1
Connects to the API and delivers the payload via POST. Uses threads for parallel processing of POST requests
def create_entry(payload_array, scaffold)
  api_url = "#{@device_address}/admin/scaffolds/#{scaffold}/create.json?api_key=#{@device_api_key}"
  api_connection = Excon.new(api_url, persistent: true)
  payload_array.map do |payload|
    Thread.new do
      api_connection.post(
        body: JSON[record: payload],
        headers: { 'Content-Type' => 'application/json' },
        persistent: true
      )
    end
  end.each(&:join)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post path_and_params, post_body\n start if not @pid\n @lock.synchronize do\n @last_use = Time.new.to_f\n\n # Make request to xtractr\n Net::HTTP.start('localhost', @port) do |http|\n http.request_post \"/#{path_and_params}\", post_body do |response|\n headers = {}\n response.each_header {|name,val| headers[name] = val}\n return response.code.to_i, headers, response.body\n end\n end\n end\n end", "def post\n RestClient.post(url, @body, @header) do |rso, req, res|\n setup(rso, req, res)\n end\n end", "def post(payload = {})\n request! do\n api[url.path].post(to_payload(payload), API_HEADERS)\n end\n end", "def post url, payload\n RestClient::Request.execute(:method => :post, :url => url, :payload => payload, :headers => lbaas_headers, :timeout => @timeout, :open_timeout => @open_timeout)\n end", "def execute_bulk_requests\n begin\n @responses = Hash.new\n @headers = Hash.new\n @all_urls = Hash.new\n data = ActiveSupport::JSON.decode(params[:data])\n @detailed_results = data[\"detailed_results\"]\n num_threads = data[\"threads\"].to_i\n data[\"lines_to_send\"].threadify(num_threads) { |line|\n path = data[\"path\"].gsub(/XXXCHANGEMEXXX/, line)\n headers = data[\"headers\"].gsub(/XXXCHANGEMEXXX/, line)\n body = data[\"body\"].gsub(/XXXCHANGEMEXXX/, line)\n data.each do |key, value|\n if key.start_with?(\"replace_\") && !key.end_with?(\"_by\")\n path.gsub!(\"XXX#{value}XXX\",data[key + \"_by\"]) if data[key + \"_by\"] != \"\"\n headers.gsub!(\"XXX#{value}XXX\",data[key + \"_by\"]) if data[key + \"_by\"] != \"\"\n body.gsub!(\"XXX#{value}XXX\",data[key + \"_by\"]) if data[key + \"_by\"] != \"\"\n end\n end\n if @cloud\n if @cloud.api == \"Atmos\"\n @responses[line], @headers[line], @all_urls[line] = atmos_request(data[\"http_method\"], path, headers, body)\n elsif @cloud.api == \"Amazon\"\n @responses[line], @headers[line], @all_urls[line] = amazon_request(data[\"http_method\"], path, headers, body)\n elsif @cloud.api == \"Swift\"\n @responses[line], @headers[line], @all_urls[line] = swift_request(data[\"http_method\"], path, headers, body)\n end\n else\n url = path\n uri = URI.parse(url)\n headers_to_send = Hash.new\n headers.split(\"\\n\").each do |row|\n hash = eval(row)\n headers_to_send[hash.keys.first.downcase] = hash.values.first.to_s\n end\n @responses[line] = http_request(url, uri.port, data[\"http_method\"], headers_to_send, body)\n @headers[line] = headers_to_send\n end\n }\n rescue Exception => e\n @exception = e\n end\n respond_to do |format|\n format.js { render 'shared/execute_bulk_requests' }\n end\n end", "def post_rest_api(endpoint, data, http)\n rest_api_endpoint = \"/classifier-api/v1/#{endpoint}\"\n\n # Create an HTTP POST request against the specified REST API endpoint\n request = Net::HTTP::Post.new(rest_api_endpoint)\n # Set the Content-Type and data of the HTTP POST request\n request.content_type = \"application/json\"\n request.body = data\n # Submit the request\n response = http.request(request)\n # Return the response bosy (JSON containing the result of the POST operation)\n response.body\nend", "def post\n messages.each do |message|\n Flowdock::Client.new(flow_token: @token).post_to_thread(message)\n end\n end", "def post *args, &block\n handle @conn.post *args, &block\n end", "def post\n @response_body = make_request(\"#{api_url}#{endpoint}\", request_body.to_json)\n puts \"GLIMR POST: #{endpoint} - #{request_body.to_json}\" if ENV.key?('GLIMR_API_DEBUG')\n end", "def send\n post_params = {}\n self.parameters.each { |key, value|\n if value.is_a? 
Array\n i = 0\n value.each { |value_value|\n post_params[key.to_s + '[' + i.to_s + ']'] = value_value.to_s\n i += 1\n }\n elsif value.is_a? Hash\n value.each { |value_key, value_value|\n post_params[key.to_s + '[' + value_key.to_s + ']'] = value_value.to_s\n }\n else\n post_params[key.to_s] = value.to_s\n end\n }\n\n url = URI.parse(@@API_URL)\n http_request = Net::HTTP::Post.new(url.path)\n http_request.form_data = post_params\n http_request.basic_auth url.user, url.password if url.user\n\n response = Spree::PAYONE::Proxy::Response.new\n connection = Net::HTTP.new(url.host, url.port)\n load_ca_file connection\n connection.use_ssl = true\n connection.start { |http|\n http_response = http.request(http_request)\n response.response_body= http_response.body\n }\n\n response\n end", "def post endpoint, data\n do_request :post, endpoint, data\n end", "def http_call(payload); end", "def upload_submission(sub_info)\n uri = URI.parse(TARGET_API)\n http = Net::HTTP.new(uri.host, uri.port)\n\n req = Net::HTTP::Post.new(\"/ontologies/#{sub_info['ontology']['acronym']}/submissions\")\n req['Content-Type'] = 'application/json'\n req['Authorization'] = \"apikey token=#{TARGET_APIKEY}\"\n\n # Check if the source BioPortal is pulling the ontology from an URL\n # If yes then we will pull the ontology from this place (allow auto update of the ontology when the ontology is changed in its source URL)\n if sub_info['pullLocation'].nil?\n pull_location = \"#{sub_info['ontology']['links']['download']}?apikey=#{SOURCE_APIKEY}\"\n else\n pull_location = sub_info['pullLocation']\n end\n\n # Extract contacts\n contacts = []\n sub_info['contact'].each do |contact|\n contacts.push({'name': contact['name'],'email': contact['email']})\n end\n\n # Build the json body\n # hasOntologyLanguage options: OWL, UMLS, SKOS, OBO\n # status: alpha, beta, production, retired\n req.body = {\n 'contact': contacts,\n 'hasOntologyLanguage': sub_info['hasOntologyLanguage'],\n 'released': sub_info['released'],\n 'ontology': \"#{TARGET_API}/ontologies/#{sub_info['ontology']['acronym']}\",\n 'description': sub_info['description'],\n 'status': sub_info['status'],\n 'version': sub_info['version'],\n 'homepage': sub_info['homepage'],\n 'documentation': sub_info['documentation'],\n 'publication': sub_info['publication'],\n 'naturalLanguage': sub_info['naturalLanguage'],\n 'pullLocation': pull_location\n }.to_json\n\n #puts req.body.to_s\n response = http.start do |http|\n http.request(req)\n end\n\n return response\nend", "def post(*args, &block); end", "def post_init\n @builder, @parser = Hatetepe::Builder.new, Hatetepe::Parser.new\n @builder.on_write << method(:send_data)\n # @builder.on_write {|data| p \"|--> #{data}\" }\n @parser.on_response << method(:receive_response)\n\n @queue = []\n\n @app = proc {|request| send_request(request) }\n\n self.comm_inactivity_timeout = config[:timeout]\n self.pending_connect_timeout = config[:connect_timeout]\n\n start_tls if config[:ssl]\n end", "def send_post(endpoint, payload = {})\n payload['token'] = @token unless payload['token']\n http = Net::HTTP.new(@root_uri.host, @root_uri.port)\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE if @test_environment\n request = Net::HTTP::Post.new(\"#{@root_uri}#{endpoint}\")\n request.set_form_data(payload)\n process_response(http.request(request))\n end", "def run\n @cluster.retriable(api_name) do\n process(retriable_requests)\n\n # Stop retrying when there are no more requests to retry\n retriable_requests.empty?\n end\n\n responses\n end", 
"def fetch_apis_asynchronously \n threads = []\n threads << Thread.new { @resp1 = RestClient.get 'https://reqres.in/api/unknown' }\n threads << Thread.new { @resp2 = RestClient.get 'https://reqres.in/api/products' }\n threads.each { |thr| thr.join } \n end", "def api_call(payload); end", "def run_request(method, url, body, headers); end", "def submit\n http = Net::HTTP.new(URL.host, URL.port)\n http.use_ssl = true\n http.start { |send| send.request(self) }.body\n end", "def do_post(request, out_stream)\n raise Error.new(\"HTTPS over a proxy is not supported.\") if !@use_http and @proxy_host\n\n reset_response_data()\n\n request.basic_auth(@user_name, @api_key)\n request.add_field('User-Agent', @user_agent)\n\n while true\n begin\n return exec_request(request, out_stream)\n rescue Error => err\n if (err.getCode() == '502' or err.getCode() == '503') and @retry_count > @retry\n @retry += 1\n sleep(@retry * 0.1)\n else\n raise\n end\n end\n end\n end", "def log(data)\n t = Thread.new do\n uri = URI(\"http://logs-01.loggly.com/inputs/.../tag/ost/\")\n req = Net::HTTP::Post.new(uri)\n req['content-type'] = \"content-type:application/x-www-form-urlencoded\"\n req.body = data.to_json\n res = Net::HTTP.start(uri.hostname, uri.port) {|http|\n http.request(req)\n }\n end\nend", "def post_connect(uri, response, body_io); end", "def test_thread_answer\n threads = []\n (1..10).each{\n threads << Thread.new{\n post \"/answer\"\n assert last_response.status == 201\n assert last_response.body.include?('action='), 'not found: action'\n assert last_response.body.include?('hi pre answer'), 'not found: hi pre answer'\n \n action = last_response.body.scan(/action=\\\"([^\\\"]+)\\\"/).first.first\n id = action.scan(/[^\\/]+$/).first\n send_digits = SecureRandom.random_number(3000)\n post \"/digits/%s\" % id, {:Digits => send_digits}\n assert last_response.status == 201, 'bad response'\n assert last_response.body.include?('<Speak >%d</Speak>' % send_digits), 'Not get the digit send %s' % send_digits\n }\n }\n threads.each{|t| t.join}\n threads.clear()\n end", "def call_api method, arguments = {}, &block\n # Ensures only a valid DataContext is used\n unless @context.is_a? DataContext\n block.call nil, 'A context object was not provided.'\n return\n end\n \n unless @context.valid?\n block.call nil, 'A valid context object was not provided.'\n return\n end\n \n # Ensures only valid arguments are used\n query = []\n arguments.each_pair do |key, value|\n if API_VALID_ARGUMENTS.member? key.to_s\n query << \"$#{key}=#{URI.escape value.to_s}\"\n else\n query << \"#{key}=#{URI.escape value.to_s}\"\n end\n end\n \n # Generates timestamp and url\n timestamp = DOL.timestamp\n url = URI.parse [\"#{@context.host}/#{@context.url}/#{method}\", query.join('&')].join '?'\n \n # Creates a new thread, creates an authenticaed request, and requests data from the host\n @mutex.synchronize do\n @active_requests << Thread.new do\n request = Net::HTTP::Get.new [url.path, url.query].join '?'\n request.add_field 'Authorization', \"Timestamp=#{timestamp}&ApiKey=#{@context.key}&Signature=#{signature timestamp, url}\"\n request.add_field 'Accept', 'application/json'\n result = Net::HTTP.start url.host, url.port do |http|\n http.request request\n end\n \n if result.is_a? 
Net::HTTPSuccess\n \n #Cleanup jsonresult.\n result = result.body.gsub(/\\\\+\"/, '\"')\n result = result.gsub /\\\\+n/, \"\"\n result = result.gsub /\\\"\\\"\\{/, \"{\"\n result = result.gsub /}\\\"\\\"/, \"}\"\n\n result = JSON.parse(result)['d']\n \n if (result.include?'results')\n result = result['results'] if result.is_a? Hash\n end\n\n block.call result, nil\n \n else\n block.call nil, \"Error: #{result.message}\"\n end\n \n @mutex.synchronize do\n @active_requests.delete Thread.current\n end\n end\n end\n end", "def http_post_request\n begin\n return http_client.post(http_path_query, compressed_request, http_headers)\n rescue APIKeyError\n log 'error - you must set your api_key.'\n rescue TimeoutError\n log 'fail - timeout while contacting the api server.'\n rescue Exception => e\n log \"fail - exception raised during http post. (#{e.class.name}: #{e.message})\"\n end\n nil\n end", "def post()\n return @http.request(@req)\n end", "def post_data; end", "def http_post(payload)\n LOGGER.info(\"Sending POST request to #{@collector_uri}...\")\n LOGGER.debug(\"Payload: #{payload}\")\n destination = URI(@collector_uri)\n http = Net::HTTP.new(destination.host, destination.port)\n request = Net::HTTP::Post.new(destination.request_uri)\n if destination.scheme == 'https'\n http.use_ssl = true\n end\n request.body = payload.to_json\n request.set_content_type('application/json; charset=utf-8')\n response = http.request(request)\n LOGGER.add(is_good_status_code(response.code) ? Logger::INFO : Logger::WARN) {\n \"POST request to #{@collector_uri} finished with status code #{response.code}\"\n }\n\n response\n end", "def perform_request(method, path, params, body)\n CONNECTION.run_request \\\n method.downcase.to_sym,\n path,\n ( body ? MultiJson.dump(body): nil ),\n {'Content-Type' => 'application/json'}\n end", "def post url, parameters = {}, headers = {}\n @request_meter.mark\n\n try_n_times do\n @current_url = url\n escaped_url = URI.escape url\n\n GapCrawler.logger.debug \"POST: #{escaped_url}\\nParameters: #{parameters}\\nCustom Headers: #{headers}\"\n\n fetcher { @agent.post url, parameters, headers }\n end\n end", "def httppost(url, corpNum, postData, action = '', userID = '', contentsType = '')\n\n headers = {\n \"x-lh-version\" => KAKAOCERT_APIVersion,\n \"Accept-Encoding\" => \"gzip,deflate\",\n }\n\n apiServerTime = @linkhub.getTime(@useStaticIP, @useGAIP)\n\n hmacTarget = \"POST\\n\"\n hmacTarget += Base64.strict_encode64(Digest::SHA256.digest(postData)) + \"\\n\"\n hmacTarget += apiServerTime + \"\\n\"\n\n hmacTarget += KAKAOCERT_APIVersion + \"\\n\"\n\n key = Base64.decode64(@linkhub._secretKey)\n\n data = hmacTarget\n digest = OpenSSL::Digest.new(\"sha256\")\n hmac = Base64.strict_encode64(OpenSSL::HMAC.digest(digest, key, data))\n\n headers[\"x-kc-auth\"] = @linkhub._linkID+' '+hmac\n headers[\"x-lh-date\"] = apiServerTime\n\n if contentsType == ''\n headers[\"Content-Type\"] = \"application/json; charset=utf8\"\n else\n headers[\"Content-Type\"] = contentsType\n end\n\n headers[\"Authorization\"] = \"Bearer \" + getSession_Token(corpNum)\n\n\n uri = URI(getServiceURL() + url)\n\n https = Net::HTTP.new(uri.host, 443)\n https.use_ssl = true\n Net::HTTP::Post.new(uri)\n\n res = https.post(uri.request_uri, postData, headers)\n\n if res.code == \"200\"\n if res.header['Content-Encoding'].eql?('gzip')\n JSON.parse(gzip_parse(res.body))\n else\n JSON.parse(res.body)\n end\n else\n raise KakaocertException.new(JSON.parse(res.body)[\"code\"],\n JSON.parse(res.body)[\"message\"])\n end\n 
end", "def submit\n\t\tset_post_data\n get_response @url\n parse_response\n\tend", "def perform(resource_id, owner_code, repo_id)\n \n @owner_code = owner_code\n @repo_id = repo_id\n \n resourcetree = URIResolver.resolve_references(Resource.to_jsonmodel(resource_id.to_i), ['tree'])\n \n thread_array = []\n \n archival_object_searched = 0\n digital_objects_created = 0\n \n work_q = Queue.new \n\n prepare_tree_nodes(resourcetree['tree'][\"_resolved\"]) do |child|\n Log.info(child.inspect)\n if child['node_type'] == 'archival_object'\n Log.info('ARCHIVAL OBJECT FOUND: '+child['title'])\n archival_id = child['id']\n #If a digital object does not already exist for the archival object, create one\n if !child['instance_types'].include?('digital_object')\n Log.info(\"Adding \" + archival_id.to_s + \" to queue\")\n work_q << archival_id\n archival_object_searched = archival_object_searched + 1\n end\n \n end\n end\n \n responses = []\n osn_errors = []\n no_osn = []\n mutex = Mutex.new\n @countMutex = Mutex.new\n \n @count = 1\n workers = (0...4).map do\n thread = Thread.new do\n begin\n while archival_id = work_q.pop(true)\n Log.info(\"Poppped \" + archival_id.to_s + \" from queue\")\n RequestContext.open(:repo_id => repo_id) do \n archival_object = ArchivalObject.to_jsonmodel(archival_id.to_i)\n \n Log.info('ARCHIVAL OBJECT RETRIEVED: '+archival_object.to_json)\n #create digital object\n \n if (@count % 15 == 0)\n Log.info(\"SLEEPING 30 SECS TO ALLOW TIME_WAITS TO CLEAR>>>>>\")\n sleep(30)\n end\n t = create_digital_object(archival_object)\n mutex.synchronize do\n if (t.has_key?('error'))\n Log.error(t['error'])\n osn_errors << t['osn']\n elsif (t.has_key?('urn_created'))\n Log.info('URN created for' + t['osn'])\n digital_objects_created = digital_objects_created + 1\n elsif (t.has_key?('no_urn'))\n Log.info('No URN found for' + t['osn'])\n no_osn << t['osn']\n end\n end\n end\n end\n rescue ThreadError\n end\n end\n thread_array << thread\n thread.join\n \n end\n \n response = {:resource_id => resource_id, :osn_errors=>osn_errors, :no_osns=>no_osn, :archival_objects_searched => archival_object_searched, :digital_objects_created => digital_objects_created}\n end", "def process_request\n api_response = self.class.post(api_url, :body => build_xml)\n puts api_response if @debug\n response = parse_response(api_response)\n if success?(response)\n success_response(api_response, response)\n else\n failure_response(api_response, response)\n end\n end", "def submit\n self.http_get(self.poll_url)#.tap{|t| STDERR.puts \"Trace: #{caller[1]}: returning #{t}\"}\n end", "def post(*args)\n request, adapter = request_and_adapter_from(args)\n \n with request, adapter do |adapter|\n yield adapter.client if block_given?\n adapter.post request\n end\n end", "def _make_api_call(json_params=nil)\n\n puts \"Crossbar::HTTP - Request: POST #{url}\" if self.verbose\n\n encoded_params = nil\n if json_params != nil\n if self.pre_serialize != nil and self.pre_serialize.is_a? 
Proc\n json_params = self._parse_params json_params\n end\n encoded_params = JSON.generate(json_params)\n end\n\n puts \"Crossbar::HTTP - Params: #{encoded_params}\" if encoded_params != nil and self.verbose\n\n uri = URI(self.url)\n\n if self.key != nil and self.secret != nil and encoded_params != nil\n signature, nonce, timestamp = self._compute_signature(encoded_params)\n params = {\n timestamp: timestamp,\n seq: self.sequence.to_s,\n nonce: nonce,\n signature: signature,\n key: self.key\n }\n uri.query = URI.encode_www_form(params)\n\n puts \"Crossbar::HTTP - Signature Params: #{params}\" if self.verbose\n end\n\n # TODO: Not sure what this is supposed to be but this works\n self.sequence += 1\n\n self._api_call uri, encoded_params\n end", "def send_request method, params = {}\n uri = URI.parse(@api_url)\n if Time.now.to_i > @exp + 3600\n get_acces_token\n @exp = Time.now.to_i\n end\n required = { 'access_token' => @access_token, 'method' => method }\n params = required.merge(params)\n params = URI.escape(params.collect{ |k,v| \"#{k}=#{v}\"}.join('&'))\n http = Net::HTTP.new(uri.host, uri.port)\n req = Net::HTTP::Post.new(uri.request_uri)\n req['User-Agent'] = \"zhzhussupovkz pleer.com-ruby-api\"\n req.body = params\n res = http.request(req)\n if res.code == \"200\"\n data = res.body\n result = JSON.parse(data)\n else\n puts \"Invalid getting data from server\"\n exit\n end\n end", "def send_data\n request = Collector::Request.\n new(self.api_location,\n :user => config[:htpasswd_user],\n :pass => config[:htpasswd_pass])\n\n # convert the array of object to a hash\n server = {\n websites: @websites.map{ |w| w.to_hash(@version).merge({server: config[:client_name].underscore }) }.map{ |w| w[:website] },\n name: config[:client_name].underscore\n }\n\n request.send(server)\n\n end", "def submit(verb, data)\n start do |connection|\n request = http verb\n request.body = data\n connection.request request\n end\n end", "def post; end", "def api_post(endpoint, key, data)\n tries ||= 3\n response = do_post(endpoint, key, data)\n raise unless response.code == '200'\n return if response.body.empty?\n parsed = Oj.load(response.body)\n raise unless parsed['success']\n parsed['data']\n rescue StandardError => e\n tries -= 1\n sleep 2 && retry unless tries.zero?\n log_error(e, update_service: @update_service,\n sentry_extra: { query: data,\n response_body: response&.body,\n language: @wiki.language,\n project: @wiki.project })\n end", "def perform(&block)\n raise Exception.new(\"You must have at least 2 requests\") unless @requests.length > 1\n @responses.clear\n requests.each_slice(Limit).to_a.each do |batch|\n body = {\n :batch => Yajl::Encoder.encode(batch),\n :access_token => Baloo.client_credentials\n }\n Client.post(\"/\", :body => body).each do |response|\n # response['headers'] = Yajl::Parser.parse(response['headers'])\n response['body'] = Yajl::Parser.parse(response['body'])\n yield response\n end\n end\n end", "def exec_post(req, data, exit_on_fail = false)\n response_hash = exec_api_call('POST', req, data, exit_on_fail)\n response_hash[:response]\n end", "def post!\n self.https.request self.http_request # Net::HTTPResponse object\n end", "def post_request(options, path, post_data)\n\n result = {}\n\n http = Net::HTTP.new(ENV['NESSUS_HOST'], options[:port])\n http.use_ssl = @use_ssl\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n\n http.start do |http|\n req = Net::HTTP::Post.new(path)\n\n req['X-ApiKeys'] = \"accessKey=#{ENV['NESSUS_ACCESS_KEY']}; secretKey=#{ENV['NESSUS_SECRET_KEY']}\"\n 
req.body = post_data\n \n resp, data = http.request(req)\n \n if resp.code.eql? '200'\n #print \"Data: \" + JSON.pretty_generate(JSON.parse(resp.body.to_s))\n result = JSON.parse(resp.body.to_s)\n else\n puts \"Error: \" + resp.code.to_s + \"\\n\" + resp.body\n end\n end\n\n return result\n end", "def post_async(body = '', headers = {}, parameters = {}, &block)\n @asynchronous_backend.execute(build_post(body, headers, parameters, &block))\n end", "def execute\n\n case @template[:payload][:method]\n when 'get'\n begin\n out = RestClient.get @template[:payload][:uri]\n response = {:status => 200, :message => \"[i2] GET request on #{@template[:payload][:uri]} executed.\", :id => @template[:payload][:uri], :response => out.to_str}\n rescue Exception => e\n response = {:status => 400, :message => \"Unable to perform GET request, #{e}\"}\n Services::Slog.exception e\n end\n when 'post'\n begin\n\n case @template[:payload][:message]\n when 'form'\n out = RestClient.post @template[:payload][:uri], @template[:payload]\n when 'text/plain'\n out = RestClient.post @template[:payload][:uri], @template[:payload][:content], :content_type => 'text/plain'\n when 'application/javascript'\n if @template[:payload][:content].nil?\n out = RestClient.post @template[:payload][:uri], @template[:payload].to_json, :content_type => 'application/javascript'\n else\n out = RestClient.post @template[:payload][:uri], @template[:payload][:content], :content_type => 'application/javascript'\n end\n when \"application/json\"\n if @template[:payload][:content].nil?\n out = RestClient.post @template[:payload][:uri], @template[:payload].to_json, :content_type => 'application/json'\n else\n out = RestClient.post @template[:payload][:uri], @template[:payload][:content], :content_type => 'application/json'\n end\n when 'application/xml'\n if @template[:payload][:content].nil?\n out = RestClient.post @template[:payload][:uri], @template[:payload].to_xml, :content_type => 'application/xml'\n else\n out = RestClient.post @template[:payload][:uri], @template[:payload][:content], :content_type => 'application/xml'\n end\n when 'text/xml'\n if @template[:payload][:content].nil?\n out = RestClient.post @template[:payload][:uri], @template[:payload].to_xml, :content_type => 'text/xml'\n else\n out = RestClient.post @template[:payload][:uri], @template[:payload][:content], :content_type => 'text/xml'\n end\n when 'text/html'\n out = RestClient.post @template[:payload][:uri], @template[:payload][:content], :content_type => 'text/html'\n end\n\n response = {:status => 200, :message => \"[i2] POST request on #{@template[:payload][:uri]} executed.\", :id => @template[:payload][:uri], :response => out.to_str}\n rescue Exception => e\n response = {:status => 400, :message => \"Unable to perform POST request, #{e}\"}\n end\n when 'put'\n begin\n\n rescue Exception => e\n response = {:status => 440, :message => \"Unable to perform PUT request (not implemented), #{e}\"}\n end\n when 'delete'\n begin\n\n rescue Exception => e\n response = {:status => 440, :message => \"Unable to perform DELETE request (not implemented), #{e}\"}\n end\n end\n response\n end", "def batch(*requests)\n body = requests.map do |request|\n {method: request[0], params: request[1..-1], jsonrpc: '2.0', id: SecureRandom.uuid}\n end\n parsed_response = parse_response(post(body))\n errors = parsed_response.select{|response| response[:error]}\n raise Error, errors if !errors.empty?\n parsed_response.map{|response| response[:result]}\n end", "def api_post(action, data)\n 
api_request(action, data, 'POST')\n end", "def request(method, endpoint, data)\n begin\n @last_response = conn.send(method) do |req|\n req.url endpoint\n req.body = MultiJson.dump(data)\n req.headers['Content-Type'] = 'application/json'\n req.headers['x-stackdriver-apikey'] = api_key\n req.options[:timeout] = 2\n req.options[:open_timeout] = 2\n end\n\n # TODO: Confirm whether 201 is the only success response.\n @last_response.status == 201\n rescue\n false\n end\n end", "def post_request(uri, body, token = nil, manage_errors = true)\n request = Net::HTTP::Post.new(uri.request_uri, initheader = build_headers(token))\n request.body = body.to_json\n return do_request(uri, request, manage_errors) \n end", "def streamData\n\n threads = [] #Thread pool.\n\n @stream = PowertrackStream.new(@url,@user_name, @password)\n\n #t = Thread.new {Thread.pass; consumeStream(stream)}\n t = Thread.new {consumeStream(stream)}\n\n begin\n t.run\n rescue ThreadError => e\n @log.error { \"Error starting consumer thread: #{e.message}\" }\n rescue => e\n @log.error { \"Error starting consumer thread: #{e.message}\" }\n end\n\n threads << t #Add it to our pool (array) of threads.\n\n #OK, add a thread for consuming from @activities.\n #This thread sends activities to the database.\n t = Thread.new {storeActivities}\n\n begin\n t.run\n rescue ThreadError => e\n @log.error { \"Error starting storeActivity thread: #{e.message}\" }\n end\n\n threads << t #Add it to our pool (array) of threads.\n\n threads.each do |t|\n begin\n @log.debug('here')\n t.join\n rescue ThreadError => e\n @log.error { \"Error with thread join: #{e.message}\" }\n rescue => e\n @log.error { \"Error with thread join: #{e.message}\" }\n end\n end\n\n end", "def http_post(*args)\n url = args.shift\n c = Curl::Easy.new url\n yield c if block_given?\n c.http_post *args\n c\n end", "def post_json(endpoint, payload, username = nil, password = nil, endpoint_stub = API_ENDPOINT)\n uri = URI(\"#{endpoint_stub}#{endpoint}\")\n\n res = ''\n Net::HTTP.start(uri.host, uri.port,\n :use_ssl => uri.scheme == 'https', \n :verify_mode => OpenSSL::SSL::VERIFY_NONE) do |http|\n req = Net::HTTP::Post.new(uri.request_uri)\n req.basic_auth(username, password) if username && password\n req['Content-Type'] = 'application/json'\n req.body = payload.to_json\n\n res = http.request(req)\n end\n\n JSON.parse(res.body.to_s)\nend", "def post(*args)\n request, adapter = request_and_adapter_from(args)\n\n with_adapter :post, request, adapter do |adapter|\n yield adapter.client if block_given?\n adapter.post request\n end\n end", "def send_post_request(url, payload, content_type = 'application/json')\n # set the uri\n url = URI(url)\n\n # set http settings\n http = Net::HTTP.new(url.host, url.port)\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n\n # set request\n request = Net::HTTP::Post.new(url)\n request[\"content-type\"] = content_type\n request[\"cache-control\"] = 'no-cache'\n\n # set the payload\n request.body = payload\n\n # send the request and get whatever is the response\n response = http.request(request)\n\n # return the response by reading the body\n return response.read_body\n end", "def post(data = {})\n call data, method: :post\n end", "def iod_request (api, params)\r\n uri = URI(\"http://api.idolondemand.com/1/api/async/#{api}/v1\")\r\n uri.query = URI.encode_www_form(params)\r\n res = Net::HTTP.get_response(uri, p_addr = $proxy_host, p_port = $proxy_port)\r\n jobid = JSON.parse(res.body)['jobID']\r\n puts \"Post request jobid 
[#{jobid}]\"\r\n return job_results(jobid)\r\nend", "def _api_call(uri, body=nil)\n # Create the request\n res = Net::HTTP.start(uri.hostname, uri.port, :use_ssl => uri.scheme == 'https') do |http|\n req = Net::HTTP::Post.new(uri)\n req['Content-Type'] = 'application/json'\n req.body = body\n\n http.request(req)\n end\n\n case res\n when Net::HTTPSuccess\n puts \"Crossbar::HTTP - Response Body: #{res.body}\" if self.verbose\n return JSON.parse(res.body, {:symbolize_names => true})\n else\n raise \"Crossbar::HTTP - Code: #{res.code}, Error: #{res.message}\"\n end\n end", "def post(payload)\n post_like payload, Net::HTTP::Post.new(@uri.path)\n end", "def perform\n # used to make request to get data from AWS API\n response = CurlRequest.new(URL).post\n Rails.logger.info \"==============#{response}============\"\n body = JSON.parse(response.body)\n\n # Calling Service to create data in the database from AWS response\n data = CreateData.new(body)\n data.execute\n end", "def post_call(location,params)\n puts \"#Wrapper Service POST req:- \\n#Host: #{@host} \\n#Location: #{location} \\n#Params: #{params.to_json} \"\n response = @conn.post location, params\n puts \"#Response Code: #{response.status}\"\n return response\n end", "def posts # proxy the posts struct\n @post_cb = lambda{|post,fobj| yield post,fobj }\n self\n end", "def api_request method, params = nil\n\t\t\tconnection = ZenfolioAPI::HTTP.new()\n\t\t\t@response = connection.POST(method, params, @auth.token)\n\t\tend", "def api_request(end_point, payload = {})\n\n request = Net::HTTP::Post.new(request_path(end_point), \n initheader = {'Content-Type' =>'application/json'})\n http = Net::HTTP.new(@base_url.host, @base_url.port)\n http.use_ssl = (@base_url.scheme == 'https')\n request.add_field('X-SWU-API-KEY', @api_key)\n request.body = payload.to_json\n\n response = http.request(request)\n case response\n when Net::HTTPNotFound\n raise \"Invalid API end point: #{end_point} (#{request_path(end_point)})\"\n when Net::HTTPSuccess\n # TODO: do something intelligent with response.body\n if @debug\n puts response.body\n end\n return response\n else\n raise \"Unknown error! 
#{response.code}\"\n end\n\n rescue Errno::ECONNREFUSED\n raise \"Could not connect to #{@base_url.host}!\" \n end", "def post(*args, &block)\n map_method(:post, args, &block)\n end", "def send_post(data_xml,url)\r\n result = @client.post(self.target_uri(url), :body => data_xml , :head => {'Content-Type' => 'application/xml'} ) \r\n raise \"Invalid status #{result.http_status} from server #{@host}:#{@port}\" if(result.http_status != '200') \r\n #reply = Reply.from_xml(result.http_body)\r\n if block_given?\r\n yield(result.http_body)\r\n else\r\n result.http_body\r\n end\r\n end", "def post(url, payload)\n url = URI.parse(url)\n http = Net::HTTP.new(url.host, url.port)\n http.use_ssl = true\n request = Net::HTTP::Post.new(url.path+'?access_token=verysecret')\n request.content_type = 'application/json'\n request.body = JSON.generate(payload)\n response = http.start {|http| http.request(request) }\n begin\n return JSON.parse(response.body)\n rescue\n # Log as a problematic case with rule number and line\n $problems.write \"#{$index}, #{payload}, #{response.body}\\n\"\n return nil\n end\nend", "def run!\n loop do\n begin\n job = Job.new(get_apps)\n job.fetch_targets!\n post_messages!(job.to_hash)\n sleep(@interval)\n rescue Errno::ECONNREFUSED => ce\n puts \"\"\n sleep(@interval)\n end\n\n end\n end", "def call(call_url = self.call_url, arguments = self.arguments, resending = false)\n Postage.logger.info \"Sending Request [UID: #{self.uid} URL: #{call_url}] \\n#{arguments.inspect}\\n\"\n \n self.arguments[:uid] = self.uid\n self.arguments[:plugin_version] = Postage::PLUGIN_VERSION\n \n body = { :api_key => Postage.api_key, :arguments => arguments }.to_json\n \n Timeout::timeout(5) do\n self.response = self.class.post( call_url, :headers => HEADERS, :body => body )\n end\n \n Postage.logger.info \"Received Response [UID: #{self.uid}] \\n#{self.response.inspect}\\n\"\n \n resend_failed_requests unless resending\n return Postage::Response.new(self.response)\n \n rescue Timeout::Error, SocketError, Exception => e\n Postage.logger.error \"Failure [UID: #{self.uid}] \\n#{e.inspect}\"\n \n store_failed_request(e) unless resending \n return nil # no response generated\n end", "def post; FacebookPostJob.new.async.perform(self.id); end", "def perform\n args = {\n :method => method,\n :url => url,\n :headers => headers,\n :timeout => 120,\n :open_timeout => 30\n }\n args[:payload] = payload.to_json if payload\n\n Response.new RestClient::Request.execute(args)\n rescue RestClient::Exception => e\n response = e.response\n\n if e.is_a?(RestClient::RequestTimeout)\n response = JSON.generate({:error => {:cause => 'REQUEST_TIMEDOUT'}})\n end\n\n ErrorResponse.new response\n end", "def api_call(action, xml_data)\n require 'net/http'\n require 'uri'\n current_url = URL;\n if action != 'get-queue'\n get_server(@@_target_type_options[@@_target_type]);\n end\n uri = URI.parse(current_url+\"/\"+action)\n http = Net::HTTP.new(uri.host,uri.port)\n request=Net::HTTP::Post.new(uri.request_uri);\n request.set_form_data({ \"queue\"=> array_to_xml(xml_data,\"\")})\n return http.request(request).body\n end", "def send_request method, params, &block\n message = {\n jsonrpc: \"2.0\",\n method: method,\n params: params,\n id: @next_request_id\n }\n json = message.to_json\n requests[@next_request_id] = Request.new(@next_request_id, &block)\n envelope = \"Content-Length: #{json.bytesize}\\r\\n\\r\\n#{json}\"\n queue envelope\n @next_request_id += 1\n end", "def start_post\n request_file_path\n ipa_file_path = get_file_path\n 
request_notes\n release_notes = get_notes\n distribution_list = get_distribution_list #calls the fetch && confirm methods\n post_app(get_conf('end_point'), get_conf('api_key'), get_conf('team_key'), ipa_file_path, release_notes, distribution_list)\n end", "def submit\r\n @sess.DoRequests(@request_set)\r\n end", "def test_answer_fibers\n threads = []\n ids = []\n mutex = Mutex.new\n (1..10).each{\n threads << Thread.new{\n post \"/answer\"\n assert last_response.body.include?('action='), 'not found: action'\n assert last_response.body.include?('hi pre answer'), 'not found: hi pre answer'\n \n action = last_response.body.scan(/action=\\\"([^\\\"]+)\\\"/).first.first.strip\n id = action.scan(/[^\\/]+$/).first\n mutex.synchronize{\n ids << id\n }\n }\n }\n threads.each{|t| t.join}\n threads.clear()\n\n threads = []\n ids.each{|uid|\n threads << Thread.new(uid){|id|\n send_digits = SecureRandom.random_number(3000)\n post \"/digits/%s\" % id, {:Digits => send_digits}\n assert last_response.status == 201, 'bad response'\n assert last_response.body.include?('<Speak >%d</Speak>' % send_digits), 'Not get the digit send'\n }\n }\n threads.each{|t| t.join}\n threads.clear()\n end", "def test_concurrent_requests_to_same_url_different_http_method\n delay = $config[\"nginx\"][\"proxy_read_timeout\"] - 1\n assert_operator(delay, :>, 0)\n\n start_time = Time.now.utc\n\n get_thread = Thread.new do\n Thread.current[:response] = Typhoeus.get(\"http://127.0.0.1:9080/api/delay-sec/#{delay}\", http_options)\n end\n\n # Wait 1 second to ensure the first GET request is fully established to the\n # backend.\n sleep 1\n\n post_thread = Thread.new do\n Thread.current[:response] = Typhoeus.post(\"http://127.0.0.1:9080/api/delay-sec/#{delay}\", http_options)\n end\n\n get_thread.join\n post_thread.join\n total_time = Time.now.utc - start_time\n\n assert_response_code(200, get_thread[:response])\n assert_response_code(200, post_thread[:response])\n\n # Sanity check to ensure the 2 requests were made in parallel and\n # overlapped.\n assert_operator(get_thread[:response].total_time, :>=, delay - BUFFER_TIME_LOWER)\n assert_operator(get_thread[:response].total_time, :<, delay + BUFFER_TIME_UPPER)\n assert_operator(post_thread[:response].total_time, :>=, delay - BUFFER_TIME_LOWER)\n assert_operator(post_thread[:response].total_time, :<, delay + BUFFER_TIME_UPPER)\n assert_operator(total_time, :>=, delay + 1 - BUFFER_TIME_LOWER)\n assert_operator(total_time, :<, delay + (BUFFER_TIME_UPPER * 2))\n assert_operator(total_time, :<, (delay * 2) - 1)\n end", "def post_data(body)\r\n raise ConfigError, 'no json_records' if body.empty?\r\n # Create REST request header\r\n header = get_header(body.bytesize)\r\n # Post REST request \r\n response = RestClient.post(@uri, body, header)\r\n\r\n return response\r\n end", "def post\r\n end", "def send_post_request(body)\n response = HTTParty.post(\n url.to_str,\n :body => body.to_json,\n :headers => { 'Content-Type' => 'application/json' },\n :basic_auth => auth,\n verify: false\n )\n\n logger.debug(\n \"POST Request to Turbulence: \\n\" +\n \"\\tURL: #{url.to_str}\\n\" +\n \"\\tAUTH: #{auth}\\n\" +\n \"\\tBODY: #{body.to_json}\"\n )\n\n unless response.success?\n raise \"Request to turbulence(#{url.to_str}) failed, \\n\" +\n \"\\tauth: #{auth.inspect}, \\n\" +\n \"\\tBody: #{body}, \\n\" +\n \"\\tResponse: #{response.body}\"\n end\n response_body = JSON.parse(response.body)\n return response_body[\"ID\"]\n end", "def send_request; end", "def process\n \n ua = Net::HTTP.new(SERVER, 443)\n 
ua.use_ssl = true\n \n # Set default hash function to HMAC SHA-512\n @PARAM_HASH['TPS_HASH_TYPE'] = 'HMAC_SHA512'\n\n # Checks presence of CA certificate\n if File.directory?(RootCA)\n ua.ca_path = RootCA\n ua.verify_mode = OpenSSL::SSL::VERIFY_PEER\n ua.verify_depth = 3\n else\n puts \"Invalid CA certificates directory. Exiting...\"\n exit\n end\n \n # Sets CUSTOMER_IP parameter\n begin\n \t@PARAM_HASH[\"CUSTOMER_IP\"] = request.env['REMOTE_ADDR']\n rescue Exception\n end\n\n # Response version to be returned\n @PARAM_HASH[\"RESPONSEVERSION\"] = '8'\n\n # Generate the query string and headers. Chooses which API to make request to.\n case @api\n when \"bpdailyreport2\"\n calc_report_tps\n path = \"/interfaces/bpdailyreport2\"\n query = \"ACCOUNT_ID=#{@ACCOUNT_ID}&\" + uri_query(@PARAM_HASH)\n when \"stq\"\n calc_report_tps\n path = \"/interfaces/stq\"\n query = \"ACCOUNT_ID=#{@ACCOUNT_ID}&\" + uri_query(@PARAM_HASH)\n when \"bp10emu\"\n calc_tps\n path = \"/interfaces/bp10emu\"\n query = \"MERCHANT=#{@ACCOUNT_ID}&\" + uri_query(@PARAM_HASH)\n # puts \"****\"; puts uri_query(@PARAM_HASH).inspect\n when \"bp20rebadmin\"\n calc_rebill_tps\n path = \"/interfaces/bp20rebadmin\"\n query = \"ACCOUNT_ID=#{@ACCOUNT_ID}&\" + uri_query(@PARAM_HASH)\n end\n queryheaders = {\n 'User-Agent' => 'BluePay Ruby Library/' + RELEASE_VERSION,\n 'Content-Type' => 'application/x-www-form-urlencoded'\n }\n # Post parameters to BluePay gateway\n # Resuce SSL error and retry with ca_file absolute path.\n begin\n headers, body = ua.post(path, query, queryheaders)\n rescue OpenSSL::SSL::SSLError\n ua.ca_file = File.expand_path(File.dirname(__FILE__)) + \"/\" + RootCAFile\n headers, body = ua.post(path, query, queryheaders)\n end\n\n # Split the response into the response hash.\n @RESPONSE_HASH = {}\n if path == \"/interfaces/bp10emu\"\n response = headers[\"Location\"].split(\"?\")[1]\n else\n response = headers.body\n end\n if path == \"/interfaces/bpdailyreport2\"\n response\n else\n response.split(\"&\").each do |pair| \n (key, val) = pair.split(\"=\")\n val = \"\" if val == nil\n @RESPONSE_HASH[URI.unescape(key)] = URI.unescape(val) \n end\n end\n end", "def post url\n Timeout.timeout(60) do\n puts \"POST: #{url}\" if config[:debug]\n \n tags = (Hpricot(open(\"http://del.icio.us/url/check?url=#{CGI.escape(url)}\"))/\n '#top-tags'/'li')[0..10].map do |li| \n (li/'span').innerHTML[/(.*?)<em/, 1]\n end.join(\" \")\n puts \"POST-TAGS: #{tags}\" if config[:debug]\n \n description = begin\n Timeout.timeout(5) do \n (((Hpricot(open(url))/:title).first.innerHTML or url) rescue url)\n end\n rescue Timeout::Error\n puts \"POST: URL timeout\" if config[:debug]\n url\n end\n \n query = { :url => url, :description => description, :tags => tags, :replace => 'yes' }\n\n http = Net::HTTP.new('api.del.icio.us', 443) \n http.use_ssl = true \n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n response = http.start do |http|\n post_url = '/v1/posts/add?' 
+ query.map {|k,v| \"#{k}=#{CGI.escape(v)}\"}.join('&')\n puts \"POST: post url #{post_url}\" if config[:debug]\n req = Net::HTTP::Get.new(post_url, {\"User-Agent\" => \"Kirby\"})\n req.basic_auth config[:delicious_user], config[:delicious_pass]\n http.request(req)\n end.body\n\n puts \"POST: #{response.inspect}\" if config[:debug]\n end\n rescue Exception => e\n puts \"POST: #{e.inspect}\" if config[:debug]\n end", "def api_post(method: nil, item: [], params: {})\n raise ArgumentError, 'Missing method in API request' unless method\n\n login(@host) if Time.new.to_i > @session_timeout\n\n request = {}\n request[:method] = method\n request[:params] = [[item || []], params.to_h]\n # This is how we create request params once all methods use structs\n # request[:params] = [[item || []], params.to_h]\n # We use a StandardError since it is based on the HTTP response code with a JSON payload definition\n begin\n resp = @http.post(@uri, request.to_json, @headers)\n JSON.parse(resp.body)['result']['result']\n rescue StandardError\n puts \"The following error has occurred #{JSON.parse(resp.body)['error']['message']}\"\n end\n end", "def submit\n raise EbayTraderError, 'Cannot post an eBay API request before application keys have been set' unless EbayTrader.configuration.has_keys_set?\n\n uri = EbayTrader.configuration.uri\n\n http = Net::HTTP.new(uri.host, uri.port)\n http.read_timeout = http_timeout\n\n if uri.port == 443\n # http://www.rubyinside.com/nethttp-cheat-sheet-2940.html\n http.use_ssl = true\n verify = EbayTrader.configuration.ssl_verify\n if verify\n if verify.is_a?(String)\n pem = File.read(verify)\n http.cert = OpenSSL::X509::Certificate.new(pem)\n http.key = OpenSSL::PKey::RSA.new(pem)\n end\n http.verify_mode = OpenSSL::SSL::VERIFY_PEER\n else\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n end\n\n end\n\n post = Net::HTTP::Post.new(uri.path, headers)\n post.body = xml_request\n\n begin\n response = http.start { |http| http.request(post) }\n rescue OpenSSL::SSL::SSLError => e\n # SSL_connect returned=1 errno=0 state=SSLv3 read server certificate B: certificate verify failed\n raise EbayTraderError, e\n rescue Net::ReadTimeout\n raise EbayTraderTimeoutError, \"Failed to complete #{call_name} in #{http_timeout} seconds\"\n rescue Exception => e\n raise EbayTraderError, e\n ensure\n EbayTrader.configuration.counter_callback.call if EbayTrader.configuration.has_counter?\n end\n\n @http_response_code = response.code.to_i.freeze\n\n # If the call was successful it should have a response code starting with '2'\n # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html\n raise EbayTraderError, \"HTTP Response Code: #{http_response_code}\" unless http_response_code.between?(200, 299)\n\n if response['Content-Encoding'] == 'gzip'\n @xml_response = ActiveSupport::Gzip.decompress(response.body)\n else\n @xml_response = response.body\n end\n end", "def test_concurrent_requests_to_same_url_different_http_method\n start_time = Time.now.utc\n\n get_thread = Thread.new do\n Thread.current[:response] = Typhoeus.get(\"http://127.0.0.1:9080/api/delay-sec/5\", http_options)\n end\n\n # Wait 1 second to ensure the first GET request is fully established to the\n # backend.\n sleep 1\n\n post_thread = Thread.new do\n Thread.current[:response] = Typhoeus.post(\"http://127.0.0.1:9080/api/delay-sec/5\", http_options)\n end\n\n get_thread.join\n post_thread.join\n total_time = Time.now.utc - start_time\n\n assert_equal(200, get_thread[:response].code)\n assert_equal(200, post_thread[:response].code)\n\n # 
Sanity check to ensure the 2 requests were made in parallel and\n # overlapped.\n assert_operator(total_time, :>, 5)\n assert_operator(total_time, :<, 9)\n end", "def post_request(secureNetId, secureKey, url)\n uri = URI.parse(url) # Parse the URI\n http = Net::HTTP.new(uri.host, uri.port) # New HTTP connection\n http.use_ssl = true # Must use SSL!\n req = Net::HTTP::Post.new(uri.request_uri) # HTTP POST request \n body = {} # Request body hash\n yield body # Build body of request\n req.body = body.to_json # Convert hash to json string\n req[\"Content-Type\"] = 'application/json' # JSON body\n req[\"Origin\"] = 'worldpay.com' # CORS origin\n req.basic_auth secureNetId, secureKey # HTTP basic auth\n res = http.request(req) # Make the call\n return JSON.parse(res.body) # Convert JSON to hashmap\nend", "def http_post_request(req_body)\n\t\t#New http request (uri library deals with port and host on its own when parsing the url)\n\t\thttp = Net::HTTP.new(@uri.host, @uri.port)\n\t\t#Original api url get does not need SSL (bad solution but any other way would not seem to work properly)\n\t\tif caller[1][/`.*'/].nil? or not (caller[1][/`.*'/][1..-2] == \"initialize\")\n\t\t\t#Https security stuff (don't need security when getting initial api url)\n\t\t\thttp.use_ssl = true\n\t\t\thttp.verify_mode = OpenSSL::SSL::VERIFY_NONE\n\t\tend\n\t\t#Post request using uri\n\t\trequest = Net::HTTP::Post.new(@uri.request_uri)\n\t\t#Sets request to use basic authentication using the given username and api_key\n\t\trequest.basic_auth(@username, @api_key)\n\t\t#Sets request to use json content type\n\t\trequest.content_type = \"application/json\"\n\t\t#Sets request body to json file passed\n\t\trequest.body = req_body\n\t\t#Executes setup request and returns body\n\t\thttp.request(request).body\n\tend", "def post\n resource.post(request, response)\n end", "def method_missing(method, *args, &block)\n data = { method: method, parameters: args }\n request.post(path: nodes_path, data: data, auth_token: auth_token)\n end", "def test_proxies_concurrent_requests_properly\n hydra = Typhoeus::Hydra.new\n requests = Array.new(20) do |index|\n request = Typhoeus::Request.new(\"http://127.0.0.1:9080/api/echo_delayed_chunked\", http_options.deep_merge({\n :params => {\n :input => \"#{unique_test_id}-#{index}-#{SecureRandom.hex(40)}\",\n },\n }))\n hydra.queue(request)\n request\n end\n hydra.run\n\n assert_equal(20, requests.length)\n requests.each do |request|\n assert_response_code(200, request.response)\n assert(request.original_options[:params][:input])\n assert_equal(request.original_options[:params][:input], request.response.body)\n end\n end", "def post(url, payload, options={}, &block)\n http_options = options.merge(@basic_options)\n if block_given?\n RestClient.post(url, payload, http_options, &block)\n else\n RestClient.post(url, payload, http_options)\n end\n end", "def post\n end", "def post(body)\n url = @endpoint + token_path\n params = {\n :headers => {'Content-Type' => 'application/x-www-form-urlencoded', 'Accept' => 'application/json'},\n :url => url,\n :body => body\n }\n\n Connection.new(:post, params).fetch\n end", "def post_json(path, body)\n uri = build_uri(path)\n puts \"*** POST #{path}\"\n puts JSON.pretty_generate(body)\n\n post_request = Net::HTTP::Post.new(uri, 'Content-Type' => 'application/json')\n post_request.body = JSON.generate(body)\n\n response = Net::HTTP.start(uri.hostname, uri.port, :use_ssl => true) do |http|\n http.request(post_request)\n end\n\n puts \"HTTP #{response.code}\"\n result 
= JSON.parse(response.body)\n puts result[:result]\n result\nend", "def submit_order()\n\tputs \"Submitting order\"\n\tdata = create_order()\n\tresponse = request_post(\"/api/order\", data)\n\tputs response.body\nend", "def post_process\n [status, headers, body]\n end" ]
[ "0.6468379", "0.6223869", "0.6221298", "0.61462307", "0.61395264", "0.61217654", "0.60459155", "0.6043673", "0.6006896", "0.59055287", "0.5905483", "0.58910966", "0.5865252", "0.5849763", "0.58276635", "0.58213305", "0.5800709", "0.57840395", "0.57711834", "0.576592", "0.5749597", "0.57434756", "0.5733103", "0.572721", "0.5726292", "0.57044035", "0.5698833", "0.5683449", "0.5680842", "0.5672787", "0.5636754", "0.56309944", "0.56290984", "0.56187063", "0.56162506", "0.5613502", "0.5574122", "0.5572524", "0.5563946", "0.5560369", "0.55480105", "0.5539338", "0.5538748", "0.55373824", "0.5515666", "0.5513006", "0.5508576", "0.550091", "0.5500826", "0.54975015", "0.5491669", "0.54906213", "0.54878664", "0.5479854", "0.5479739", "0.5479021", "0.5474883", "0.547327", "0.5471485", "0.5471239", "0.54677725", "0.546633", "0.54647845", "0.54576033", "0.54560924", "0.545431", "0.545208", "0.54503214", "0.5449427", "0.5444119", "0.5443461", "0.5434561", "0.5432209", "0.5423508", "0.5417442", "0.5416257", "0.54110634", "0.540369", "0.5397732", "0.53886056", "0.5371264", "0.53668845", "0.53666276", "0.5363604", "0.5361442", "0.5355049", "0.5354806", "0.5351997", "0.53489363", "0.53468686", "0.5337887", "0.53338766", "0.53310055", "0.53304297", "0.5329232", "0.5322623", "0.53214824", "0.5320857", "0.53175014", "0.5316183", "0.5314749" ]
0.0
-1
Returns the body of get request, with option for passing a hash for filtering parameters
def get_table(scaffold, **filters)
  get_url = "#{@device_address}/admin/scaffolds/#{scaffold}/index.json?api_key=#{@device_api_key}"
  json_body = JSON.parse(Excon.get(get_url).body)
  if filters.any?
    filters.each do |name, value|
      json_body.keep_if { |return_item| return_item[name.to_s] == value.to_s }
    end
  end
  json_body
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get\n request_object.get_query\n end", "def body_params(request)\n body = request.body.read\n return {} if body.empty?\n\n JSON\n .parse(body)\n .each_with_object({}) { |(key, value), res| res[key] = filtered_value(key, value) }\n end", "def http_get_body(uri, credentials=nil, headers={})\n response = http_request(:get, uri, credentials, headers)\n\n ### filter body\n if response\n return response.body_utf8\n end\n\n nil\n end", "def get_request(hash=true)\n resp = RestClient.get get_url\n hash ? Hash.from_xml(resp).it_keys_to_sym : resp\n end", "def get(route='', params = {}, filter = nil)\n url = base_url + route\n unless filter.nil?\n url << '/' + \"(#{ERB::Util.url_encode(filter)})\"\n end\n url << '.json'\n begin\n JSON.parse(RestClient.get(url, params: params))\n rescue RestClient::ResourceNotFound => e\n Object.new\n end\n end", "def get_params\n params.fetch(:get, {})\n end", "def get(params, url=\"https://baconipsum.com/api?\")\n if params[\"paras\"] && params[\"sentences\"]\n puts \"Just a warning that sentences override paras...\"\n end\n endpoint = url + URI.encode_www_form(params)\n print \"Endpoint is : \"\n puts endpoint\n body = open(endpoint).read\n return body\nend", "def get(params = {})\n request! do\n api[url.path + to_query(params)].get(API_HEADERS)\n end\n end", "def http_get_body(uri)\n response = http_get(uri)\n\n ### filter body\n if response\n return response.body.encode('UTF-8', {:invalid => :replace, :undef => :replace, :replace => '?'})\n end\n\n nil\n end", "def get(path, query={})\n request_json :get, path, query\n end", "def query_parameters\n request.GET\n end", "def query\n\n JSON.parse(Net::HTTP.get(self.build_uri))\n\n end", "def get_body\n request_object.body\n end", "def body_retrieve\n ret = Hash.new\n ret[:access_token] = self.access_token\n ret[:type] = self.institution\n ret[:email] = self.email\n ret.merge(body)\n end", "def get path, header={}, body_string_or_hash=\"\"\n env.http 'GET', path, header, body_string_or_hash\n end", "def body\n { 'query' => query, 'variables' => variables }\n end", "def body\n { 'query' => query, 'variables' => variables }\n end", "def body\n { 'query' => query, 'variables' => variables }\n end", "def body\n { 'query' => query, 'variables' => variables }\n end", "def request_body\n criteria = @options[:criteria] || @options\n filter_elements = {}\n\n criteria.each do |c|\n searchValue = { \"@operator\" => c[:operator] }\n\n if c[:value].is_a?(Array) && c[:type] == 'SearchDateField'\n searchValue[\"platformCore:searchValue\"] = c[:value][0].to_s\n searchValue[\"platformCore:searchValue2\"] = c[:value][1].to_s\n else\n searchValue[\"platformCore:searchValue\"] = c[:value]\n end\n\n filter_elements[\"platformCore:#{c[:field]}\"] = searchValue\n end\n\n {\n 'platformMsgs:pageIndex' => @options.fetch(:page, 1),\n 'platformMsgs:getDeletedFilter' => filter_elements\n }\n end", "def request(method, path, body = nil, headers = {})\n body = @conn.public_send(method, path, body, auth_headers.merge(headers)).body\n return {} if body.empty?\n body\n end", "def body\n connection.get(@url.path).body\n end", "def get(user_auth=false)\n http = new_http()\n request = Net::HTTP::Get.new(@uri.request_uri)\n initialize_header(request, @header)\n authenticate_user(request) if user_auth == true\n request.body = @payload if @payload\n log_request(request)\n response = http.request(request)\n log_response(response)\nend", "def get(params={})\n body = get_body(params)\n raw_request = post(body)\n\n parse(raw_request)\n end", "def 
body(path)\n get(path).body\n end", "def request_body\n request_body = {\n :active => 'true',\n :category => 'Other',\n :subcategory => 'Other',\n :u_service_provider => 'Duke University',\n :u_it_service => 'Public Computing Labs',\n :service_offering => 'Public Computing Labs Offering',\n :contact_type => 'email',\n :description => 'This is the lengthier description!',\n :impact => 'Individual',\n :urgency => 'Medium',\n :priority => 'Low',\n :incident_state => 'New',\n :notify => '1',\n :state => '1',\n :assignment_group => 'Service Desk-OIT',\n :short_description => 'This is the title of the the ticket!',\n :caller_id => CALLER\n }\n end", "def body\n return @final if @final\n\n @final = {}\n @final.merge! query_body if @query && @query.any?\n @final.merge! sort_body\n @final.merge! filter_body if @filters.any?\n\n @final[:from] = (@current_page - 1) * @page_size if @current_page\n @final[:size] = @page_size if @page_size\n\n @final[:explain] = true\n\n @final\n end", "def incoming_body_filter(attr_hash)\n attr_hash\n end", "def get(path, params = {}, headers = {})\n request(:get, path, params, headers).body\n end", "def outgoing_body_filter(attr_hash)\n attr_hash\n end", "def build_body(body, request_options, method)\n if method == :GET && body.empty?\n return nil\n end\n\n # merge optional special request options to the body when it\n # doesn't have to be in the array format\n body.merge!(request_options.data) if body.is_a?(Hash) && method != :GET\n to_json(body)\n end", "def get(params)\n request.method = :get\n execute(params)\n end", "def request_body\n MAPPING.keys.inject({}) do |mem, e|\n next mem unless value = send(e)\n mem.merge!(e.to_s => value.to_json)\n end\n end", "def get(path, params = {})\n params[:fields] = build_fields_param(params[:fields]) if params[:fields]\n url = build_url(path, params)\n response = Net::HTTP.get_response(url)\n JSON.parse(response.body).deep_symbolize_keys\n end", "def get(parameters = {}, headers = {}, &block)\n build_get(parameters, headers, &block).invoke\n end", "def get()\n return @http.request(@req)\n end", "def search\n response = make_request\n begin\n response_hash = JSON.parse(response.body)\n rescue JSON::ParserError\n raise RequestException\n else\n response_hash\n end\n end", "def query(action, hash = {})\n # uri = URI.parse(\"https://130.59.10.31\")\n # http = Net::HTTP.new(uri.host, uri.port)\n # http.use_ssl = true\n # http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n #\n # request = Net::HTTP::Get.new(uri.request_uri)\n #\n # response = http.request(request)\n # response.body\n # response.status\n # response[\"header-here\"] # All headers are lowercase\n uri = URI.parse(@url + \"/api/xml?action=#{action}\")\n hash.each_pair do |key, val|\n if val\n if key == \"filter\" or key == \"sort\"\n uri.query += val.query\n else\n uri.query += \"&\" + key + \"=\" + CGI::escape(\"#{val}\")\n end\n end\n end\n http = Net::HTTP.new(uri.host, uri.port)\n if uri.scheme == \"https\"\n http.use_ssl=true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n end\n request = Net::HTTP::Get.new(uri.request_uri)\n # logger = Logger.new('log/development.log')\n # logger.info(url.path + \"?\" + url.query)\n if @sessionid\n request.add_field(\"Cookie\", \"BREEZESESSION=\"+@sessionid)\n end\n puts \"ACS query - request: \" + request.path\n response = http.request(request)\n puts \"ACS query - response: \" + response.body.inspect\n return response\n end", "def request_body\n criteria = @options[:criteria] || @options\n\n # TODO wish there was a cleaner way to do 
this: we need the namespace of the record\n example_instance = @klass.new\n namespace = example_instance.record_namespace\n\n # extract the class name without the module\n class_name = @klass.to_s.split(\"::\").last\n\tif class_name == \"InventoryItem\"\n\t class_name = \"Item\"\n\tend\n search_record = {}\n\n criteria.each_pair do |condition_category, conditions|\n search_record[\"#{namespace}:#{condition_category}\"] = conditions.inject({}) do |h, condition|\n h[\"platformCommon:#{condition[:field]}\"] = {\n \"platformCore:searchValue\" => condition[:value]\n }\n\n (h[:attributes!] ||= {}).merge!({\n \"platformCommon:#{condition[:field]}\" => {\n 'operator' => condition[:operator]\n }\n })\n\n h\n end\n end\n\n {\n 'searchRecord' => search_record,\n :attributes! => {\n 'searchRecord' => {\n 'xsi:type' => \"#{namespace}:#{class_name}Search\"\n },\n }\n }\n end", "def form_body\n if body.is_a?(Hash)\n body.map do |k,v|\n [AwsRequest.aws_encode(k), AwsRequest.aws_encode(v)].join(\"=\")\n end.join(\"&\")\n else\n body\n end\n end", "def get_body_params\n return @_get_body_params ||= begin\n fields = self.get_allowed_parameters || self.get_fields\n\n # Filter the request body.\n body_params = request.request_parameters.select { |p| fields.include?(p) }\n\n # Add query params in place of missing body params, if configured.\n if self.class.accept_generic_params_as_body_params\n (fields - body_params.keys).each do |k|\n if (value = params[k])\n body_params[k] = value\n end\n end\n end\n\n # Filter primary key if configured.\n if self.class.filter_pk_from_request_body\n body_params.delete(self.get_model&.primary_key)\n end\n\n # Filter fields in exclude_body_fields.\n (self.class.exclude_body_fields || []).each { |f| body_params.delete(f.to_s) }\n\n body_params\n end\n end", "def query_parameters\n params = {}\n params['filter'] = @optional['filter'] if @optional['filter']\n params\n end", "def get(params={})\n rpc_call :get, params\n end", "def url_body(params)\n\n # user can define scope as String or Array\n joined_scope = scope.join(',') if scope.respond_to?(:join)\n\n {\n :response_type => response_type,\n :client_id => client_id,\n :redirect_uri => redirect_uri,\n :scope => joined_scope\n }.merge(params)\n end", "def get options\n rest_request({ method: :get }.merge(options))\n end", "def get options\n rest_request({ method: :get }.merge(options))\n end", "def make_request_body(opts, headers); end", "def request_query_params\n {}\n end", "def http_params\n {}\n end", "def body\n fetch\n @body\n end", "def query ; @request.params ; end", "def to_body\r\n to_hash\r\n end", "def content_get\n # compress GET requests to try and stay under that 8KB request limit\n deflated_request = Zlib::Deflate.deflate(@request, 9)[2..-5]\n # strict_encode64() isn't available? sub out the newlines\n @request_params[\"SAMLRequest\"] = Base64.encode64(deflated_request).gsub(/\\n/, \"\")\n \n Logging.debug \"SAMLRequest=#{@request_params[\"SAMLRequest\"]}\"\n uri = Addressable::URI.parse(@URL)\n if uri.query_values == nil\n uri.query_values = @request_params\n else\n # solution to stevenwilkin's parameter merge\n uri.query_values = @request_params.merge(uri.query_values)\n end\n url = uri.to_s\n #Logging.debug \"Sending to URL #{url}\"\n return url\n end", "def request_body\n buffer = ''\n\n xml = Builder::XmlMarkup.new(target: buffer)\n\n xml.platformMsgs(:searchId, @options[:search_id])\n xml.platformMsgs(:pageIndex, @options[:page].present? ? 
@options[:page] : 2)\n\n buffer\n end", "def get_search_body conditions, sort, per_page, page\n query_body = get_query_body(conditions)\n size = per_page\n from_index = (page.to_i - 1) * size.to_i\n {\n query: query_body,\n from: from_index,\n size: size,\n sort: sort\n }\n end", "def params() request.params end", "def recieve_hash\n\t\t#recvd_json = params[:q_data]\n\t\t#p recvd_json\n\n\tend", "def get(uri, parameters = T.unsafe(nil), referer = T.unsafe(nil), headers = T.unsafe(nil)); end", "def query_param\n ::Genghis::JSON.decode(params.fetch('q', '{}'))\n end", "def query_param\n ::Genghis::JSON.decode(params.fetch('q', '{}'))\n end", "def get(data = {})\n call data, method: :get\n end", "def document\n json = Net::HTTP.get_response URI.parse(query_string(@query))\n json.body\n end", "def body\n @body ||= @request.body.read\n end", "def get_thing(query_hash)\n query = {\"type\" => \"/type/edition\"}.merge(query_hash)\n response = open(@api_url + \"/things?query=\" + CGI.escape(query.to_json) ).read\n JSON.parse(response)\n end", "def do_query(params) \n link = @api_url + \"/brief/json/\" + params\n return MultiJson.load( open(link).read )\n end", "def get(url, params = {})\n client.get(url, params).body\n end", "def get(payload = {})\n payload = with_credentials(payload)\n\n @uri.query = URI.encode_www_form(payload) unless payload.empty?\n\n run Net::HTTP::Get.new(@uri)\n end", "def body\n @params.to_json\n end", "def get_query_params\n {}\n end", "def get query = nil\n\t\tif (query = make_query query)\n\t\t\[email protected] = @uri.query ? @uri.query+\"&\"+query : query\n\t\tend\n\t\tfull_path = @uri.path + (@uri.query ? \"?#{@uri.query}\" : \"\")\n\t\t\t\n\t\treq = Net::HTTP::Get.new(full_path)\n\t\t# puts Net::HTTP::Proxy(@proxy_host, @proxy_port, @proxy_user, @proxy_pwd).get(@uri)\n\t\tdo_http req\n\tend", "def get(params)\n\n request_url = create_url(params)\n begin\n @json_response = HTTParty.get(URI.encode(request_url))\n rescue SocketError\n @response_hash = Hashie::Mash.new({})\n return\n end\n\n @response_hash = Hashie::Mash.new(@json_response.to_hash)\n set_attrs(@response_hash)\n @response_hash\n end", "def calculate_params\n content_type == 'application/json' ? JSON.parse(body) : nil\n end", "def get(uri, options = {})\n build_response(request.get(uri, build_request_options({:input => options.to_params})))\n end", "def params\n self.GET.update(self.POST)\n rescue EOFError => boom\n self.GET\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end", "def to_body\n to_hash\n end" ]
[ "0.63484734", "0.6285159", "0.6257098", "0.6240063", "0.621525", "0.6056217", "0.6055301", "0.6053456", "0.6050894", "0.6046616", "0.60446924", "0.6020386", "0.5988293", "0.598794", "0.5987188", "0.5961442", "0.5961442", "0.5961442", "0.5961442", "0.5959406", "0.5904518", "0.5904216", "0.590315", "0.58942765", "0.58855414", "0.5873664", "0.5864524", "0.58549225", "0.5825386", "0.58050424", "0.5796817", "0.57967377", "0.5781187", "0.57689875", "0.5718249", "0.5711679", "0.57050425", "0.5697862", "0.56931764", "0.56892526", "0.56879103", "0.5662331", "0.5662042", "0.56559384", "0.56427467", "0.56427467", "0.5641551", "0.5640834", "0.5627581", "0.5626143", "0.56229293", "0.5621474", "0.56157756", "0.5606586", "0.55922705", "0.55807346", "0.55797046", "0.5577406", "0.5573045", "0.5573045", "0.557166", "0.557029", "0.55658275", "0.55539346", "0.5550846", "0.5542642", "0.5538679", "0.5538127", "0.55297667", "0.55274606", "0.55252516", "0.5523354", "0.55174947", "0.55122787", "0.5511007", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427", "0.55107427" ]
0.0
-1
Gets all storage instances, instances must be filtered by the specified filter, filter (if set) must contain an Occi::Core::Mixins instance. Returned collection must contain Occi::Infrastructure::Storage instances wrapped in Occi::Core::Resources.
def storage_list(mixins = nil)
  storages = Occi::Core::Resources.new

  Backends::Ec2::Helpers::AwsConnectHelper.rescue_aws_service(@logger) do
    volumes = @ec2_client.describe_volumes.volumes
    volumes.each do |volume|
      next unless volume
      storages << storage_parse_backend_obj(volume)
    end if volumes
  end

  storages
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def all\n storage\n end", "def all_storages\n hosts = all_hosts\n MiqPreloader.preload(hosts, :storages)\n hosts.collect(&:storages).flatten.compact.uniq\n end", "def storages\n IbmCloudRest.get \"#{@uri}/storage\"\n end", "def list(mixins = nil)\n # TODO: impl filtering with mixins\n storage = ::Occi::Core::Resources.new\n backend_storage_pool = ::OpenNebula::ImagePool.new(@client)\n rc = backend_storage_pool.info_all\n check_retval(rc, Backends::Errors::ResourceRetrievalError)\n\n backend_storage_pool.each do |backend_storage|\n storage << parse_backend_obj(backend_storage)\n end\n\n storage\n end", "def all\n containers = service.list_containers\n data = []\n containers.each do |container|\n c = parse_storage_object(container)\n c[:acl] = 'unknown'\n data << c\n end\n load(data)\n end", "def all\n storage.map(&:repository)\n end", "def volumes\n Collection::StorageVolumeCollection.new(self)\n end", "def index\n @storages = Storage.all\n end", "def index\n @storages = Storage.all\n end", "def list(mixins = nil)\n if mixins.blank?\n read_storage_fixtures\n else\n filtered_storages = read_storage_fixtures.to_a.select { |s| (s.mixins & mixins).any? }\n ::Occi::Core::Resources.new filtered_storages\n end\n end", "def list_storage_accounts\n request_path = \"/services/storageservices\"\n request = ManagementHttpRequest.new(:get, request_path, nil)\n response = request.call\n Serialization.storage_services_from_xml(response)\n end", "def ocs_storages_disk_only\n disks = []\n for o in ocs_storages\n if o.TYPE == 'disk'\n disks << o\n end\n end\n disks\n end", "def index\n @storages = Mercadolibre::Storage.all.paginate(page: params[:page], per_page: 7)\n end", "def index\n @storage_types = StorageType.all\n end", "def storages(type: nil)\n response = get(type && \"storage/#{type}\" || \"storage\")\n data = JSON.parse response.body\n data[\"storages\"][\"storage\"]\n end", "def get_storages(request)\n # --- Get User's Images ---\n image_pool = ImagePoolOCCI.new(\n @client,\n POOL_FILTER)\n\n # --- Prepare XML Response ---\n rc = image_pool.info\n if OpenNebula.is_error?(rc)\n return rc, CloudServer::HTTP_ERROR_CODE[rc.errno]\n end\n\n return to_occi_xml(image_pool, :code=>200, :verbose=>request.params['verbose'])\n end", "def retrieve_volumes\n dbg { \"retrieving #{pool_info}, #{hv_info}\" }\n\n volumes = pool.list_all_volumes\n dbg { \"list_all_volumes #{pool_info}, #{hv_info}\" }\n\n storage_volumes = volumes.map.with_index do |vol, index|\n id = \"#{uuid}--#{index}\"\n StorageVolume.new(vol, pool: self, id: id)\n end\n\n dbg { \"retrieved size=#{storage_volumes.size}, #{pool_info}, #{hv_info}\" }\n storage_volumes\n end", "def registered\n return [] if empty?\n return storage.keys if storage.is_a?(Hash)\n return storage.to_a if storage.is_a?(Set)\n\n storage\n end", "def storage\n @storage ||= Set.new\n end", "def storage\n @storage ||= Set.new\n end", "def index\n @storage_units = StorageUnit.all\n end", "def registered\n return [] if storage.empty?\n\n return storage.keys if storage.is_a?(Hash)\n return storage.to_a if storage.is_a?(Set)\n\n storage\n end", "def get_instances\n instances = [ ]\n get_clouds.each do |cloud|\n instances += cloud.instances.index(:filter => [], :view => 'tiny')\n end\n instances\n end", "def registered\n return [] if empty?\n\n storage\n end", "def all(filter=nil)\n data = []\n conn = @ec2_main.environment.connection\n if conn != nil\n begin\n if @ec2_main.settings.openstack\n x = conn.images.all\n x.each do |y|\n data = hash_ops_image(y)\n end\n elsif 
@ec2_main.settings.softlayer\n x = conn.images.all\n x.each do |y|\n data = hash_softlayer_image(y)\n end\n elsif @ec2_main.settings.google\n data = google_self()\n elsif ((conn.class).to_s).start_with? \"Fog::Compute::AWS\"\n x = conn.images.all(filters)\n x.each do |y|\n data.push(hash_ops_image_aws(y))\n end\n else\n data = conn.describe_images(:filters => filter)\n end\n rescue\n puts \"ERROR: getting all images #{$!}\"\n end\n end\n return data\n end", "def list_all_private_images(filter = {})\n storage_accounts = list_all(filter.merge(:skip_accessors_definition => true))\n get_private_images(storage_accounts)\n end", "def all\n inactive + active\n rescue Exception::LibvirtError\n # If inactive isn't supported, then we just return the active\n # storage pools.\n active\n end", "def all\n raise ArgumentError, \"No type specified for query\" if @type.nil?\n\n @store.load_from_url(uri_builder.resources_uri(@type, to_query))\n end", "def all(filter: nil, page_size: nil, page_token: nil)\n data = service.list_metric_descriptors(\n :filter => filter,\n :page_size => page_size,\n :page_token => page_token\n ).to_h[:metric_descriptors] || []\n\n load(data)\n end", "def all(filters = {})\n # REVISIT: I'm not sure if this is the best way to implement search\n # filters on a collection but it does work. I need to study the AWS\n # code more to make sure this matches up.\n filters['folder'] ||= attributes['folder']\n response = connection.list_virtual_machines(filters)\n load(response['virtual_machines'])\n end", "def list\n storage.transaction {return storage.roots}\n end", "def list_all_volumes\n Docker::Volume.all({}, client)\n rescue\n []\n end", "def storage_names\n @storage_names\n end", "def storage_all_items\n storage_items + storage_equip_items\n end", "def each_instance(filter_resource=nil, &blk)\n filter_resource = case filter_resource \n when Resource\n filter_resource.class\n when String\n Util.build_const(filter_resource)\n when Module, NilClass\n filter_resource\n else\n raise ArgumentError, \"The first argument has to be in form of NilClass, Resource, String or Module: #{filter_resource.class}\"\n end\n\n filter_ids = []\n\n unless filter_resource.nil?\n filter_ids = self.resources.keys.find_all { |resid|\n Resource.find(resid).kind_of?(filter_resource)\n }\n return [] if filter_ids.empty?\n end\n \n ary = self.services.keys.collect {|k| ServiceInstance.find(k) }\n if filter_resource.nil?\n else\n ary = ary.find_all{|v| filter_ids.member?(v.resource.id) }\n end\n\n ary.each {|v| blk.call(v) } if block_given?\n ary\n end", "def get_storage_types\n JSON.parse(RestClient.get(\"https://#{region.sub(/-\\d$/, '')}.power-iaas.cloud.ibm.com/broker/v1/storage-types\", headers))[region]\n end", "def related_storage_volumes\n related_volumes.find_all do |volume|\n volume.provider_name == \"compellent\" || volume.provider_name == \"vnx\"\n end\n end", "def select_storage_devices(component)\n pci_devices = component.try(:pciDevices).try(:select) { |device| storage_device?(device) }\n addin_cards = component.try(:addinCards).try(:select) { |device| storage_device?(device) }\n\n devices = []\n devices.concat(pci_devices) if pci_devices.present?\n devices.concat(addin_cards) if addin_cards.present?\n devices\n end", "def storages\n storage_list = CopyStaging::RestoreWork::ORIGINALS_STORAGE + CopyStaging::RestoreWork::DERIVATIVES_STORAGE\n Hash[ storage_list.collect { |shrine_storage_key| [shrine_storage_key.to_s, shrine_config(shrine_storage_key) ] } ]\n end", "def storage_items\n 
@storage_items[@storage_name].keys.sort.collect {|id| $data_items[id] }\n end", "def getObjects\n readObjects\n\n return @Objects\n end", "def filters\n if metaclass.method_defined? :_filters\n self._filters\n else\n []\n end\n end", "def list\n ::Taxi::S3.instance.list_buckets\n end", "def storage_only\n select {|d| d.resource_type == 17}\n end", "def executions\n @storage_class.instance\n end", "def versions\n # TODO make this a collection proxy, only loading the first, then the\n # rest as needed during iteration (possibly in chunks)\n return nil if @archived\n @versions ||= [self].concat(CloudKit.storage_adapter.query { |q|\n q.add_condition('resource_reference', :eql, @resource_reference)\n q.add_condition('archived', :eql, 'true')\n }.reverse.map { |hash| self.class.build_from_hash(hash) })\n end", "def index\n @taskstorages = current_user.taskstorages.all\n end", "def index\n @selected_location = filter_params[:at_location]\n @adjustments = current_organization.adjustments.class_filter(filter_params)\n\n @storage_locations = Adjustment.storage_locations_adjusted_for(current_organization).uniq\n end", "def list(filter=KalturaNotImplemented, pager=KalturaNotImplemented)\n\t\t\tkparams = {}\n\t\t\tclient.add_param(kparams, 'filter', filter);\n\t\t\tclient.add_param(kparams, 'pager', pager);\n\t\t\tclient.queue_service_action_call('flavorasset', 'list', kparams);\n\t\t\tif (client.is_multirequest)\n\t\t\t\treturn nil;\n\t\t\tend\n\t\t\treturn client.do_queue();\n\t\tend", "def objects\n bucket_obj = Aws::S3::Bucket.new(name: bucket, client: s3_client)\n bucket_obj.objects( prefix: prefix)\n end", "def find_all(options = {})\n proxy_owner.send(:list_bucket, options)\n end", "def storages\n reflective_auto_load_adapter_extension\n storages # call the overrided method\n end", "def get_volumes\n volumes = get(\"cloud-instances/#{guid}/volumes\")[\"volumes\"] || []\n\n volumes.map do |volume|\n get_volume(volume[\"volumeID\"])\n end\n end", "def all\n return @all if @all\n\n @all = []\n sets = {}\n\n @fog.tags.all(:key => \"Host\", :value => @hostname).each do |tag|\n next unless tag.resource_type == \"snapshot\"\n next unless snap = @fog.snapshots.get(tag.resource_id)\n next unless snap.tags[\"Mount\"] == @mount\n\n set_id = snap.tags[\"SetID\"] || generate_set_id\n\n set = sets[set_id] ? 
sets[set_id] : {:ids => [], :created_at => snap.created_at, :types => []}\n\n set[:ids] << snap.id\n set[:created_at] = snap.created_at if snap.created_at < set[:created_at]\n set[:types] |= snap.tags[\"Type\"].split(\",\") if snap.tags.has_key?(\"Type\")\n\n sets[set_id] = set\n end\n\n @all = sets.values\n @all.sort!{|a, b| a[:created_at] <=> b[:created_at]}\n\n @all\n end", "def volumes\n @volumes.to_a.freeze\n end", "def volumes\n service.list_pool_volumes uuid\n end", "def all_objects(opts = {})\n objects(\"*\", opts)\n end", "def instance_filters\n @instance_filters ||= []\n end", "def index \n @storage_locations = StorageLocation.paginate(page: params[:page])\n end", "def list(\n filter,\n *args,\n deadline: nil\n )\n return @secret_stores.list(\n filter,\n *args,\n deadline: deadline,\n )\n end", "def resources\n resources = []\n\n default_storage = {\n 'swift_zone' => 100,\n 'object_port'=>6000,\n 'container_port'=>6001,\n 'account_port'=>6002,\n 'mountpoints'=> \"1 1\\n2 1\",\n 'weight'=> 100,\n 'types'=>['container', 'object', 'account'],\n }\n\n self[:storages].each do |storage|\n merged_storage = default_storage.merge(storage)\n merged_storage['types'].collect do |type|\n port = merged_storage[\"#{type}_port\"]\n options = {\n :name=>\"#{merged_storage['storage_address']}:#{port}\",\n :mountpoints=>merged_storage['mountpoints'],\n :zone => merged_storage['swift_zone']\n }\n resources += [Puppet::Type.type(\"ring_#{type}_device\".to_sym).new(options)]\n end\n end\n resources\n end", "def list(filter=KalturaNotImplemented)\n\t\t\tkparams = {}\n\t\t\tclient.add_param(kparams, 'filter', filter)\n\t\t\tclient.queue_service_action_call('meta', 'list', 'KalturaMetaListResponse', kparams)\n\t\t\tif (client.is_multirequest)\n\t\t\t\treturn nil\n\t\t\tend\n\t\t\treturn client.do_queue()\n\t\tend", "def get_extensions\n read_extensions 'storage', @options.model_extensions_dir\n end", "def items\n Taco.storage.items\n end", "def object_instances\n return [] if object_definition.blank?\n \n latest_version = ::AP::VoiceExtension::Voice::Config.instance.latest_version\n klazz = \"::#{latest_version.upcase}::#{object_definition.name}\".constantize\n query_scope = object_definition.query_scope\n objects = klazz.respond_to?(query_scope.to_sym) ? klazz.send(query_scope.to_sym) : []\n if objects.blank?\n return []\n else\n return objects\n end\n end", "def all\n @filters\n end", "def all\n # Requires authorization\n raise PutioError::AuthorizationRequired if authentication_required!\n\n files(-1)\n end", "def objects(container = @default_container,\n path: nil, limit: nil, gt: nil, lt: nil)\n path = path[1..-1] if path && path[0] == ?/\n p = { path: path, limit: limit, marker: gt, end_marker: lt\n }.delete_if {|k,v| v.nil? 
}\n j,h = api_openstack(:get, container, p)\n Hash[j.map {|o| [ o['name'], {\n :hash => o['hash'],\n :lastmod => Time.parse(o['last_modified']),\n :size => o['bytes'].to_i,\n :type => o['content_type'],\n :contuse => h['x-container-bytes-used'],\n :contoct => h['x-container-object-coun'],\n :storpol => h['x-storage-policy'],\n } ] } ]\n end", "def get_storage_names\n @schemas = self.get_schema\n @schemas.map { |schema| schema['id'] }\n end", "def pull_all\n storage = Array.new\n @catchpoint.get(\"tests\")[\"items\"].each do |test|\n test = Dish(test)\n storage << Dish(test)\n end\n return storage\n end", "def compute_list(mixins = nil)\n mixins = deep_clone(mixins) if mixins\n @backend_instance.compute_list(mixins) || Occi::Core::Resources.new\n end", "def objects(params = {})\n Proxy.new(lambda { list_bucket(params) }, :owner => self, :extend => ObjectsExtension)\n end", "def all_objects\n Registry.all(:root, :module, :class)\n end", "def all\n _register_class_observer\n if _class_fetch_states.has_key?(:all) && 'fi'.include?(_class_fetch_states[:all]) # if f_etched or i_n progress of fetching\n collection = HyperRecord::Collection.new\n _record_cache.each_value { |record| collection.push(record) }\n return collection\n end\n promise_all\n HyperRecord::Collection.new\n end", "def list(filter)\n\t\t\tkparams = {}\n\t\t\tclient.add_param(kparams, 'filter', filter)\n\t\t\tclient.queue_service_action_call('region', 'list', 'KalturaRegionListResponse', kparams)\n\t\t\tif (client.is_multirequest)\n\t\t\t\treturn nil\n\t\t\tend\n\t\t\treturn client.do_queue()\n\t\tend", "def list(filter=KalturaNotImplemented, pager=KalturaNotImplemented)\n\t\t\tkparams = {}\n\t\t\t# Mix entry filter\n\t\t\tclient.add_param(kparams, 'filter', filter);\n\t\t\t# Pager\n\t\t\tclient.add_param(kparams, 'pager', pager);\n\t\t\tclient.queue_service_action_call('mixing', 'list', kparams);\n\t\t\tif (client.is_multirequest)\n\t\t\t\treturn nil;\n\t\t\tend\n\t\t\treturn client.do_queue();\n\t\tend", "def all\n folder.data_objects.all(parameters).collect do |data_object|\n model.new(data_object)\n end\n end", "def collection\n if params[:deleted] == 'on'\n base_variant_scope ||= super.with_deleted\n else\n base_variant_scope ||= super\n end\n\n search = Spree::Config.variant_search_class.new(params[:variant_search_term], scope: base_variant_scope)\n @collection = search.results.includes(variant_includes).page(params[:page]).per(Spree::Config[:admin_variants_per_page])\n\n @collection = @collection.where(user_id: spree_current_user.try(:id) ) if spree_current_user && !spree_current_user.admin?\n @collection\n end", "def compute_list(mixins = nil)\n # TODO: impl filtering with mixins\n compute = Occi::Core::Resources.new\n backend_compute_pool = ::OpenNebula::VirtualMachinePool.new(@client)\n rc = backend_compute_pool.info_all\n check_retval(rc, Backends::Errors::ResourceRetrievalError)\n\n backend_compute_pool.each do |backend_compute|\n compute << compute_parse_backend_obj(backend_compute)\n end\n\n compute\n end", "def all(filters = {})\n f = {\n datacenter: datacenter,\n cluster: cluster,\n network: network,\n resource_pool: resource_pool,\n folder: folder,\n recursive: recursive\n }.merge(filters)\n\n load service.list_virtual_machines(f)\n end", "def all(filters = {})\n servers = service.list_servers(filters)\n @kind = servers.kind\n @resource_version = servers.resource_version\n load servers\n end", "def list_volumes\n response = @connection.req(\"GET\", \"/#{@volume_path}\")\n volumes_hash = 
JSON.parse(response.body)[\"volumes\"]\n volumes_hash.inject([]){|res, current| res << OpenStack::Volume::Volume.new(current); res}\n end", "def objects\n @objects ||= []\n end", "def storage\n @plugins.detect { |p| p.is_a? BaseStorage }\n end", "def snapshots\n Fission::Action::Snapshot::Lister.new(self).snapshots\n end", "def storage_servers\n self.client.get('gh.storage.servers').split(',')\n end", "def objects\n return @objects unless @objects.nil?\n objs = []\n dict = @instance.getDictionary\n (0 ... dict.getChildCount).each do |i|\n obj = dict.getChildAt(i)\n objs << {\n :name => obj.getName,\n :qual => obj.getQualification.toString.downcase.to_sym,\n :type => obj.getType.toString.downcase.to_sym,\n :object => obj,\n }\n end\n @objects = objs\n end", "def spaces\n assets = Asset.unscoped.\n editable_by(@context).where.not(scope: [SCOPE_PUBLIC, SCOPE_PRIVATE]).\n eager_load(user: :org).\n includes(:taggings).\n search_by_tags(params.dig(:filters, :tags))\n\n assets = FileService::FilesFilter.call(assets, params[:filters]).to_a\n\n if show_count\n render plain: assets.count\n else\n assets = sort_array_by_fields(assets, \"created_at\")\n page_meta = pagination_meta(assets.count)\n assets = paginate_array(assets)\n\n render json: assets, meta: page_meta, root: \"assets\", adapter: :json\n end\n end", "def list(\n filter,\n *args,\n deadline: nil\n )\n req = V1::SecretStoreListRequest.new()\n req.meta = V1::ListRequestMetadata.new()\n page_size_option = @parent._test_options[\"PageSize\"]\n if page_size_option.is_a? Integer\n req.meta.limit = page_size_option\n end\n if not @parent.snapshot_time.nil?\n req.meta.snapshot_at = @parent.snapshot_time\n end\n\n req.filter = Plumbing::quote_filter_args(filter, *args)\n resp = Enumerator::Generator.new { |g|\n tries = 0\n loop do\n begin\n plumbing_response = @stub.list(req, metadata: @parent.get_metadata(\"SecretStores.List\", req), deadline: deadline)\n rescue => exception\n if (@parent.shouldRetry(tries, exception))\n tries + [email protected](tries)\n next\n end\n raise Plumbing::convert_error_to_porcelain(exception)\n end\n tries = 0\n plumbing_response.secret_stores.each do |plumbing_item|\n g.yield Plumbing::convert_secret_store_to_porcelain(plumbing_item)\n end\n break if plumbing_response.meta.next_cursor == \"\"\n req.meta.cursor = plumbing_response.meta.next_cursor\n end\n }\n resp\n end", "def get_extensions\n collection = Occi::Collection.new\n BACKEND_TYPES.each { |backend_type| collection.merge! backend_instances[backend_type].get_extensions }\n collection\n end", "def all\n @collection ||= Collection.new model_name\n end", "def images\n return @canonical_image_pool if @canonical_image_pool\n Egi::Fedcloud::Vmhound::Log.debug \"[#{self.class}] Retrieving all images\"\n check_retval @image_pool.info_all!\n\n @canonical_image_pool = []\n @image_pool.each { |image| @canonical_image_pool << canonical_image(image) }\n @canonical_image_pool\n end", "def volumes\n volumes = @ec2.volumes\n volumes.delete_if {|v| v.instance_id != id}\n end", "def all\n PaginatedResource.new(self)\n end", "def all\n PaginatedResource.new(self)\n end", "def all\n PaginatedResource.new(self)\n end", "def all\n PaginatedResource.new(self)\n end", "def all\n PaginatedResource.new(self)\n end", "def all\n PaginatedResource.new(self)\n end", "def all\n PaginatedResource.new(self)\n end" ]
[ "0.6733361", "0.646533", "0.631976", "0.6300889", "0.62990654", "0.62872094", "0.6203506", "0.6121806", "0.6121806", "0.5918572", "0.5808153", "0.57654643", "0.57487977", "0.57340986", "0.5683546", "0.5622185", "0.5611591", "0.5582446", "0.55671453", "0.55671453", "0.553334", "0.550442", "0.5489807", "0.5484613", "0.54465806", "0.54391444", "0.54205257", "0.54065055", "0.53993976", "0.52638245", "0.52615833", "0.52580607", "0.5255962", "0.52075887", "0.51866114", "0.5149598", "0.51482356", "0.5140801", "0.51349604", "0.5130952", "0.5111394", "0.5106271", "0.5100119", "0.5093497", "0.5086974", "0.5075147", "0.506876", "0.50624853", "0.5047536", "0.5040068", "0.50397706", "0.50386953", "0.50184906", "0.50077224", "0.5003306", "0.4992732", "0.4986023", "0.49802926", "0.49523208", "0.49430537", "0.49400893", "0.49396315", "0.49377492", "0.49311775", "0.4929656", "0.49168432", "0.49154413", "0.48985633", "0.48970094", "0.48897046", "0.48862633", "0.48843727", "0.4880377", "0.4876639", "0.48732823", "0.48680416", "0.48560756", "0.48518866", "0.48506397", "0.4850034", "0.48454636", "0.48333797", "0.4818039", "0.48128262", "0.4812032", "0.47886968", "0.47884488", "0.47758815", "0.4769643", "0.47664163", "0.4762771", "0.47619498", "0.47553095", "0.47545058", "0.47545058", "0.47545058", "0.47545058", "0.47545058", "0.47545058", "0.47545058" ]
0.68213433
0
Gets a specific storage instance as Occi::Infrastructure::Storage. ID given as an argument must match the occi.core.id attribute inside the returned Occi::Infrastructure::Storage instance, however it is possible to implement internal mapping to a platformspecific identifier.
def storage_get(storage_id)
  filters = []
  filters << { name: 'volume-id', values: [storage_id] }

  Backends::Ec2::Helpers::AwsConnectHelper.rescue_aws_service(@logger) do
    volumes = @ec2_client.describe_volumes(filters: filters).volumes
    volume = volumes ? volumes.first : nil
    return nil unless volume

    storage_parse_backend_obj(volume)
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find(options = {})\n raise \"Unable to locate the storage named '#{options[:name]}'\" unless options[:id]\n response = Profitbricks.request :get_storage, \"<storageId>#{options[:id]}</storageId>\"\n Profitbricks::Storage.new(response.to_hash[:get_storage_response][:return])\n end", "def set_storage\n @storage = Storage.find(params[:id])\n end", "def set_storage\n @storage = Storage.find(params[:id])\n end", "def set_storage\n @storage = Storage.find(params[:id])\n end", "def set_storage\n @storage = Storage.find(params[:id])\n end", "def set_storage\n @storage = Storage.find(params[:id])\n end", "def get(storage_id)\n found = read_storage_fixtures.to_a.select { |s| s.id == storage_id }.first\n fail Backends::Errors::ResourceNotFoundError, \"Instance with ID #{storage_id} does not exist!\" unless found\n\n found\n end", "def get(storage_id)\n image = ::OpenNebula::Image.new(::OpenNebula::Image.build_xml(storage_id), @client)\n rc = image.info\n check_retval(rc, Backends::Errors::ResourceRetrievalError)\n\n parse_backend_obj(image)\n end", "def primary_storage_adapter\n @storage_adapters.first\n end", "def get_storage_class\n return @m_storage_class\n end", "def service\n softlayer_client[:Network_Storage].object_with_id(self.id)\n end", "def set_storage_type\n @storage_type = StorageType.find(params[:id])\n end", "def storage \n Trole::Storage::BaseOne\n end", "def retrieve(id)\n raise NotFound.new(\"id '#{id}'\") unless ids.include?(id)\n\n storage[id]\n end", "def storage_volume(storage_volume_id)\n from_resource :storage_volume,\n connection.get(api_uri(\"storage_volumes/#{storage_volume_id}\"))\n end", "def lookup_storage(key)\n { memory: Storage::Memory, redis: Storage::Redis }[key]\n end", "def store_with(storage, storage_id = nil, &block)\n @storages << Backup::Storage.const_get(\n last_constant(storage)\n ).new(storage_id, &block)\n end", "def storage_instance; end", "def find_by(id:)\n storage_adapters.values.find do |storage_adapter|\n storage_adapter.handles?(id: id)\n end.find_by(id: id)\n end", "def storage_class\n @storage_class || storage_loader.storage_class\n end", "def storage_type\n data[:storage_type]\n end", "def storage_type\n data[:storage_type]\n end", "def storage(domain)\n host = vhost(domain)\n host ? 
host.storage : @null\n end", "def current_storage\n @attributes[:current_storage]\n end", "def find_storage(name)\n storages.each { |key, value| return value if key.to_s == name.to_s }\n raise Error, \"storage #{name.inspect} isn't registered on #{self}\"\n end", "def storage\n require 'yaml/store'\n Storage[object_id] ||= YAML::Store.new(STORAGE_FILE)\n end", "def storage\n @storage ||= Fog::Storage.new(storage_config)\n end", "def storage_type\n @dbi.storage_type\n end", "def storage_type\n @dbi.storage_type\n end", "def show\n @storage = @client.storages.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @storage }\n end\n end", "def get_from_storage(id)\n\t\traise \"[FATAL] Storage directory not set\" if Repository.data_dir.nil?\n\n\t\t# Aquire raw JSON\n\t\traw = aquire_raw(id)\n\n\t\t# Escape if object not found\n\t\treturn nil if raw.nil?\n\n\t\t# Create object\n\t\tobj = JSON::parse(raw)\n\n\t\t# Grab needed objects, args => self\n\t\tobj.cache_collect\n\n\t\t# return\n\t\treturn obj\n\tend", "def storage_class\n raise NotImplementedError\n end", "def set_item_storage\n @item_storage = ItemStorage.find(params[:id])\n end", "def get_storage_adapter\n @storage_adapter.new(storage_adapter_config)\n end", "def storage\n @plugins.detect { |p| p.is_a? BaseStorage }\n end", "def set_storage_unit\n @storage_unit = StorageUnit.find(params[:id])\n end", "def storages(type: nil)\n response = get(type && \"storage/#{type}\" || \"storage\")\n data = JSON.parse response.body\n data[\"storages\"][\"storage\"]\n end", "def fetch identifier\n @storage[identifier]\n end", "def storage_name\n @storage_name || name\n end", "def store_with(name, storage_id = nil, &block)\n @storages << get_class_from_scope(Storage, name)\n .new(self, storage_id, &block)\n end", "def storage_location_id\n value = field_value('storage_location_id', find_by: :code)\n if value.nil?\n return self.storage_location_id = self.generate_uuid # make sure persistent_id is set & use that\n else\n return value\n end\n end", "def get_storage(request, params)\n # --- Get the Image ---\n image = ImageOCCI.new(\n Image.build_xml(params[:id]),\n @client)\n\n rc = image.info\n if OpenNebula.is_error?(rc)\n return rc, CloudServer::HTTP_ERROR_CODE[rc.errno]\n end\n\n # --- Prepare XML Response ---\n return to_occi_xml(image, :code=>200)\n end", "def storage_adapter\n @storage_adapter ||= begin\n all_adapters = storage_adapters\n raise UndefinedSetting, \"No storage adapters configured\" if all_adapters.empty?\n adapter = all_adapters.detect do |potential_adapter|\n potential_adapter.use?(storage)\n end\n raise UndefinedSetting, \"Storage is missing or not supported. Supported: #{all_adapters.map(&:display_name).join(', ')}\" unless adapter\n adapter\n end\n end", "def load_storage_system(item)\n raise \"Unspecified property: 'storage_system'. 
Please set it before attempting this action.\" unless storage_system\n storage_system_resource = OneviewSDK::StorageSystem.new(item.client, credentials: { ip_hostname: storage_system })\n unless storage_system_resource.exists?\n storage_system_resource = OneviewSDK::StorageSystem.new(item.client, name: storage_system)\n end\n item.set_storage_system(storage_system_resource)\n end", "def storage\n ManagedImage.variants_storage\n end", "def find identifier\n @storage[identifier]\n end", "def storage_management(options = {})\n Azure::StorageManagement::StorageManagementService.new(default_client(options))\n end", "def storage_type\n @grpc.default_storage_type\n end", "def storage\n Boom.storage\n end", "def identifier\n @identifier || (file && storage.try(:identifier))\n end", "def current\n fail NoInterfacesDefined if storage.empty?\n\n storage.first\n end", "def storage() @storage; end", "def storage\n @storage ||= in_memory\n end", "def storage\n @storage ||= in_memory\n end", "def storage\n @storage ||= in_memory\n end", "def storage\n @storage ||= in_memory\n end", "def storage\n @storage ||= in_memory\n end", "def get_workspace_bucket(workspace_bucket_id)\n self.storage.bucket workspace_bucket_id\n end", "def retrieve!(identifier)\n HesCloudStorage::HesCloudStorageEngine::File.new(uploader, self, ::File.basename(uploader.store_path(identifier), uploader.root))\n end", "def storage(arg=nil)\n set_or_return(:storage, arg, :kind_of => String)\n end", "def find_by_uuid(uuid)\n nil_or_object(FFI::Libvirt.virStoragePoolLookupByUUIDString(interface, uuid), StoragePool)\n end", "def storages\n IbmCloudRest.get \"#{@uri}/storage\"\n end", "def [] goid\n if @ghash[goid]\n return @ghash[goid]\n elsif @all_goids.include? goid\n log \"Loading #{goid} from storage\" , Logger::Ultimate\n begin\n obj = @storage.load_object(goid, self)\n rescue MUDError::NoSuchGOID\n log \"Tried to load #{goid}, but it must have been deleted.\"\n return nil\n end\n return obj\n else\n return nil\n end\n end", "def storage\n uploader.storage\n end", "def delete\n storage.delete(id)\n end", "def storage_fetch(repository, oid)\n unless @storage[repository]\n return false\n end\n if @storage[repository]\n obj = @storage[repository].select { |x| x[oid] }\n if obj\n return obj.first\n end\n else\n return false\n end\n return false\n end", "def show_storage\n\t\t\treturn @player_data_db.execute( \"select * from storage\" )\n\t\tend", "def find_storage_object_version(druid, version_id=nil)\n storage_object = find_storage_object(druid)\n storage_object.find_object_version(version_id)\n end", "def storage(domain)\n @config.storage(domain)\n end", "def statistics_by_primary_storage(opts = {})\n data, _status_code, _headers = statistics_by_primary_storage_with_http_info(opts)\n data\n end", "def find_by(id:)\n return unless handles?(id: id)\n Valkyrie::StorageAdapter::File.new(id: Valkyrie::ID.new(id.to_s), io: ::File.open(file_path(id), 'rb'))\n end", "def storage_location\n return @storage_location\n end", "def root_storage\n @attributes[:root_storage]\n end", "def simpledb_type\n model.storage_name(DataMapper.repository.name)\n end", "def get_storage_types\n JSON.parse(RestClient.get(\"https://#{region.sub(/-\\d$/, '')}.power-iaas.cloud.ibm.com/broker/v1/storage-types\", headers))[region]\n end", "def driver\n storage_hash = get_api_node(['storage-pools', resource[:name]])\n driver = storage_hash['driver']\n return driver\n end", "def set_taskstorage\n @taskstorage = Taskstorage.find(params[:id])\n end", "def 
get_storage_module(config)\n return StorageMongo.new(config)\nend", "def storage(domain=nil)\n domain ? @config.vhosts[domain] : @storage\n end", "def find(short_name)\n storage_adapters[short_name]\n end", "def storage\n @_storage ||= in_memory\n end", "def storage=(storage = nil)\n if storage.nil?\n # set this to nil so that the DefaultStorageClient can take over.\n @storage = nil\n else\n @storage = ::Cache.wrap storage\n end\n end", "def get_proper_storage_for_copy(size, exclude = [])\n FC::Storage.select_proper_storage_for_create(get_copy_storages, size, exclude)\n end", "def get_proper_storage_for_copy(size, exclude = [])\n FC::Storage.select_proper_storage_for_create(get_copy_storages, size, exclude)\n end", "def delete_storage(storage_uuid)\n response = delete \"storage/#{storage_uuid}\"\n\n response\n end", "def delete_storage(storage_uuid)\n response = delete \"storage/#{storage_uuid}\"\n\n response\n end", "def update\n @storage = @client.storages.find(params[:id])\n\n respond_to do |format|\n if @storage.update_attributes(params[:storage])\n format.html { redirect_to client_path(@client), notice: 'Storage was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @storage.errors, status: :unprocessable_entity }\n end\n end\n end", "def store\n @store ||= storage.new self\n end", "def simpledb_type(model)\n model.storage_name(model.repository.name)\n end", "def get key\n storage[key]\n end", "def issuer\n self.storage_issuer\n end", "def store identifier, object\n @storage.shift if @storage.size >= @size_limit\n @storage[identifier] = object\n end", "def storage_item_container(item_class)\n return @storage_items[@storage_name] if item_class == RPG::Item\n return @storage_weapons[@storage_name] if item_class == RPG::Weapon\n return @storage_armors[@storage_name] if item_class == RPG::Armor\n return nil\n end", "def sid\n\n Ruote::FlowExpressionId.to_storage_id(h.fei)\n end", "def getStructure(db, sid)\n\tstruct = StructureAbstract.new(db, sid)\n\ttype = struct.type\n\t\n\tif type == 'storage'\n\t\treturn StructureStorageRig.new(db, sid)\n\tend\n\t\n\tstruct\nend", "def storage_items\n @storage_items[@storage_name].keys.sort.collect {|id| $data_items[id] }\n end", "def get(id)\n Maglev::PERSISTENT_ROOT[self][id.to_i]\n end", "def storage_adapter\n Valkyrie::StorageAdapter.find(:disk_via_copy)\n end", "def get_real_storage\n\n @application_context[:s_expression_storage__1]\n end", "def storage_hash\n JSON.parse(@bot.storage.reload.content || '{}')\n end" ]
[ "0.70809156", "0.69654936", "0.69528383", "0.69528383", "0.69528383", "0.69528383", "0.68170005", "0.67749244", "0.6626385", "0.6621228", "0.65538925", "0.6470883", "0.6440725", "0.64258957", "0.6402922", "0.6314419", "0.62876207", "0.6283339", "0.6281086", "0.62758887", "0.62422925", "0.62422925", "0.621832", "0.6210395", "0.6206617", "0.6172708", "0.61711246", "0.61691993", "0.61691993", "0.6125448", "0.6121824", "0.6105321", "0.60911006", "0.60775286", "0.6073673", "0.6037487", "0.6033779", "0.6011585", "0.5924281", "0.59066963", "0.5880381", "0.5872208", "0.57925105", "0.5787119", "0.57848144", "0.57369065", "0.572876", "0.5727751", "0.5723106", "0.57136214", "0.5709735", "0.57073617", "0.5683595", "0.5683595", "0.5683595", "0.5659888", "0.5659888", "0.5654602", "0.56515837", "0.56473976", "0.56462723", "0.56411797", "0.5629935", "0.56282705", "0.56114846", "0.5601423", "0.55916667", "0.5588658", "0.55562437", "0.5544418", "0.5534034", "0.55297005", "0.5501973", "0.5497707", "0.54851186", "0.54721177", "0.5469465", "0.54674244", "0.54415524", "0.5439023", "0.54193294", "0.54152316", "0.53964263", "0.53964263", "0.53690726", "0.53690726", "0.5355927", "0.53465515", "0.5338833", "0.5335281", "0.5333089", "0.53255737", "0.5325052", "0.53053975", "0.5290307", "0.5288072", "0.5283112", "0.52735937", "0.52671766", "0.52568454" ]
0.7173718
0
Sets the site's url
def site_url if Rails.env.production? # Place your production URL in the quotes below "http://www.ezonline.com/" else # Our dev & test URL "http://ezonline-dev.com:3000" end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_site_url\n @site_url = SiteUrl.find(params[:id]) if params[:id]\n end", "def set_site\n @site = Site.find_by(slug: params[:site_slug])\n\n # We just want a valid URL for the site\n @url = @site.routes.first.host_with_scheme if @site.routes.any?\n end", "def set_url\n @url = DEFAULT_URL\n end", "def set_site\n @site = Site.find_by_slug!(params[:site_slug])\n\n if (@site.routes.any?)\n # We just want a valid URL for the site\n @url = @site.routes.first.host_with_scheme\n end\n end", "def setURL(url)\r\n\t\t\t\t\t@url = url\r\n\t\t\t\tend", "def setURL(url)\r\n\t\t\t\t\t@url = url\r\n\t\t\t\tend", "def setURL(url)\r\n\t\t\t\t\t@url = url\r\n\t\t\t\tend", "def setURL(url)\r\n\t\t\t\t\t@url = url\r\n\t\t\t\tend", "def setURL(url)\r\n\t\t\t\t\t@url = url\r\n\t\t\t\tend", "def set_site\n @site = Site.find_by({slug: params[:site_slug]})\n\n if (@site.routes.any?)\n # We just want a valid URL for the site\n @url = @site.routes.first.host_with_scheme\n end\n end", "def set_site\n @site = Site.find_by({slug: params[:site_slug]})\n\n if (@site.routes.any?)\n # We just want a valid URL for the site\n @url = @site.routes.first.host\n end\n end", "def fill_url(site, page_or_post)\r\n url = 'https:' + site.config['url'] + site.config['baseurl'] + page_or_post.location_on_server(url)\r\n \r\n url\r\n end", "def site(url = nil)\n @site = url if url\n resource ? (@site || resource.base_url) : @site\n end", "def set_website\n uri = URI.parse(url)\n if website = Website.where(url: uri.host).first\n self.website = website\n else\n website = Website.create(url: uri.host)\n self.website = website\n end\n end", "def set_url\n url 'set'\n end", "def url\n Config.site.url.chomp('/') + self.path\n end", "def web_url=(value)\n @web_url = value\n end", "def web_url=(value)\n @web_url = value\n end", "def web_url=(value)\n @web_url = value\n end", "def website=(site)\n if site =~ /^http:\\/\\/|^https:\\/\\//i\n self[:website] = site\n elsif site.present?\n self[:website] = \"http://#{site}\"\n end\n end", "def set_url_host \n @url_host = request.protocol() + request.host_with_port() + '/' \n end", "def url(url)\n @url = url\n end", "def site_url=(site_url)\n if site_url.nil?\n fail ArgumentError, \"site_url cannot be nil\"\n end\n\n if site_url.to_s.length > 100\n fail ArgumentError, \"invalid value for 'site_url', the character length must be smaller than or equal to 100.\"\n end\n\n @site_url = site_url\n end", "def site_url\n SITE_URL\n end", "def site_url\n raise StandardError.new \"define method site_url\"\n end", "def set_url\n @url = Url.find_by_admin_slug(params[:slug])\n end", "def setserverurl(value)\r\n setvalue(@@SERVER_URL, value)\r\n end", "def url=(url)\n @@url = url\n end", "def set_url_host_model\n if @url.present?\n @url.url_host = @url_host\n end\n end", "def url= new_url\n new_url = self.class.standardized_url new_url\n return if new_url == url\n super new_url # Heading for trouble if url wasn't unique\n @indexing_url = nil # Clear the memoized indexing_url\n self.http_status = nil # Reset the http_status\n # We do NOT build the associated site here, because we may be BUILDING the page_ref for a site, in\n # which case that site will assign itself to us. 
Instead, the site attribute is memoized, and if it\n # hasn't been built by the time that it is accessed, THEN we find or build an appropriate site\n self.site = SiteServices.find_or_build_for self\n self.kind = :site if site&.page_ref == self # Site may have failed to build\n # We trigger the site-adoption process if the existing site doesn't serve the new url\n # self.site = nil if site&.persisted? && (SiteServices.find_for(url) != site) # Gonna have to find another site\n request_attributes :url # Trigger gleaning and mercury_result to validate/modify url\n attrib_ready! :url # Has been requested but is currently ready\n end", "def site_url\n get_url(:site)\n end", "def website=(url)\n ### have to strip the url, in case there is whitespace\n url.strip!\n\n ### this will catch any malformed uris\n if url.length > 0\n url = URI.parse( url =~ /^https?/i ? url : 'http://' + url ).to_s\n end\n\n write_attribute( :website, url )\n end", "def url=(url)\n @@url = url\n end", "def set_base_url\n uri = URI.parse(@page_url)\n @base_url = \"#{uri.scheme}://#{uri.host}\"\n end", "def set_url\n @url = Url.get_from_short(params[:id])\n end", "def url=(u)\n u = u.try(:strip)\n u = \"http://#{u}\" if u.present? and u !~ /^\\w+:\\/\\//\n if self.page\n self.page.url = u\n else\n @url = u\n end\n end", "def url(value)\n @url = value\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url\n @url = Url.find(params[:id])\n end", "def set_url(url_value)\n @path = url_value.delete_prefix \"/\"\n @dir = File.dirname(@path)\n @url = url_value\n end", "def home_page_url=(value)\n @home_page_url = value\n end", "def url=(value)\n @url = value\n Curl.set_option(:url, value_for(value, :string), handle)\n end", "def url=(url)\n if url =~ /^http(s?)\\:\\/\\//i\n @url = url\n write_settings\n else\n $stderr.puts \"Please include a leading http://\"\n end\n end", "def set_webpage!\n self.webpage = 'http://www.' unless self.webpage\n end", "def hub_site_urls=(value)\n @hub_site_urls = value\n end", "def set_url(opts)\n opts = check_params(opts,[:urls])\n super(opts)\n end", "def set_base_url(url)\n base_url = url\n end", "def url=(_); end", "def website_url; website end", "def url=(new_url)\n @url = sanitize_url(new_url)\n end", "def set_url\n @url = Url.find_by(key: params[:key])\n end", "def set_url\n @url = Url.find_by(short: params[:short])\n end", "def set_git_urls\n self.url = GitHosting.repository_path(self) if url.blank?\n self.root_url = url if root_url.blank?\n end", "def set_site_domain\n begin\n @enable_games = false\n @enable_topics = false\n @enable_wiki = false\n @site_domain = 'lefiores.com'\n @facebook_url = 'https://www.facebook.com/www.gamerz.wiki'\n @twitter_url = 'https://twitter.com/gamerzwiki'\n @google_plus_url = 'https://plus.google.com/116093313158973278682/about'\n\n if Rails.env.production? 
\n @site_domain = 'lefiores.com' \n \n else\n @site_domain = 'localhost:3000' \n end\n rescue \n @site_domain = 'lefiores.com' \n end\n end", "def set_url\n self.update_column(:url, '/' + self.ancestry_url)\n end", "def set_site\n @site = Xmt::Press::Site.find(params[:id])\n end", "def add_http_to_url\n if !self.website_url.blank?\n unless self.website_url[/\\Ahttp:\\/\\//] || self.website_url[/\\Ahttps:\\/\\//]\n self.website_url = \"http://#{self.website_url}\"\n end\n end\n end", "def set_git_urls\n self.url = GitHosting.repository_path(self) if self.url.blank?\n self.root_url = self.url if self.root_url.blank?\n end", "def set_site\n @site = Site.find_by(id: params[:id])\n #for preview/themes/set_theme\n @site ||= Site.find_by(id: params[:site_id])\n not_found if @site.nil?\n end", "def set_url\n @url = Url.find_by_short_url(params[:id])\n end", "def site=(value)\n @connection = nil\n @site = value\n end", "def url_server\n\t\t\tunless @links_to_visit.empty?\n\t\t\t\t@url = @links_to_visit.pop\n\t\t\tend\n\t\tend", "def url=(value)\n value = \"http://#{value}\" unless value.empty? || value =~ %r{^http://} || value =~ %r{^https://}\n write_local(:url, value)\n end", "def app_url=(value)\n @app_url = value\n end", "def site=(site)\n @connection = nil\n @site = create_site_uri_from(site)\n end", "def website_url=(new_url)\n write_attribute(:website_url, Organization.clean_url(new_url))\n end", "def pages_url=(value)\n @pages_url = value\n end", "def site_url(path)\n\t\tbase_url = \"http://fullridecentral.com\"\n\t\t\"#{base_url}#{path}\"\n\tend", "def set_Website(value)\n set_input(\"Website\", value)\n end", "def login_web_url=(value)\n @login_web_url = value\n end", "def url\n \"http://#{self.cms_site.hostname}#{self.full_path}\"\n end", "def url\n \"#{@client.site_url}/#{id}\"\n end", "def start_url(url)\n @start_url = url\n end", "def url\n if @url\n @url\n else\n begin\n\n page_name = File.basename(self.name, '.*')\n config = @config['permalinks'][page_name]\n\n if config.is_a? String\n @url = config\n self.data['permalink'] = nil\n else\n @config['permalinks'][File.basename(self.name, '.*')] = self.data['permalink']\n end\n rescue; end\n\n super\n\n if @url && @url =~ /\\/$/\n if self.ext == '.xml'\n @url = File.join(@url, \"index.xml\")\n else\n @url = File.join(@url, \"index.html\")\n end\n end\n\n @url\n end\n end", "def url=(v)\n @url = v ? URI.parse(v) : nil\n end", "def web_link=(value)\n @web_link = value\n end", "def content_web_url=(value)\n @content_web_url = value\n end", "def set_site\n @site = Cms::Site.find(params[:id])\n end", "def set_URL(value)\n set_input(\"URL\", value)\n end", "def set_URL(value)\n set_input(\"URL\", value)\n end", "def set_URL(value)\n set_input(\"URL\", value)\n end", "def set_URL(value)\n set_input(\"URL\", value)\n end", "def set_URL(value)\n set_input(\"URL\", value)\n end", "def set_URL(value)\n set_input(\"URL\", value)\n end", "def set_URL(value)\n set_input(\"URL\", value)\n end", "def url=(value)\n if value.nil?\n @url = nil\n else\n @url = Addressable::URI.parse(value)\n @url.path << '/' unless @url.path.end_with?('/')\n end\n end", "def set_site\n @site = Site.find(params[:site_id])\n end", "def set_site\n @site = Site.find(params[:id])\n end", "def url=(url)\n active_window.url = url\n end", "def set_site\n @site = current_account.sites.find(params[:site_id]) if params[:site_id]\n end", "def site=(site)\n @site = site\n end" ]
[ "0.81166565", "0.764335", "0.7636164", "0.7451123", "0.7438351", "0.7438351", "0.7438351", "0.7438351", "0.7438351", "0.742402", "0.7383348", "0.7332757", "0.7247373", "0.7215744", "0.71878105", "0.7182632", "0.7175835", "0.7175835", "0.7175835", "0.71576303", "0.7092747", "0.7064149", "0.7039", "0.703091", "0.701932", "0.7006518", "0.7006328", "0.69810325", "0.6960744", "0.6955114", "0.69513303", "0.6931253", "0.6906778", "0.6880304", "0.68585545", "0.68262535", "0.678188", "0.6775765", "0.6775765", "0.6775765", "0.6775765", "0.6775765", "0.6775765", "0.6775765", "0.6775765", "0.6775765", "0.6775765", "0.6775765", "0.6775765", "0.6773246", "0.6743427", "0.67391413", "0.67363393", "0.67293674", "0.6724144", "0.6707265", "0.6695156", "0.6690714", "0.6666694", "0.6658648", "0.66505665", "0.6632194", "0.663207", "0.662941", "0.6628113", "0.6626508", "0.6622343", "0.66068023", "0.6599193", "0.6595852", "0.65939844", "0.6587422", "0.65745413", "0.65550274", "0.65409636", "0.65361536", "0.65236086", "0.6510952", "0.64959824", "0.6485132", "0.64551455", "0.6449265", "0.6444985", "0.64293635", "0.6427038", "0.6393519", "0.6384103", "0.6383087", "0.63768053", "0.63768053", "0.63768053", "0.63768053", "0.63768053", "0.63768053", "0.63768053", "0.63652414", "0.6359791", "0.6358708", "0.6354279", "0.6348394", "0.6346046" ]
0.0
-1
Sets the author of the site
def meta_author # Change the value below between the quotes. "Team Tation" end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_AuthorUsername(value)\n set_input(\"AuthorUsername\", value)\n end", "def author=(author)\n @author = author.to_s.strip\n end", "def set_author\n author_reference.reference = \"Patient/#{user.icn}\"\n end", "def author=(author)\n @author = author\n end", "def set_author\n @author = apply_scopes(Author).find(params[:id]) rescue nil\n end", "def set_author\n @author = User.find(params[:id])\n end", "def set_author_name\n # Handles toggling between fake and real names (at edit)\n if anonymouse_changed?\n if self.anonymouse?\n self.author_name = Faker::Name.name\n else\n self.author_name = self.user.name\n end\n end\n # Handles\n if !self.anonymouse?\n self.author_name = self.user.name\n end\n end", "def author=(author) #setter\n @author = author\n end", "def set_author\n @author = Author.friendly.find(params[:author_name])\n end", "def author\n @author ||= AuthorDrop.new(:page => page, :site => site)\n end", "def author=(new_author = nil)\n @info[:Author] = new_author\n end", "def set_author\n @author = Author.friendly.find(params[:id])\n end", "def SetAuthor(author)\n\t\t#Author of document\n\t\t@author = author\n\tend", "def set_author\n @author = Author.find(params[:internet_id])\n end", "def set_author_id\n if self.author_id.blank?\n self.author_id = self.user_id\n end\n end", "def set_author_id\n if self.author_id.blank?\n self.author_id = self.user_id\n end\n end", "def setAuthor(author)\n @fields['author'] = author\n self\n end", "def setAuthor(author)\n @fields['author'] = author\n self\n end", "def setAuthor(author)\n @fields['author'] = author\n self\n end", "def setAuthor(author)\n @fields['author'] = author\n self\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @creator = Creator.find(params[:id])\n end", "def set_AuthorName(value)\n set_input(\"AuthorName\", value)\n end", "def set_author(new_author)\r\n @author = new_author\r\n return self\r\n end", "def author\n @author ||= Readability::Document.new(@html).author\n end", "def check_author\n if self.author.blank?\n self.author = 'anon'\n end\n end", "def author\n @author ||= get_author\n end", "def author\n @author_data ||= read(\"/user/#{@author}/about.json\", :handler => \"User\")\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def page_author(author)\n content_for_layout :meta_author, author\n end", "def author_login=(name)\r\n self.user = User.find_by_login(name)\r\n end", "def author\n page.version.author.name\n end", "def set_author\r\n @author = Author.find(params[:id])\r\n end", "def add_author\n add_author_editor \"author\"\n end", "def author value = nil\n return @author if value.nil?\n @author = value\n end", "def author\n @author ||= begin\n UnfuddleAPI::People.find(self[:author_id]).username\n rescue\n ''\n end\n end", "def 
set_author(name, email=nil)\n self.actor = name ? Grit::Actor.new(name, (email ? email : '')) : nil\n end", "def set_AuthorURL(value)\n set_input(\"AuthorURL\", value)\n end", "def set_author\n @author = Author.find_by_sql(\"SELECT * FROM\n AUTHOR WHERE id =\n #{params[:id].to_i}\")[0]\n end", "def set_AuthorID(value)\n set_input(\"AuthorID\", value)\n end", "def author=(o)\n self.authors = [o]\n end", "def set_comments_author(author = '', author_enc = 0)\n @comments_author = author\n @comments_author_enc = author_enc\n end", "def author\n response = read(\"/user/#{@author}/about.json\", :handler => \"User\") if @author\n @author_data ||= response[0] if response\n end", "def set_author\n @author = Author.includes(:circulations).where(:admin_id => @current_user.id, :id => params[:id]).first\n end", "def author_exuid=(exuid)\n self.author = User.with_param(exuid).first\n end", "def author_name\n h Settings.author_name\n end", "def author\n \"#{user.name} (#{user.login})\"\n end", "def author\n @author ||= User.find_by_id(author_id)\n end", "def author=(value)\n\t\t\t\n\t\t\tif value.class == String\n\t\t\t\tvalue = split_authors(value)\n\t\t\tend\n\t\t\t\n\t\t\traise Bibmix::ReferenceError.new(\"Author has an invalid type (#{value.class}, #{value})\") if value.class != Array\n\t\t\t@author = value\n\t\tend", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def set_author\n @author = Author.find(params[:id])\n end", "def author\n @author ||=\n convert_content(item_attributes!.author) ||\n convert_content(item_attributes!.creator) ||\n \"\"\n end", "def author=(new_author)\n if new_author.respond_to?(:name) &&\n new_author.respond_to?(:email) &&\n new_author.respond_to?(:url)\n # It's a complete author object, just set it.\n @author = new_author\n else\n # We're not looking at an author object, this is probably a string,\n # default to setting the author's name.\n if @author.nil?\n @author = FeedTools::Author.new\n end\n @author.name = new_author\n end\n end", "def author\n\t\t@author\n\tend", "def set_author_site_type\n @author_site_type = SiteType.find(params[:id])\n end", "def author_url\n @author_url ||= begin\n \"http://twitter.com/#{self.author_screenname}\" if self.author_screenname\n end\n end", "def author\n props[ AUTHOR_PROP_NAME ] if props\n end", "def author\n user\n end", "def author=(author)\n\t \t@author = author #tells post its author name\n\t \tif [email protected]?(self)\n\t \t\[email protected] << self # add post to author collection if not already\n\t \tend\n\t end", "def resolved_author; end", "def author\n @info[:Author]\n end", "def author_name=(author_name)\n self.author = Author.find_or_create_by(name: author_name)\n end", "def author\n @author ||= Person.new(connection, @attributes[:author])\n end", "def author\n @title_pages.each { |tp| tp.author and return tp.author }\n nil\n end", "def author; @author; end", "def author; @author; end", "def author_uri\n meta['author']\n end", "def set_references_author\n @references_author = References::Author.find(params[:id])\n end", "def author\n @author ||= User.find_by_id(author_id) unless author_id.nil?\n end", "def author\n user || OpenStruct.new({\n :nicename => external_author_name,\n :email => \"%s@%s\" % [external_author_id, Doers::Config.app_id]\n })\n end", "def author\n @author ||= begin\n name = grit.config['user.name']\n email = grit.config['user.email']\n Grit::Actor.new(name, email)\n end\n end", "def author\n url = 
@json['user']['meta']['author']\n data = client.get url\n client.factory.create(GoodData::Membership, data)\n end", "def set_creator\n @creator = Author.find(params[:id])\n end", "def author; end", "def set_author_widget\n @author_widget = Widget.where(id: params[:id]).first\n end", "def the_author(content=nil)\n if node = content || @content || Content.get(params[:id]) \n author = node.author\n end\n end", "def meta_author\n \"Ahmed Nadar\"\n end", "def set_author_on_subject(author, subject)\n if content = subject.contents.first\n content.update_attributes(\n authorable_id: author.id,\n authorable_type: author.class.name\n )\n end\n end", "def author\n @author\n end", "def get_author\n @image.user = current_user\n end", "def set_paper_author\n @paper_author = PaperAuthor.find(params[:id])\n end", "def author; User.get(self.author_id); end", "def default_author\n if author \n author \n else\n 'joe'\n end \n end", "def author_name\n self.author ? \"#{self.author}\" : 'Anonymous'\n end", "def\n get_author()\n @author\n end", "def get_author()\n @author\n end", "def author\n if @author.nil?\n @author = FeedTools::Author.new\n author_node = FeedTools::XmlHelper.try_xpaths(self.channel_node, [\n \"atom10:author\",\n \"atom03:author\",\n \"atom:author\",\n \"author\",\n \"managingEditor\",\n \"dc:author\",\n \"dc:creator\"\n ])\n unless author_node.nil?\n @author.raw = FeedTools::XmlHelper.try_xpaths(\n author_node, [\"text()\"], :select_result_value => true)\n @author.raw = FeedTools::HtmlHelper.unescape_entities(@author.raw)\n unless @author.raw.nil?\n raw_scan = @author.raw.scan(\n /(.*)\\((\\b[A-Z0-9._%-\\+]+@[A-Z0-9._%-]+\\.[A-Z]{2,4}\\b)\\)/i)\n if raw_scan.nil? || raw_scan.size == 0\n raw_scan = @author.raw.scan(\n /(\\b[A-Z0-9._%-\\+]+@[A-Z0-9._%-]+\\.[A-Z]{2,4}\\b)\\s*\\((.*)\\)/i)\n unless raw_scan.size == 0\n author_raw_pair = raw_scan.first.reverse\n end\n else\n author_raw_pair = raw_scan.first\n end\n if raw_scan.nil? || raw_scan.size == 0\n email_scan = @author.raw.scan(\n /\\b[A-Z0-9._%-\\+]+@[A-Z0-9._%-]+\\.[A-Z]{2,4}\\b/i)\n if email_scan != nil && email_scan.size > 0\n @author.email = email_scan.first.strip\n end\n end\n unless author_raw_pair.nil? 
|| author_raw_pair.size == 0\n @author.name = author_raw_pair.first.strip\n @author.email = author_raw_pair.last.strip\n else\n unless @author.raw.include?(\"@\")\n # We can be reasonably sure we are looking at something\n # that the creator didn't intend to contain an email address\n # if it got through the preceeding regexes and it doesn't\n # contain the tell-tale '@' symbol.\n @author.name = @author.raw\n end\n end\n end\n if @author.name.blank?\n @author.name = FeedTools::HtmlHelper.unescape_entities(\n FeedTools::XmlHelper.try_xpaths(author_node, [\n \"atom10:name/text()\",\n \"atom03:name/text()\",\n \"atom:name/text()\",\n \"name/text()\",\n \"@name\"\n ], :select_result_value => true)\n )\n end\n if @author.email.blank?\n @author.email = FeedTools::HtmlHelper.unescape_entities(\n FeedTools::XmlHelper.try_xpaths(author_node, [\n \"atom10:email/text()\",\n \"atom03:email/text()\",\n \"atom:email/text()\",\n \"email/text()\",\n \"@email\"\n ], :select_result_value => true)\n )\n end\n if @author.url.blank?\n @author.url = FeedTools::HtmlHelper.unescape_entities(\n FeedTools::XmlHelper.try_xpaths(author_node, [\n \"atom10:url/text()\",\n \"atom03:url/text()\",\n \"atom:url/text()\",\n \"url/text()\",\n \"atom10:uri/text()\",\n \"atom03:uri/text()\",\n \"atom:uri/text()\",\n \"uri/text()\",\n \"@href\",\n \"@uri\",\n \"@href\"\n ], :select_result_value => true)\n )\n end\n if @author.name.blank? && [email protected]? &&\n [email protected]?\n name_scan = @author.raw.scan(\n /\"?([^\"]*)\"? ?[\\(<].*#{@author.email}.*[\\)>].*/)\n if name_scan.flatten.size == 1\n @author.name = name_scan.flatten[0].strip\n end\n if @author.name.blank?\n name_scan = @author.raw.scan(\n /.*#{@author.email} ?[\\(<]\"?([^\"]*)\"?[\\)>].*/)\n if name_scan.flatten.size == 1\n @author.name = name_scan.flatten[0].strip\n end\n end\n end\n @author.name = nil if @author.name.blank?\n @author.raw = nil if @author.raw.blank?\n @author.email = nil if @author.email.blank?\n @author.url = nil if @author.url.blank?\n if @author.url != nil\n begin\n if !(@author.url =~ /^file:/) &&\n !FeedTools::UriHelper.is_uri?(@author.url)\n @author.url = FeedTools::UriHelper.resolve_relative_uri(\n @author.url, [author_node.base_uri, self.base_uri])\n end\n rescue\n end\n end\n if FeedTools::XmlHelper.try_xpaths(author_node,\n [\"@gr:unknown-author\"], :select_result_value => true) == \"true\"\n if @author.name == \"(author unknown)\"\n @author.name = nil\n end\n end\n end\n # Fallback on the itunes module if we didn't find an author name\n begin\n @author.name = self.itunes_author if @author.name.nil?\n rescue\n @author.name = nil\n end\n end\n return @author\n end" ]
[ "0.7599614", "0.75797874", "0.75769466", "0.7539164", "0.7435064", "0.7407946", "0.738863", "0.7374768", "0.73689765", "0.7353703", "0.73254436", "0.7293326", "0.72781205", "0.72464", "0.71533936", "0.71533936", "0.7140008", "0.7140008", "0.7140008", "0.7140008", "0.71172047", "0.70957404", "0.7081737", "0.706958", "0.70639265", "0.7062347", "0.7032676", "0.7002679", "0.6986935", "0.6986935", "0.6986935", "0.6986935", "0.6986935", "0.6986935", "0.6986935", "0.6986935", "0.6986935", "0.6986935", "0.6986935", "0.6986935", "0.6986935", "0.69850314", "0.69823", "0.6933811", "0.6930414", "0.69164854", "0.6903368", "0.6892877", "0.68814737", "0.6872965", "0.68728095", "0.68585664", "0.6832456", "0.681651", "0.67908555", "0.6782887", "0.6737947", "0.6727883", "0.66659826", "0.66564417", "0.6656383", "0.6640416", "0.6640416", "0.6640416", "0.66308796", "0.6628608", "0.6592527", "0.6581883", "0.65644974", "0.65644616", "0.6558092", "0.65572774", "0.6553886", "0.6526171", "0.65006757", "0.6491563", "0.6490794", "0.64860517", "0.64860517", "0.6482236", "0.64780945", "0.6466414", "0.6454332", "0.6450542", "0.644026", "0.64399904", "0.6438082", "0.6430181", "0.64167506", "0.64019424", "0.6395962", "0.63838804", "0.6369948", "0.63678896", "0.6365655", "0.6346311", "0.63366175", "0.6325563", "0.6316755", "0.63145065" ]
0.6746163
56
Sets the description of the site
def meta_description # Change the value below between the quotes. "File Repository for EZ Troubleshooter" end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_description(page)\n if !page.data['description']\n page.data['description'] = \"Check end-of-life, release policy and support schedule for #{page.data['title']}.\"\n end\n end", "def set_title_and_description\n website = Nokogiri::HTML(open(self.url))\n self.title = website.css('title').text\n self.description = website.at(\"meta[name='description']\")['content'] unless website.at(\"meta[name='description']\").nil?\n end", "def set_meta_description\n html = html_overview || html_content || ''\n\n self.meta_description =\n html.\n gsub(/<\\/?[^>]*>/, ' '). # replace HTML tags with spaces\n gsub(/&\\w{1,9};|\"/, ''). # remove HTML special chars and double quotes\n gsub(/\\n+/, \" \"). # remove new lines\n gsub(/\\s+/, ' '). # remove duplicated spaces\n strip[0..200] # strip spaces and get first 200 chars\n end", "def set_description\n\t\t@description = \"Sales by Rails\"\n\tend", "def set_title_and_description\n \t\tresp = get_resp(self.url)\n \t\tself.title = resp.match(/<title>(.+)<\\/title>/)[1]\n \t\tself.description = resp.match(/<meta name=\"description\" content=\"([^\\\"]*)/)[1]\n \t\tself.save!\n \tend", "def description\n @description = \"Espresso\"\n end", "def setDescription(description)\n\t\tRestClient.put(\"https://#{Cbthelper.username}:#{Cbthelper.authkey}@crossbrowsertesting.com/api/v3/selenium/#{@testId}\",\n \"action=set_description&description=#{description}\")\n\tend", "def meta_description\n\t\t\"Add your website description here. Find me in application_helper.rb\"\n\tend", "def description=(desc)\n zombie_check\n @metadata.update(@name, desc: desc) \n end", "def page_description\n content_tag :meta, \"\", {name: \"description\", value: current_page.data.description } if current_page.data.description\n end", "def description=(desc)\n @link.Description = desc\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description=(value)\n @description = value\n end", "def description(page_description) \n content_for(:description) do \n \"<meta name=\\\"description\\\" content=\\\"#{page_description}\\\" />\\n\" \n end \n end", "def details\n format_description(@description, 25) 
+ \"site name: \" + (@site_name || \"\")\n end", "def desc( description )\n @description = description\n end", "def description=(description)\n end", "def site_description\n headings = @doc.xpath(\"//h3[@class='clearl']\")\n content_sections = @doc.xpath(\"//h3[@class='clearl']/following-sibling::p[1]\")\n content = \"\"\n headings.zip(content_sections).each do |h, c| \n unless (c.to_s().squeeze().empty?)\n content << \"<h3>#{sanitize(h.to_s)}</h3>\" \n content << \"<p>#{sanitize(c.to_s)}\"\n end\n end\n rhtml = IO.read(File.expand_path(\"site_description.rhtml\", File.dirname(__FILE__)))\n content_html = Erubis::Eruby.new(rhtml)\n content_html.result(:content => content)\n end", "def setDescription(description)\r\n\t\t\t\t\t@description = description\r\n\t\t\t\tend", "def setDescription(description)\r\n\t\t\t\t\t@description = description\r\n\t\t\t\tend", "def set_description(description)\n @description = description\n end", "def set_description\n self.description = \"#{self.manufacturer.code} #{self.manufacturer_model}\" unless self.manufacturer.nil?\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def create_meta_description\n '<meta name=\"description\" content=\"' + item[:description] + '\"/>' if item[:description]\n end", "def description\n @description = \"Dark Roast Coffee\"\n end", "def description(page_description)\n content_for(:description) do\n \"<meta name=\\\"description\\\" content=\\\"#{page_description}\\\" />\\n\"\n end\n end", "def description(page_description)\n content_for(:description) do\n \"<meta name=\\\"description\\\" content=\\\"#{page_description}\\\" />\\n\"\n end\n end", "def desc\n\t\t\"Useful for analyzing scanned web sites later.\"\n\tend", "def page_meta_description\n \"To encourage an atmosphere of responsibility and transparency, while creating the most positive social network experience, Ello has created this bill of rights for all social network users.\"\n end", "def set_Description(value)\n set_input(\"Description\", value)\n end", "def set_page_description(hash, order = nil, type = :description)\n @page_description = page_meta_string_builder(type, hash, order)\n end", "def desc(txt)\n @@desc = txt\n end", "def description\n meta['description'] || extract_description\n end", "def description\n @description = \"House Blend Coffee\"\n end", "def description\n @description = PageDescription[self.description_name.to_sym] if self.description_name\n @description\n end", "def description=(str)\n @description = str\n end", "def desc\n\t\t\t\"XSS Framework managing XSS modules\"\n\t\tend", "def description(value)\n @ole.Description = value\n nil\n end", "def description(value)\n @ole.Description = value\n nil\n end", "def 
page_description(description)\n content_for_layout :meta_description, description\n end", "def set_description\n @company = Company.find(params[:company_id])\n @description = @company.description\n end", "def description= desc\n super(Rails::Html::FullSanitizer.new.sanitize(desc))\n end", "def desc(text)\n @storage[:description] = text\n end", "def description=(str)\n @description = str\n write_properties\n end", "def description=(v)\n @description = v\n end", "def description=(description)\r\n\t\t\t`#{BITS::BITSADMIN} /setdescription {#{@id}} \\\"#{description}\\\"`\r\n\t\tend", "def description(doc)\n node = doc.at('/html/head/meta[@name=\"description\"]/@content') and node.text\n end", "def test_overriding_description\r\n\t\t# first, a feature with no description assigned\r\n\t\tfeatures = Feature.for_static_page(static_pages(:aboutus).id)\r\n\r\n\t\tassert_equal(\"Short description\", features.first[:short_description])\r\n\r\n\t\t# now, a feature with a description to override\r\n\t\tfeatures = Feature.for_category(categories(:livestock).id)\r\n\r\n\t\tassert_equal(\"Different description\", features.first[:short_description])\r\n\tend", "def set_description(name, opts = {})\n commands = command_builder('description', opts)\n configure_interface(name, commands)\n end", "def description(desc = nil)\n @description = desc if desc\n @description\n end", "def description=(val)\n self[:description] = val\n end", "def page_description\n if content_for?(:description)\n \"#{yield_content(:description)}\"\n else\n \"Capra is a design agency based in Ottawa, Canada run by husband and wife team Ollie and Kat Kavanagh. Our focus is great design. We love interactive work like websites, games and apps because we get involved in building what we design.\"\n end\n end", "def setDescription(description)\n @description = description.to_s\n end", "def description\n description = \"\"\n end", "def description\n page.render_part('description') rescue ''\n end", "def description\n page.render_part('description') rescue ''\n end", "def about_us\r\n\t@title = \"About Us\"\r\n end", "def meta_description\n read_attribute(:meta_description).blank? ? self.intro[0, 255] : read_attribute(:meta_description)\n end", "def set_meta( title, *description )\n\t\t@title = title\n\t\t@description = description.first[0..200] unless description.first.blank?\n\tend", "def description\n meta_description.nil? ? secondary_description : meta_description\n end", "def description\n meta_description.nil? ? secondary_description : meta_description\n end", "def set_info\n @page_header = 'Insight Engine'\n @page_secondary = 'Let\\'s get a view at 10,000 feet.'\n @page_title = 'LeadAccount | Insight Engine'\n @page_icon = 'lightbulb'\n end", "def typus_description\n Typus::Configuration.config[self.name]['description']\n end", "def set_description(text)\n @description_label.set_text(text)\n if text\n @description_label.set_hidden(false)\n else\n @description_label.set_hidden(true)\n end\n end", "def description d\n @description = d\n end" ]
[ "0.7389236", "0.6992278", "0.68011177", "0.66967404", "0.6505359", "0.64826983", "0.64793044", "0.64503634", "0.6393473", "0.63174766", "0.62601244", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6230144", "0.6226859", "0.61974674", "0.61723024", "0.61399686", "0.6082257", "0.608088", "0.608088", "0.60520476", "0.60517305", "0.60290205", "0.60290205", "0.60290205", "0.60290205", "0.60290205", "0.60290205", "0.60290205", "0.60290205", "0.60290205", "0.60290205", "0.60290205", "0.60290205", "0.6017658", "0.60155386", "0.5991671", "0.5991671", "0.59882224", "0.5934152", "0.59305096", "0.5921478", "0.59156024", "0.5904218", "0.59027684", "0.5900835", "0.5897237", "0.58939445", "0.58693635", "0.58693635", "0.58530104", "0.58517", "0.58507663", "0.5842853", "0.5840022", "0.5835358", "0.582397", "0.58197695", "0.5813077", "0.58102804", "0.5797419", "0.5779879", "0.57708263", "0.5770044", "0.5766261", "0.575763", "0.575763", "0.57536215", "0.5749771", "0.5748604", "0.57438517", "0.57438517", "0.57264805", "0.572593", "0.57213116", "0.5719532" ]
0.0
-1
Sets the key words for the site
def meta_keywords # Change the value below between the quotes. "ezonline, ez troubleshooter, repository" end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_keywords\n\t\t\tself.keywords = [title, author, description].map(&:downcase).join(' ')\n\t\tend", "def the_keywords\n k = object.get_option(\"keywords\", \"\")\n k = h.current_site.the_keywords if object.class.name != \"CamaleonCms::Site\" && !k.present?\n k.to_s.translate(get_locale)\n end", "def set_keywords\n # keywords = '' -> can't write this way because keywords will only be a\n # local variable and won't reference the model's property. would work if\n # we were wanting to read only but not for property assignment\n self.keywords = [title, author, description].map(&:downcase).join(' ')\n # &: is symbol to proc\n # it knows that the variables in the array are referring to properties on\n # self\n end", "def set_Keywords(value)\n set_input(\"Keywords\", value)\n end", "def initialize\n @words = (KEYWORDS + OTHERS).map(&:downcase)\n end", "def set_sitekey( keyname, keyvalue)\n if sitekey_exist? keyname\n @site_data[\"site\"][keyname] = keyvalue\n end\n end", "def set_words\n @en_word = params[:en] && English.find_by(name: params[:en].downcase)\n @ru_word = params[:ru] && Russian.find_by(name: params[:ru].downcase)\n end", "def set_WordSense(value)\n set_input(\"WordSense\", value)\n end", "def set_tool_keywords\n keywords = fetch_science_keywords\n if keywords.key? 'category'\n keywords['category'].each do |category|\n if category['value'] == 'EARTH SCIENCE'\n keywords['category'].delete(category)\n break\n end\n end\n end\n @tool_keywords = keywords\n end", "def meta_keywords\n\t\t\"Add your keywords here. Find me in application_helper.rb\"\n\tend", "def SetKeywords(keywords)\n\t\t#Keywords of document\n\t\t@keywords = keywords\n\tend", "def setKeywords(keywords)\n @fields['keywords'] = keywords\n self\n end", "def setKeywords(keywords)\n @fields['keywords'] = keywords\n self\n end", "def setKeywords(keywords)\n @fields['keywords'] = keywords\n self\n end", "def setKeywords(keywords)\n @fields['keywords'] = keywords\n self\n end", "def set_keywords=(value)\n self.product_keywords = value ? value.split(',').map{|w| w.strip} : []\n end", "def set_keywords=(value)\n self.product_keywords = value ? value.split(',').map{|w| w.strip} : []\n end", "def set_keywords=(value)\n self.product_keywords = value ? value.split(',').map{|w| w.strip} : []\n end", "def keywords\n @keywords_accessed = true\n @keywords\n end", "def set_search_terms\n if relevent_dish\n self.search_terms = self.name_translations.collect do |key,value|\n next value\n end.join(\" \")\n if self.restaurant\n self.restaurant_name = self.restaurant.name\n end\n if img = self.image.first\n self.top_image = img.img_url_medium\n end\n else\n self.search_terms = nil\n end\n save\n end", "def set_Word(value)\n set_input(\"Word\", value)\n end", "def set_Word(value)\n set_input(\"Word\", value)\n end", "def set_Word(value)\n set_input(\"Word\", value)\n end", "def set_Word(value)\n set_input(\"Word\", value)\n end", "def set_Word(value)\n set_input(\"Word\", value)\n end", "def set_Word(value)\n set_input(\"Word\", value)\n end", "def set_Word(value)\n set_input(\"Word\", value)\n end", "def set_Word(value)\n set_input(\"Word\", value)\n end", "def set_Word(value)\n set_input(\"Word\", value)\n end", "def update\n words = params['replacelist'].split(/\\s+/)\n words << '?' if words.length.odd? 
#ensure even-number length to make a hash\n replacelist = Hash[*words]\n Setting['replacelist'] = replacelist\n Setting['blacklist'] = params['blacklist'].split(/[\\r\\n]+/)\n redirect_to :controller => 'admin/keywords', :action => :index\n end", "def set_page_defaults\n @site_defaults = SiteConfiguration.all\n @page_title ||= 'Aeromotion Pages'\n @seo_keywords ||= 'Aeromotion, Tutorials, Workouts, Best Trainings, Free Videos, Personal, Best'\n end", "def setsecretword word\r\n\t\t @secretword = word\r\n\t\t templ = createtemplate\r\n\t\t\t\t\tword_templ = templ.gsub(\"[\",\"\").gsub(\"]\",\"\")\r\n\t\t\t\t\ti=0\r\n\t\t\t\t\[email protected]\r\n\t\t\t\t\twhile i < sec_word_array.length do\r\n\t\t\t\t\t\tif sec_word_array[i] == \" \"\r\n\t\t\t\t\t @resulta[i] = \" \"\r\n\t\t\t\t\t else\r\n\t\t\t\t\t \t@resulta[i] = \"_\"\r\n\t\t\t\t end\r\n\t\t\t\t i+=1\r\n\t\t\t\t end\r\n\t\t end", "def keyword_params\n params.require(:keyword).permit(:site_id, :word)\n end", "def set_keywords\n self.search_fields.keys.each do |namespace| \n self._keywords[namespace] = self.search_fields[namespace].map do |field|\n if field.is_a?(Hash) \n field.keys.map do |key|\n attribute = self.send(key)\n method = field[key] \n attribute = [attribute] if !attribute.is_a?(Array) \n method = [method] if !method.is_a?(Array)\n method.map {|m| attribute.map { |a| Util.keywords a.send(m), stem_keywords[namespace], ignore_list[namespace] } }\n end\n else \n value = self[field]\n value = [value] if !value.is_a?(Array)\n value.map {|v| Util.keywords(v, stem_keywords[namespace], ignore_list[namespace]) if v}\n end\n end.flatten.map(&:to_s).select{|f| not f.empty? }.uniq.sort\n \n end\n end", "def set_keywords\n self._keywords = self.search_fields.map do |field|\n if field.is_a?(Hash)\n field.keys.map do |key|\n attribute = self.send(key)\n method = field[key]\n if attribute.is_a?(Array)\n if method.is_a?(Array)\n method.map {|m| attribute.map { |a| KeywordsExtractor.extract a.send(m) } }\n else\n attribute.map(&method).map { |t| KeywordsExtractor.extract t }\n end\n else\n KeywordsExtractor.extract(attribute.send(method))\n end\n end\n else\n KeywordsExtractor.extract(self.send(field))\n end\n end.flatten.compact.sort\n end", "def keywords( *args )\n @allowed_keywords = Array([*args])\n end", "def index_keywords!\n update_attribute(:_keywords, set_keywords)\n end", "def fix_keywords\n @fix_keywords_cache ||= all_applicable_hooks.map do |hook|\n hook.keywords.map { |keyword| keyword.downcase.strip }\n end.flatten.uniq\n end", "def set_WordList(value)\n set_input(\"WordList\", value)\n end", "def set_WordList(value)\n set_input(\"WordList\", value)\n end", "def set_WordList(value)\n set_input(\"WordList\", value)\n end", "def set_keyword\n @keyword = Keyword.friendly.find(params[:id])\n end", "def set_keywords\n product_keywords ? product_keywords.join(', ') : ''\n end", "def set_keywords\n product_keywords ? product_keywords.join(', ') : ''\n end", "def set_keywords\n product_keywords ? product_keywords.join(', ') : ''\n end", "def keywords(keywords)\n ensure_valid_parameter('keywords', keywords, lambda {|k| k.is_a? Array or k.is_a? String})\n keywords = keywords.join(' ') if keywords.is_a? 
Array\n @request[:keywords] = keywords\n self\n end", "def set_Keyword(value)\n set_input(\"Keyword\", value)\n end", "def set_Keyword(value)\n set_input(\"Keyword\", value)\n end", "def keywords(page_keywords) \n content_for(:keywords){\"<meta name=\\\"keywords\\\" content=\\\"#{page_keywords}\\\" />\\n\"} \n end", "def get_keywords(site)\n response = Net::HTTP.get_response(site, \"/\")\n Hpricot(response.body).\n search(\"meta[@name='keywords']\")[0]. #Select meta keywords element\n attributes[\"content\"]. #Select its content\n split(\",\"). #Keywords are coma separated\n collect{ |k| k.strip.downcase } #Remove start and end white spaces\nend", "def update!(**args)\n @words = args[:words] if args.key?(:words)\n end", "def set_args\n get_keywords.join(\"\\\\|\")\n end", "def set_site_meta_tags\n config = ZanoboRails::Crawlable.configuration\n\n site_ids = {\n separator: config.page_title_sitename_separator,\n reverse: config.page_title_sitename_pos == 'right'\n }\n\n if config.gplus_id.present?\n site_ids[:publisher] = \"https://plus.google.com/#{config.gplus_id}\"\n end\n if config.twitter_id.present?\n site_ids[:twitter] = \"@#{config.twitter_id}\"\n end\n if config.fb_app_id.present?\n site_ids[:fb] = { app_id: \"@#{config.fb_app_id}\" }\n end\n if config.site_name.present?\n site_ids[:open_graph] = { site_name: config.site_name }\n site_ids[:site] = config.site_name\n end\n\n set_meta_tags(site_ids)\n end", "def set_titles_controls\n @control_help_window.add_control(Vocab::CONFIRM, Input::Keys::A) \n @control_help_window.add_control(Vocab::BACK, Input::Keys::B)\n @control_help_window.add_control(Vocab::DESCRIPTION, Input::Keys::START)\n end", "def keywords(page_keywords)\n content_for(:keywords) do\n \"<meta name=\\\"keywords\\\" content=\\\"#{page_keywords}\\\" />\\n\"\n end\n end", "def keywords(page_keywords)\n content_for(:keywords) do\n \"<meta name=\\\"keywords\\\" content=\\\"#{page_keywords}\\\" />\\n\"\n end\n end", "def keywords_feed(site_id)\n KEYWORDS_URL % [CGI::escape(site_id || '')]\n end", "def secret_word=(word)\n @secret_word = word.upcase\n @secret_word_length = word.length\n end", "def set_meta_tags_title\n set_meta_tags site: 'benradler.com'\n end", "def set_keyword\n @keyword = User.find(current_user.id).collections.find(params[:collection_id]).keywords.find(params[:id])\n end", "def setup(options = {})\n options.symbolize_keys.each_pair do |k, v|\n self.send(\"#{k}=\", v)\n end\n # Set the site for the ActiveResource objects\n Resource.site = site\n # Special for keywords\n Keyword.site = \"#{site}watched_domains/:watched_domain_id\"\n self\n end", "def global_vars\n @title = 'Wytwarzanie Pomocy Dydaktycznych: Pomoce dydaktyczne, szkolne i naukowe dla przedszkoli szkoł'\n @desription = ''\n @keywords = 'pomoce dydaktyczne, pomoce szkolne, pomoce naukowe, pomoce dydaktyczne dla szkół, pomoce dydaktyczne dla przedszkoli, radosna szkoła, Wytwarzanie Pomocy Dydaktycznych, wpd, wpd.pl'\n end", "def add_word word #Function shovels individual strings into the dictionary array\n @dictionary << word\n end", "def set_capacities_controls\n @control_help_window.add_control(Vocab::TOGGLE, Input::Keys::A) \n @control_help_window.add_control(Vocab::BACK, Input::Keys::B)\n @control_help_window.add_control(Vocab::CHANGE_CATEGORY, Input::Keys::L, Input::Keys::R)\n end", "def set_seo_meta(title,keywords = '',desc = '')\n if title\n @page_title = \"#{title}\"\n if params[:page]\n @page_title += \" &raquo; (First#{params[:page]}Page)\"\n end\n @page_title += \" &raquo; 
#{@setting.site_name}\"\n else\n @page_title = @setting.site_name\n end\n @meta_keywords = keywords\n @meta_description = desc\n end", "def word_list=(list)\n end", "def set_keyword\n journal = Journal.find(params[:journal_id])\n issue = journal.issues.find(params[:issue_id])\n article = issue.articles.find(params[:article_id])\n @keyword = article.keywords.find(params[:id])\n end", "def show\n set_word\n end", "def set_word\n @word = Word.friendly.find(params[:id])\n end", "def seo_for_page\n PAGE_SEO_META.each do |meta|\n send(\"set_#{meta}_key\", \"#{seo_condition[:prefix]}#{meta}\")\n end\n\n set_title_variables(seo_variables)\n end", "def key_words\n # because dollar signs don't work we can't display a $ sign in the keyword\n # list so its not explicitly clear what the keyword\n variables = scope.to_hash.keys\n # prepend a :: to topscope variables\n scoped_vars = variables.map { |k, _v| scope.compiler.topscope.exist?(k) ? \"$::#{k}\" : \"$#{k}\" }\n PuppetDebugger::InputResponders::Functions.instance.debugger = self\n funcs = PuppetDebugger::InputResponders::Functions.instance.func_list\n PuppetDebugger::InputResponders::Datatypes.instance.debugger = self\n (scoped_vars + funcs + static_responder_list + PuppetDebugger::InputResponders::Datatypes.instance.all_data_types).uniq.sort\n end", "def set_suspicious_keyword\n @suspicious_keyword = SuspiciousKeyword.find(params[:id])\n end", "def set_RelatedWords(value)\n set_input(\"RelatedWords\", value)\n end", "def set_game_variables\n\t\tword = @dictionary.random_word\n\t\t@answer_array = word.split('')\n\t\t@blank_word_array = []\n\t\tword.length.times do \n\t\t\t@blank_word_array << \"_\"\n\t\tend\n\t\t@guess_counter = 6\n\t\t@incorrect_array = []\n\tend", "def set_welcome\n end", "def add_words(new_words)\n new_words.each do |word|\n add(word)\n end\n end", "def set_search_text!\n self.text['a'] = extract_search_text :title, :meta_description\n self.text['b'] = extract_search_text :slug, :path\n self.text['c'] = extract_search_text *self.class.field_names(:textual)\n self.text['d'] = nil\n end", "def create_keywords\n\n keywords = t(LOCALE_KEYWORDS_KEY, default: SiteMetaInfoDefaults.keywords)\n business_cats = BusinessCategory.pluck(:name).uniq\n\n cats_str = business_cats.empty? ? 
'' : ', ' + business_cats.join(', ')\n keywords + cats_str\n end", "def update_pose_words\n\n # Step 1: get an array of all words for the current object.\n search_text = instance_eval &(self.class.pose_content)\n new_words = Query.new([], search_text.to_s).query_words\n\n # Step 2: Add new words to the search index.\n Helpers.get_words_to_add(self.pose_words, new_words).each do |word_to_add|\n self.pose_words << Word.find_or_create_by(text: word_to_add)\n end\n\n # Step 3: Remove now obsolete words from search index.\n Helpers.get_words_to_remove(self.pose_words, new_words).each do |word_to_remove|\n self.pose_words.delete word_to_remove\n end\n end", "def keywords=(list)\n tmp_fname = File.join(@maildir.path, 'tmp', unique_name)\n File.open(tmp_fname, 'w') { |f| f.write(list.join(\"\\n\")) }\n FileUtils.mv(tmp_fname, File.join(@maildir.keyword_dir, unique_name))\n end", "def init\n @words = dictionary.sample(20).map(&:strip)\n end", "def keywords_attributes=(new_keywords)\n keywords.clear\n\n (new_keywords || []).each do |word|\n keywords.build(:word => word)\n end\n end", "def initialize \n @story_keywords = [] # Creates the array that will hold the part of speech keywords\n end", "def update!(**args)\n @knowledge_base_name = args[:knowledge_base_name] if args.key?(:knowledge_base_name)\n end", "def madlib(words)\n words = { noun: nil, verb: nil, adjective: nil, adverb: nil }\n \n words.each do |key, value|\n print \"=> Choose your #{key}: \"\n words[key] = gets.chomp\n end\n\n puts \"Your #{words[:adjective]} #{words[:noun]} wants to #{words[:adverb]}\" +\n \" #{words[:verb]}. LOL.\"\nend", "def set_word\n @word = Word.find_by(id:params[:id], user_id:current_user.id)\n end", "def upcase_keys!\n replace(upcase_keys)\n end", "def index\n @sites = Xmt::Press::Site.all.page(params[:page])\n unless params[:key].blank?\n key = /.*#{params[:key]}.*/\n @sites = @sites.where(title: key).page(params[:page])\n end\n end", "def keyword_params\n params.require(:keyword).permit(:value, :site_id)\n end", "def set_wording\n\t\t\t@wording = Wording.find(params[:id])\n\t\tend", "def meta_keywords(*args)\n if args.any?\n @meta_keywords = Array(args.first).join(\", \")\n else\n keywords = @meta_keywords\n keywords ||= @page.tag_list if @page&.tags&.any?\n strip_tags(keywords)\n end\n end", "def add_keyword\n @item.keywords = @bib.keyword.map(&:content).join(\", \") if @bib.keyword.any?\n end", "def set_seo_params\n @seo = {\n :title => ::Setting::get('Title for index page'),\n :keywords => ::Setting::get('Keywords for index page'),\n :description => ::Setting::get('Description for index page')\n }\n end", "def set_guide_variables\r\n @region = Region.friendly.find(params[:region_name]) if params[:region_name]\r\n @sport = Sport.friendly.find(params[:sport_name]) if params[:sport_name]\r\n \r\n @title = page_title\r\n @meta_keywords = page_meta_keywords\r\n @meta_description = page_meta_description \r\n end", "def update!(**args)\n @spoken_languages = args[:spoken_languages] if args.key?(:spoken_languages)\n end", "def update_word_state\n @secret.chars.each_with_index do |letter, idx| \n @word_state[idx] = letter if @guesses[-1] == letter\n end\n @word_state\n end", "def setguessword\n\t\t@starting_word.each_index do |index|\n\t\t\t@guessing_word[index]=\"_ \"\n\t\tend\n\t\treturn @guessing_word\n\tend", "def set_key!\n self.new_user_key = [*('a'..'z'),*('0'..'9')].shuffle[0,8].join\n end", "def set_seo_data(hash)\n tt = Setting::get('Title for location page')\n unless tt.blank?\n hash[:title] = 
self.process_seo_str tt\n end\n dt = Setting::get('Description for location page')\n unless dt.blank?\n hash[:description] = self.process_seo_str dt\n end\n kt = Setting::get('Keywords for location page')\n unless kt.blank?\n hash[:keywords] = self.process_seo_str kt\n end\n end", "def keywords\r\n @@words.keys.sort\r\nend" ]
[ "0.7074105", "0.63755447", "0.63662577", "0.62822354", "0.6098856", "0.60911876", "0.6025354", "0.5941879", "0.5905031", "0.58012956", "0.5784372", "0.5767428", "0.5767428", "0.5767428", "0.5767428", "0.57613105", "0.57613105", "0.57613105", "0.56434494", "0.5585984", "0.5562129", "0.5562129", "0.5562129", "0.5562129", "0.5562129", "0.5562129", "0.5562129", "0.5562129", "0.5562129", "0.55614096", "0.5548512", "0.5539167", "0.5517743", "0.54837835", "0.5481238", "0.5479879", "0.5479843", "0.5465884", "0.54559803", "0.54559803", "0.54559803", "0.5455588", "0.5453572", "0.5453572", "0.5453572", "0.5445975", "0.5426598", "0.5426598", "0.54246205", "0.5398672", "0.53878397", "0.53530085", "0.53276587", "0.53262013", "0.5325746", "0.5325746", "0.53077394", "0.5300947", "0.5295926", "0.5271888", "0.5270072", "0.5263942", "0.52526647", "0.5251303", "0.5236708", "0.52301395", "0.5227947", "0.5227786", "0.5224529", "0.5219414", "0.5218405", "0.5206453", "0.5194683", "0.51868886", "0.5176972", "0.51710254", "0.5163831", "0.51578903", "0.5157567", "0.5151846", "0.5140402", "0.5140327", "0.5137491", "0.51189065", "0.50907236", "0.5089326", "0.50887626", "0.50877684", "0.50861776", "0.50821584", "0.5080541", "0.5076997", "0.50707215", "0.50700474", "0.5069233", "0.50671333", "0.50564283", "0.5052076", "0.50479674", "0.5043086" ]
0.52336025
65
Returns the full title on a per-page basis. No need to change any of this; we set page_title and site_name elsewhere.
def full_title(page_title) if page_title.empty? site_name else "#{page_title} | #{site_name}" end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def page_title\n @page_title ||= format_string(page[\"title\"]) || site_title\n end", "def title\n @title ||= begin\n if site_title && page_title != site_title\n page_title + TITLE_SEPARATOR + site_title\n elsif site_description && site_title\n site_title + TITLE_SEPARATOR + site_title_extention_or_description\n else\n page_title || site_title\n end\n end\n\n return page_number + @title if page_number\n\n @title\n end", "def full_title(page_title)\n \t\tif page_title.empty?\n \t\t\tsite_name\n \t\telse\n \t\t\t\"#{page_title} | #{site_name}\"\n \t\tend\n \tend", "def full_title(page_title)\n if page_title.empty?\n site_name\n else\n #{page_title} | #{site_name}\n end\n end", "def page_title\n page.title\n end", "def page_title(page)\n return site_title if page.title.nil? || page.title.empty?\n\n title = t(\"titles.#{page.title}\",\n flavor: settings.site_name.capitalize,\n default: [\"docs.#{page.parent}.#{page.title}\".to_sym,\n page.title.to_s.titleize])\n\n if page.parent.nil?\n parent_title = site_title\n else\n parent_title = t(\"titles.#{page.parent}\",\n flavor: settings.site_name.capitalize,\n default: site_title)\n end\n\n \"#{title} | #{parent_title}\"\n end", "def page_title\n @page_title || TaliaCore::SITE_NAME\n end", "def full_title(page_title)\n if page_title.empty?\n site_name\n else\n \"#{page_title} | Alfalfa\"\n end\n end", "def page_title(title = nil)\n if title\n content_for(:page_title) { title }\n else\n content_for?(:page_title) ? content_for(:page_title) : APP_CONFIG[:site_name] # or a hard-coded default\n end\n end", "def title\n @title_pages.each { |tp| tp.title and return tp.title }\n nil\n end", "def full_title(page_title)\n if page_title.empty?\n @@base_title\n else\n \"#{@@base_title} | #{page_title}\"\n end\n end", "def full_title(page_title)\n\t\tpage_title = PAGE_TITLE \n \tbase_title = BASE_TITLE\n if page_title.empty?\n base_title\n else\n \"#{page_title} | #{base_title}\"\n end\n end", "def full_title(page_title)\n base_title = \"Proman 2014\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def page_title\n if @title.present?\n I18n.t('page_title', :page_title => @title, :blog_title => blog_title)\n else\n I18n.t('home_title', :blog_title => blog_title)\n end\n end", "def full_title(page_title = '')\n\t\tif page_title.empty?\n\t\t\tself.base_title\n\t\telse\n\t\t\tpage_title + \" | \" + self.base_title\n\t\tend\n\tend", "def full_title(page_title = '')\n base_title = \"Spookd.me\"\n if page_title.empty?\n base_title\n else \n page_title\n end\n end", "def full_title(page_title)\n base_title = \"Proman 2013\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def full_title page_title\n base_title = \"emo-search\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def full_title(page_title)\n base_title = \"Quick-Score.com\"\n if page_title.empty?\n base_title\n else\n \"#{page_title} @ #{base_title}\"\n end\n end", "def full_title(page_title)\n\t\tif page_title.empty?\n\t\t\tbase_title\n\t\telse\n\t\t\t\"#{base_title} | #{page_title}\"\n\t\tend\n\tend", "def full_title(page_title = '')\n base_title = t('application.site_title')\n if page_title.empty?\n base_title\n else\n page_title + ' - ' + base_title\n end\n end", "def full_title(page_title)\n base_title = 'Sample CMS'\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def full_title(page_title)\n base_title = \"WebSchool\"\n 
if page_title.empty?\n base_title\n else\n \"#{page_title} - #{base_title}\"\n end\n end", "def page_title\n title = content_for?(:title) ? \" - #{content_for(:title)}\" : \"\"\n \"Todobadour#{title}\"\n end", "def full_title(page_title)\n base_title = \"Anand Sampat\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def title\n return @title if @title\n if matches = class_const(:TITLE_RE).match(page)\n @title = matches[1].to_s.strip\n title_processor\n @title = decode_entities(@title)\n end\n end", "def full_title(page_title)\n page_title.blank? ? \"My Defi Pie\" : \"My Defi Pie | #{page_title}\"\n end", "def full_title(page_title)\n\t\tbase_title = \"dreamly\"\n\t\tif page_title.empty?\n\t\t\tbase_title\n\t\telse\n\t\t\t\"#{base_title} | #{page_title}\"\n\t\tend\n\tend", "def full_title(page_title)\n base_title = \"BH Herbarium\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def full_title(page_title = '')\n base_title = app_name\n if page_title.empty?\n base_title\n else\n \"#{page_title} | #{base_title}\"\n end\n end", "def page_title\n return \"#{this_webapp.webapp_name} - #{@page_title}\" if @page_title\n return \"#{this_webapp.webapp_name}\"\n end", "def full_title(page_title)\n\t\tbase_title = \"Hybrid Edu\"\n\t\tif page_title.empty?\n\t\t\tbase_title\n\t\telse\n\t\t\t\"#{base_title} | #{page_title}\"\n\t\tend\n\tend", "def full_title(page_title = '')\n base_title = 'My Money'\n if page_title.empty?\n base_title\n else\n \"#{page_title} | #{base_title}\"\n end\n end", "def title(page_title)\n content_for(:title) do\n \"#{page_title} - #{MySettings.company_full_name}\"\n end\n end", "def full_title(page_title)\n if page_title.empty?\n app_name\n else\n \"#{page_title} | #{app_name}\"\n end\n end", "def title(page_title = '')\n\t\tbase_title = \"AB Online Shop\"\n\t\tif page_title.empty?\n\t\t\tbase_title\n\t\telse\n\t\t\tpage_title + \" | \" + base_title\n\t\tend\n\tend", "def page_title(title)\n content_for_wrapper(:page_title, title)\n end", "def full_title(page_title)\n base_title = \"Secret Sauce\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def full_title(page_title)\n base_title = \"Koprulu Sector\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def page_title\n title = \"Amplify\"\n title.prepend(\"#{@page_title} | \") if @page_title\n title\n end", "def full_title(page_title)\n base_title = \"WDIS\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def full_title(page_title)\n\t\tbase_title = CONFIG[:title] || \"Ruby on Rails Template\"\n\t\tif page_title.empty?\n\t\t\tbase_title\n\t\telse\n\t\t\t\"#{base_title} | #{page_title}\"\n\t\tend\n\tend", "def full_title(page_title)\n\t\tbase_title = CONFIG[:title] || \"Ruby on Rails Template\"\n\t\tif page_title.empty?\n\t\t\tbase_title\n\t\telse\n\t\t\t\"#{base_title} | #{page_title}\"\n\t\tend\n\tend", "def full_title(page_title = '')\n base_title = \"Tradies\"\n if page_title.empty?\n base_title\n else\n page_title + \" | \" + base_title\n end\n end", "def full_title(page_title)\n\t\tbase_title=\"StemLoops\"\n\t\tif page_title.empty?\n\t\t\tbase_title\n\t\telse\n\t\t\tbase_title+\" | \"+page_title\n\t\tend\n\tend", "def full_title(page_title)\n\t\tbase_title = \"Scheduler\"\n\t\tif page_title.empty?\n\t\t\tbase_title\n\t\telse\n\t\t\t\"#{base_title} | #{page_title}\".html_safe\n\t\tend\n\tend", "def 
full_title(page_title)\n base_title = \"Bill Pay Center\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} / #{page_title}\"\n end\n end", "def full_title(page_title)\n base_title = \"ARealtor\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def render_page_title\n @page_title ? \"#{@page_title}_#{SiteName}\" : SiteName rescue \"SITE_NAME\"\n end", "def full_title(page_title = '')\n base_title = \"NKUNet\"\n if page_title.empty?\n base_title\n else\n page_title + \" | \" + base_title\n end\n end", "def full_title(page_title)\n title = base_title\n\n Array(page_title).each do |var|\n title << (' | ' + var) unless var.empty? \n end\n\n title\n end", "def page_title(page_title)\n content_for_layout :page_title, page_title\n end", "def title\n base_title = \"My Site\"\n unless @title.nil?\n \"#{base_title} | #{@title}\"\n else\n base_title\n end\n\n end", "def full_title(page_title)\n base_title = \"Adit\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def full_title(page_title)\n base_title = \"Rails Project\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def full_title(page_title)\n base_title = \"StoryStore\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def title(page_title)\n\t content_for(:title) { page_title }\n\tend", "def full_title(page_title)\n\t\tbase_title = \"Myreef.tv\"\n\t\tif page_title.empty?\n\t\t\tbase_title\n\t\telse\n\t\t\t\"#{base_title} | #{page_title}\"\n\t\tend\n\tend", "def full_title(page_title)\n base_title = \"Relativies\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def page_title( this_title = nil )\n content_for( :title ) { \"#{ SITE_ID }: #{ this_title.nil? ? I18n.t( controller.controller_name + '.title' ) : this_title }\" }\n end", "def base_title(page_title = '')\n base_title = \"Sergio Mironescu\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def full_title(page_title)\n base_title = \"Geocode App\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def page_title title= nil\n\t\tif title\n\t\t\tcontent_for(:page_title) { \"#{title} - 2da.re\" }\n\t\t\treturn title\n\t\telse\n\t\t\tcontent_for?(:page_title) ? 
content_for(:page_title) : \"Ready 2da.re?\"\n\t\tend\n\tend", "def title(page_title)\n content_for(:title) { page_title }\n end", "def title(page_title)\n content_for(:title) { page_title }\n end", "def title(page_title)\n content_for(:title) { page_title }\n end", "def title(page_title)\n content_for(:title) { page_title }\n end", "def full_title(page_title)\n base_title = \"Scirate\"\n if page_title.empty?\n base_title\n else\n sanitize(\"#{base_title} | #{page_title}\")\n end\n end", "def title(page_title)\n base_title = \"Blog Secret Santa\" \n content_for(:title) { \"#{page_title} | #{base_title}\" }\n content_for(:heading) { page_title }\n end", "def title(page_title = nil)\n if page_title\n content_for(:title) do\n page_title\n end\n else\n content_for(:title) do\n \"DateIdeas.ca\"\n end\n end\n end", "def page_title\n end", "def full_title(page_title = '')\n base_title = \"Colegio de Notarios\"\n if page_title.empty?\n base_title\n else\n page_title + \" | \" + base_title\n end\n end", "def title(page_title)\n @title = page_title\n content_for(:title) { page_title }\n end", "def full_title(page_title)\n base_title = 'Vietnam Beauty Spots'\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def title(page_title)\n content_for :page_title, page_title.to_s.html_safe\n end", "def page_title\n layout = controller.send(:_layout)\n base_title = I18n.t(\"layouts.#{layout}.title\", default: :'layouts.application.title')\n\n i18n_scope = \"#{params[:controller].gsub('/', '.')}.#{action_name}\"\n i18n_parts = [\n content_for(:page_title),\n I18n.t(:page_title, default: \"\", scope: i18n_scope).presence,\n I18n.t(:title, default: \"\", scope: i18n_scope).presence\n ]\n title_content = i18n_parts.compact.first\n [base_title, title_content].compact.join(' - ')\n end", "def full_title(page_title = '')\n base_title = \"Hearth Helper\"\n if page_title.empty?\n base_title\n else\n page_title + \" - \" + base_title\n end\n end", "def title(page_title)\n content_for :title do\n page_title\n end\n end", "def full_title(page_title = '')\n base_title = t('layouts.header.heading')\n if page_title.empty?\n base_title\n else\n base_title + \" | \" + page_title\n end\n end", "def full_title(page_title)\n base_title = \"GamePlayDate\"\n if page_title.empty?\n base_title\n else\n \"#{base_title} | #{page_title}\"\n end\n end", "def full_title(page_title = \"\")\n \tbase_title = \"Facebook Clone\"\n \tif page_title.empty?\n \t\tbase_title\n \telse\n \t\tpage_title + \" | \" + base_title\n \tend\n end", "def title_with_page_title_check\n return @page.title if @page && [email protected]?\n title_without_page_title_check\n end", "def title\n base_title = \"CloudSpokes Coding Challenges\"\n if @page_title.nil?\n base_title\n else\n \"#{base_title} - #{@page_title}\"\n end\n end", "def title(title = nil)\n raise TypeError, \"expecting a String or an Array\" unless [String,Array].include?(title.class) || title.nil?\n separator = \" ~ \"\n @page_title = title if title\n if @page_title\n title = @page_title.to_a.flatten\n [@page_title, site_name].flatten.reverse.join(separator)\n else\n site_name\n end\n end", "def title(page_title)\n \tcontent_for(:title) { page_title }\n \tend", "def full_title(page_title = '', hideExtraTitle)\n base_title = \"Henkubao - Your premiere blog and resource for all things Asian food\"\n if page_title.empty?\n base_title\n else\n if hideExtraTitle.empty?\n \"#{page_title} | #{base_title}\"\n else\n \"#{page_title}\"\n end\n \n end\n end", "def 
full_title(page_title = '')\n base_title = \"WriteIt\"\n if page_title.empty?\n base_title\n else\n page_title + \" | \" + base_title\n end\n end", "def render_page_title\n (content_for(:page_title) if content_for?(:page_title)) || @page_title || application_name\n end", "def page_title\n @page_title = \"Nursing System\"\n end", "def site_title\n @site.title\n end", "def get_display_title(title)\n page_info_get_val(title, 'displaytitle', 'displaytitle')\n end", "def title(page_title)\n\t\t\tmode = \"[DEV] \" unless ::Rails.env.production?\n\t\t\tcontent_for(:title) { mode.to_s + page_title + \" | \" }\n\t\tend", "def page_title(page_title = nil)\n @page_title ||= page_title\n @page_title.nil? ? \"Carers: #{action_name}\" : \"#{@page_title} @ Lort Smith\"\n end", "def render_page_title\n (sanitize(content_for(:page_title), tags: []) if content_for?(:page_title)) || sanitize(@page_title, tags: []) || application_name\n end" ]
[ "0.85687196", "0.8331109", "0.824858", "0.82197994", "0.81325006", "0.8109226", "0.8086481", "0.8071382", "0.8039476", "0.8034617", "0.79303885", "0.79068077", "0.79036236", "0.7898914", "0.7893025", "0.7887799", "0.78828776", "0.7867037", "0.7858253", "0.78459156", "0.7845712", "0.78454465", "0.78348947", "0.78322625", "0.78281105", "0.777549", "0.7760675", "0.77389824", "0.7735572", "0.7734163", "0.77328956", "0.7724159", "0.7723835", "0.77192587", "0.769045", "0.76899016", "0.76887584", "0.76852846", "0.76796037", "0.7679494", "0.76748306", "0.76693", "0.76693", "0.76669097", "0.7665105", "0.7660648", "0.7658365", "0.7640126", "0.7639477", "0.7637499", "0.7634233", "0.763023", "0.76203513", "0.76177025", "0.7617351", "0.7615282", "0.76109135", "0.761052", "0.7609105", "0.76088744", "0.7597686", "0.7595381", "0.7593086", "0.75896853", "0.75896853", "0.75896853", "0.75896853", "0.7581013", "0.75791085", "0.7571833", "0.75603074", "0.75589496", "0.7548788", "0.7540993", "0.7540753", "0.7538731", "0.7528422", "0.7523913", "0.7522697", "0.7518211", "0.7508816", "0.75074965", "0.7501282", "0.7498505", "0.749566", "0.74906385", "0.74901825", "0.74879694", "0.7480932", "0.7476315", "0.7471674", "0.7467619", "0.7462303", "0.7458096" ]
0.8256091
7
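For context on the record above: the positive document is the familiar Rails full_title view-helper pattern. A minimal, runnable sketch of how such a helper behaves and is typically called from a layout follows; the module name, SITE_NAME value, and the ERB snippet are illustrative assumptions, not values taken from this dataset.
module TitleHelper
  SITE_NAME = "Example App"  # assumption: in the dataset rows, site_name is defined elsewhere

  module_function

  def full_title(page_title)
    # Mirrors the positive document: fall back to the site name when no page title is given
    page_title.to_s.empty? ? SITE_NAME : "#{page_title} | #{SITE_NAME}"
  end
end

puts TitleHelper.full_title("")       # => "Example App"
puts TitleHelper.full_title("About")  # => "About | Example App"

# Typical ERB usage in a layout (illustrative):
#   <title><%= TitleHelper.full_title(yield(:title)) %></title>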
Choose action depending on level
def flash_class(level) case level when 'notice' then "alert alert-info" when 'success' then "alert alert-success" when 'error' then "alert alert-danger" when 'alert' then "alert alert-error" end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_level_action(key)\n key = Access::Validate.level(key)\n get_hash_value(Access::Core.levels_hash, key, :action)\n end", "def action\n (@current_level = 'X' ; return) if ( @user_choice == 'X' || @user_choice == 'x')\n\n (menu_level_left ; return) if @user_choice == \"\\e[D\"\n\n tmp_lvl = @current_level + '.' + @user_choice\n cmd = @menu_map[tmp_lvl][1]\n cmd ? execute_action(cmd) : @current_level = tmp_lvl\n end", "def menu_choice(action)\n if action == \"a\"\n @humanplayer.search_weapon\n elsif action == \"s\"\n @humanplayer.search_health_pack\n elsif action == \"1\"\n @humanplayer.attack(@player_1)\n elsif action == \"2\"\n @humanplayer.attack(@player_2)\n elsif action == \"3\"\n @humanplayer.attack(@player_3)\n elsif action == \"4\"\n @humanplayer.attack(@player_4)\n else puts \"Please select option a, s, 1, 2, 3 or 4\"\n end\n\n kill_player\n end", "def menu_choice(cmd)\n hplayer = @human_player\n action = cmd\n\n # define possible actions for the player\n if action == \"a\"\n hplayer.search_weapon\n elsif action == \"s\"\n hplayer.search_health_pack\n elsif action.match(/^(\\d)+$/) # check if action is a number\n hplayer.attacks(@enemies[action.to_i])\n if @enemies[action.to_i].life_points <= 0\n kill_player(@enemies[action.to_i])\n end\n end\n \n end", "def decide_action_mode(ary)\n @mode = nil\n\n if @current_health >= 18\n @mode = :rescue_mode\n elsif @current_health >= 15\n @mode = :attack_mode\n elsif @current_health >= 10\n @mode = :defense_mode\n else\n @mode = :recover_mode\n end\n end", "def action_A_menu\n case @intern_mode\n when :choose_move_pokemon\n action_move_current_pokemon\n when :choose_move_item\n return $game_system.se_play($data_system.buzzer_se) if @team_buttons[@index].data.item_holding == 0\n @team_buttons[@move = @index].selected = true\n @intern_mode = :move_item\n @base_ui.show_win_text(text_get(23, 22))\n when :move_pokemon\n process_switch\n when :move_item\n process_item_switch\n else\n $game_system.se_play($data_system.decision_se)\n return show_choice\n end\n $game_system.se_play($data_system.decision_se)\n end", "def actions\n action = [:walk!, stairs]\n action = [:rescue!, direction_of(:captive?)] if nearby?(:captive?)\n action = [:rest!] if starting_health < MAX_HEALTH\n action = [:attack!, direction_of(:enemy?)] if nearby?(:enemy?)\n action = [:bind!, direction_of(:enemy?)] if nearby_count(:enemy?) > SURROUNDED\n\n return action\n end", "def action_do(action)\n\t\tif action == \"a\"\n\t\t\tself.search_weapon\n\t\telsif action == \"s\"\n\t\t\tself.search_health_pack\n\t\telsif action == \"0\"\n\t\t\tself.attacks(@@ennemis[action.to_i])\n\t\telsif action == \"1\"\n\t\t\tself.attacks(@@ennemis[action.to_i])\n\t\tend\n\t\tputs \"\"\t\t\n\tend", "def action_A\n case @mode\n when :menu\n action_A_menu\n else\n $game_system.se_play($data_system.decision_se)\n show_choice\n end\n end", "def action\n if taking_damage?\n taking_damage_action\n elsif @warrior.feel.stairs? #&& ground_covered?\n @warrior.walk!\n elsif @warrior.feel.empty? 
&& @warrior.health < MAX_HEALTH\n @warrior.rest!\n elsif @warrior.feel.enemy?\n @warrior.attack!\n elsif @warrior.feel.captive?\n @warrior.rescue!\n elsif @warrior.feel.empty?\n @warrior.walk!\n elsif @warrior.feel.wall?\n @warrior.pivot!(:backward)\n end\n end", "def switch_choice\n pokemon_to_send = @visual.show_pokemon_choice\n if pokemon_to_send\n pokemon_to_switch = @logic.battler(0, @player_actions.size)\n # The player made a choice we store the action and we check if he can make other choices\n @player_actions << { type: :switch, who: pokemon_to_switch, with: pokemon_to_send }\n pokemon_to_send.switching = true\n pokemon_to_switch.switching = true\n log_debug(\"Action : #{@player_actions.last}\") if debug? # To prevent useless overhead outside debug\n @next_update = can_player_make_another_action_choice? ? :player_action_choice : :trigger_all_AI\n else\n # If the player canceled we return to the player action\n @next_update = :player_action_choice\n end\n end", "def execute_action_skill\n # Call a custom battle method\n ty_execute_action_skill\n # Call the original battle method if still attacking\n if @active_battler.action.kind == 1\n ty_ammo_requirements_execute_skill\n end\n end", "def execute_action_attack\n # Call a custom battle method\n ty_execute_action_attack\n # Call the original battle method if still attacking\n if @active_battler.action.kind == 0\n ty_ammo_requirements_attack\n end\n end", "def local_action(command, id, action)\n super(command,id,ACTION[action])\n end", "def phase3_command_attack\r\n # Set action\r\n @active_battler.current_action.kind = 0\r\n @active_battler.current_action.basic = 0\r\n # Start enemy selection\r\n start_enemy_select\r\n end", "def target_choice\n launcher, skill, target_bank, target_position = @visual.show_target_choice\n if launcher\n # The player made a choice we store the action and check if he can make other choices\n @player_actions << { type: :attack, launcher: launcher, skill: skill, target_bank: target_bank, target_position: target_position }\n log_debug(\"Action : #{@player_actions.last}\") if debug? # To prevent useless overhead outside debug\n @next_update = can_player_make_another_action_choice? ? :player_action_choice : :trigger_all_AI\n else\n # If the player canceled we return to the player action\n @next_update = :player_action_choice\n end\n ensure\n @skip_frame = true\n end", "def select_action\n return :help if @options[:help]\n\n return :version if @options[:version]\n\n return :info if @options[:info]\n\n return :bump if bump_option? 
@options\n\n # command without options invokes info action\n :info\n end", "def set_enemy_action(action)\r\n if action\r\n set_skill(action.skill_id)\r\n else\r\n clear\r\n end\r\n end", "def advanced_enemy_action(char)\n # initialize basic action\n attack = defend = escape = false\n # initialize skill action\n dmg, heal, neutral = [], [], []\n # iterate through all actions\n char.battler.actions.each {|action|\n # conditions\n n = $game_temp.battle_turn\n a = action.condition_turn_a\n b = action.condition_turn_b\n # skip if conditions are not fulfilled\n next if b == 0 && n != a || b > 0 && (n < 1 || n < a || n % b != a % b)\n next if char.battler.hp * 100.0 / char.battler.maxhp > action.condition_hp\n next if $game_party.max_level < action.condition_level\n switch_id = action.condition_switch_id\n next if switch_id > 0 && !$game_switches[switch_id]\n # depending on which basic type of action\n case action.kind\n when 0 # basic action\n case action.basic\n when 0 then attack = true\n when 1 then defend = true\n when 2 then escape = true\n end\n when 1 # skill action\n # if skill can be used\n if char.skill_can_use?(action.skill_id)\n # if damaging skill\n if $data_skills[action.skill_id].power > 0\n # add to array of damaging skills\n dmg.push(action.skill_id)\n # if healing skill\n elsif $data_skills[action.skill_id].power < 0\n # add to array of healing skills\n heal.push(action.skill_id)\n else\n # add to array of neutral skills\n neutral.push(action.skill_id)\n end\n end\n end}\n # decide a target\n decide_target(char, dmg, heal, neutral, attack, defend, escape)\n end", "def menu_choice(choice)\n\n\t\tif choice == \"a\"\n\t\t\thuman_player.search_weapon\n\t\telsif choice == \"s\"\n\t\t\thuman_player.search_health_pack\n\t\telse\n\t\t\ti = 0\n\t\t\tenemies_in_sight.each do |enemy|\n\t\t\t\ti += 1\n\t\t\t\tif i == choice.to_i\n\t\t\t\t\thuman_player.attacks(enemy)\n\t\t\t\t\tunless enemy.life_points > 0\n\t\t\t\t\t\tenemies_in_sight.delete(enemy)\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend", "def action\n warrior_action, direction = actions\n\n if direction.nil?\n warrior.send(warrior_action)\n else\n warrior.send(warrior_action, direction)\n end\n end", "def do_action\n if self.affects == \"world\" then\n player_tile = self.character.tile\n\n # an aoe effect is represented as a list of objects,\n # each one representing the effect on one tile\n ITEM_PROPERTIES[self.item_type][\"aoe\"].each do |aoe|\n dx = aoe[\"xCoordPlus\"]\n dy = aoe[\"yCoordPlus\"]\n tile_becomes = aoe[\"tileBecomes\"]\n Tile.tile_at(player_tile.x + dx, player_tile.y + dy).become tile_becomes\n end\n\n elsif self.affects == \"player\" then\n\n dx = self.moves_player_x\n dy = self.moves_player_y\n\n # Move me to the place this item takes me\n if (dx != 0 or dy != 0) then\n target_tile = Tile.tile_at(self.character.tile.x + dx,\n self.character.tile.y + dy)\n if target_tile\n self.character.tile = target_tile\n end\n end\n\n self.character.heal(self.health_effect)\n self.character.charge(self.battery_effect)\n end\n\n if self.consumable then\n self.character.item = nil\n self.destroy\n end\n\n end", "def display_action_menu(player1, player2)\n puts \"\\nQuelle action veux-tu effectuer ?\"\n puts \"a - chercher une meilleure arme\"\n puts \"s - chercher à se soigner\"\n puts \"\\nattaquer un joueur en vue :\"\n print \"0 \"\n player1.show_state if player1.life_points > 0\n print \"1 \"\n player2.show_state if player2.life_points > 0\n end", "def chooser(action , aDragon)\r\n\tif action == 
1\r\n\t\taDragon.full?\r\n\telsif action == 2\r\n\t\taDragon.feed\r\n\telsif action == 3\r\n\t\taDragon.needsToPoop?\r\n\telsif action == 4\r\n\t\taDragon.poop\r\n\telsif action == 5\r\n\t\taDragon.play?\r\n\telsif action == 6\r\n\t\taDragon.play\r\n\telsif action == 7\r\n\t\taDragon.sleepy?\r\n\telsif action == 8\r\n\t\taDragon.sleep\r\n\telsif action == 9\r\n\t\texit\r\n\telse\r\n\t\tputs 'Sorry, that\\'s not a valid command.'\r\n\t\tputs 'Enter a command number (1-9)'\r\n\t\taction = gets.chomp.to_i\r\n\tend\r\nend", "def action\n if @dealer.total <= 16\n :hit\n else\n :stand\n end\n end", "def execute_action\n # This is a VERY lazy way for using different enemy ammo counts.\n # When it's the enemies turn, add one to enemy_attack. This acts as a index\n # For emoving enemy ammo. It's an extremely simple and lazy way :x\n if @active_battler.is_a?(Game_Actor)\n @enemy_attack = -1\n else\n @enemy_attack += 1\n end\n ty_ammo_requirements_execute_action\n end", "def actions\n answer = @prompt.select(\"Where will you go?\", %w(Foward Back Status Items), cycle:true, per_page:4)\n if answer == \"Foward\"\n if @zone_cur == 10\n narrate(\"you leave the #{self.name}\")\n leave(self.exit_name)\n else\n @zone_cur += 1\n narrate(\"you continue foward\")\n encounter_check\n end\n elsif answer == \"Back\"\n if @zone_cur == 0\n narrate(\"you leave the #{self.name}\")\n leave(self.entrance_name)\n else\n @zone_cur -= 1\n narrate(\"you retreat backwards\")\n encounter_check\n end\n elsif answer == 'Status'\n narrate(\"#{@player.name} has #{@player.hp} hp, #{@player.attack} attack, and #{@player.defence} defence\")\n str = \"\"\n a = @player.equipment.map do |el|\n el.name\n end\n b = a.join(\", \")\n narrate(\"#{@player.name} has the following items equipped: #{b}\")\n here\n elsif answer == 'Items'\n answer = @prompt.select(\"Items Menu\", %w(Equip_item Look_item), cycle:true, per_page:4)\n if answer == 'Equip_item'\n #list inventory\n stuff = @player.items.map do |el|\n el.name\n end\n #select an item from inventory\n answer = @prompt.select(\"Items Menu\", stuff, cycle:true, per_page:4)\n #find that item again\n to_eq = @player.items.find do |el|\n el.name == answer\n end\n binding.pry\n #equip that item\n @player.equip(to_eq)\n narrate( \"You have equipped the #{to_eq.name}!\")\n actions\n end\n end\n end", "def action_X\n $game_temp.temp_team = @temp_team if @mode == :select\n # Check if the number of selected Pokemon is equal to the required number\n @running = false if @mode == :select && enough_pokemon? 
== true\n return if @mode != :menu \n return $game_system.se_play($data_system.buzzer_se) if @intern_mode != :normal or @party.size <= 1\n @base_ui.show_win_text(text_get(23, 19))\n @intern_mode = :choose_move_pokemon\n end", "def actions() ; info[:actions] ; end", "def actions() ; info[:actions] ; end", "def data_field_action_select(label, action)\n field_detect(label)\n @field.all(:css, 'button[data-toggle=\"dropdown\"]').first.click #opens the actions menu\n action_list = @field.all(:css, 'ul[class=\"dropdown-menu pull-right\"] > li > a')\n action_item = action_list.detect { |item| item.text == action }\n\n if action_item\n action_item.click\n else\n raise \"Action: #{action} is not available for the field: #{label}.\"\n end\n end", "def decide_action(char)\n # temporary variables\n dir, x, y, ai = char.direction, char.x, char.y, char.ai\n pix = $BlizzABS.pixel\n # get alignment setup\n negative, positive = ai.negative, ai.positive\n # invert setup if confused\n negative, positive = positive, negative if char.restriction == 3\n # get all enemies in sight\n in_sight_e = ai.sight.find_all {|b| negative.include?(b.ai.group)}\n # if no enemies are available\n if in_sight_e.size == 0\n # initialize\n in_sight_a, in_range = [], ai.memory.keys\n # get all allies and enemies in range\n in_range.each {|b|\n in_sight_a.push(b) if positive.include?(b.ai.group)\n in_sight_e.push(b) if negative.include?(b.ai.group)}\n # if still no enemies are available\n if in_sight_e.size == 0\n # initialize again\n in_sight_a = []\n # get all allies and enemies from memory\n ai.memory.each_key {|b|\n in_sight_a.push(b) if positive.include?(b.ai.group)\n in_sight_e.push(b) if negative.include?(b.ai.group)}\n end\n else\n # get all allies in sight\n in_sight_a = ai.sight.find_all {|b| positive.include?(b.ai.group)}\n end\n # exit if no enemies are in sight\n return if in_sight_e.size == 0\n # if actor\n if char.is_a?(Map_Actor)\n # exit if \"no enemies\" are in sight\n return if in_sight_e.size == 0\n # get radius reach of player\n rad = $BlizzABS.util.get_player_radius\n # find all enemies within radius according to aggressiveness\n in_radius = in_sight_e.find_all {|e|\n Math.hypot(char.x / pix - e.x / pix, char.y / pix - e.y / pix) <=\n rad * char.ai.aggressive / 15}\n # check next trigger if action can't be executed\n return if in_radius.size == 0\n # add self as ally\n in_sight_a.push(char)\n # if confused or no trigger action was set up\n if char.restriction == 3 ||\n !trigger_actor_action(char, in_sight_a, in_sight_e)\n # set up advanced action based on Blizz-ABS AI\n advanced_actor_action(char)\n end\n # if enemy\n elsif char.is_a?(Map_Enemy)\n # if action attribute is not active\n if !ai.actions\n # decide normal action\n char.battler.make_action\n # temporary variable\n act = char.battler.current_action\n # set up the action in Blizz-ABS as normal action\n normal_action(char, in_sight_a, in_sight_e, (act.kind == 0),\n act.basic, rand(31) + 70, 80, act.skill_id)\n else\n # set up advanced action based on Blizz-ABS AI\n advanced_enemy_action(char)\n end\n end\n # if target doesn't exist or forced moving\n if ai.target == nil || !ai.target.valid?\n # reset action\n char.reset_action\n # if not being force moved\n elsif char.is_a?(Map_Enemy) && char.move_type != 3\n # set path request state\n ai.state = Request\n # turn toward the target not to lose it out of sight\n char.turn_toward(ai.target)\n # request a path\n request_path(char, ai.target)\n end\n end", "def choose_action(player1, player2)\n\t\tputs 
\"Quelle action veux-tu effectuer ?\\n a - chercher une meilleure arme\\n s - chercher à se soigner \"\n\t\tputs \" attaquer un joueur en vue :\"\n\t\tif player1.life_points > 0 \n\t\t\tputs \" 0 - #{player1.show_state} \"\n\t\telse puts \" #{player1.name} est KO, aucun action possible\"\n\t\tend\n\t\tif player2.life_points > 0 \n\t\t\tputs \" 1 - #{player2.show_state}\"\n\t\telse puts \" #{player2.name} est KO, aucun action possible\"\n\t\tend\n\t\tprint \"> \"\n\t\treturn gets.chomp\nend", "def button_down(id)\n if id == Gosu::KbEscape || id == Gosu::KbQ\n save\n close\n elsif id == Gosu::KbA\n @current_type = :terrain\n elsif id == Gosu::KbS\n @current_type = :enemies\n elsif id == Gosu::KbD\n @current_type = :candies\n elsif id == Gosu::KbLeft || id == Gosu::GpLeft\n @x_offset -= 1 if @x_offset > 0\n elsif id == Gosu::KbUp || id == Gosu::GpUp\n @y_offset -= 1 if @y_offset > 0\n elsif id == Gosu::KbRight || id == Gosu::GpRight\n @x_offset += 1 if @x_offset < LEVEL_WIDTH - 10\n elsif id == Gosu::KbDown || id == Gosu::GpDown\n @y_offset += 1 if @y_offset < LEVEL_HEIGHT - 10\n elsif id == Gosu::Kb1\n if @current_type == :terrain\n @current_selection = :background\n elsif @current_type == :enemies\n @current_selection = :slug\n elsif @current_type == :candies\n @current_selection = :soda\n end\n elsif id == Gosu::Kb2\n if @current_type == :terrain\n @current_selection = :platform\n elsif @current_type == :enemies\n @current_selection = :spikes\n elsif @current_type == :candies\n @current_selection = :gum\n end\n elsif id == Gosu::Kb3\n if @current_type == :terrain\n @current_selection = :player\n elsif @current_type == :enemies\n @current_selection = :mushroom\n elsif @current_type == :candies\n @current_selection = :chocolate\n end\n elsif id == Gosu::Kb4\n if @current_type == :terrain\n @current_selection = :door\n end\n elsif id == Gosu::Kb5\n if @current_type == :terrain\n @current_selection = :background2\n end\n elsif id == Gosu::MsLeft\n if @current_selection == :slug\n x = (mouse_x / SCALE).to_i\n x -= x % 32\n x += 32 * @x_offset\n y = (mouse_y / SCALE).to_i\n y -= y % 25\n y -= 12\n y += 25 * @y_offset\n @enemies.push(Slug.new(self, x, y))\n elsif @current_selection == :spikes\n x = (mouse_x / SCALE).to_i\n x -= x % 32\n x += 3\n y = (mouse_y / SCALE).to_i\n y -= y % 25\n y -= 12\n x += 32 * @x_offset\n y += 25 * @y_offset\n @enemies.push(Spikes.new(self, x, y))\n elsif @current_selection == :mushroom\n x = (mouse_x / SCALE).to_i\n x -= x % 32\n y = (mouse_y / SCALE).to_i\n y -= y % 25\n y += 6\n x += 32 * @x_offset\n y += 25 * @y_offset\n @enemies.push(Mushroom.new(self, x, y))\n elsif @current_selection == :player\n x = (mouse_x / SCALE).to_i\n x -= x % 32\n y = (mouse_y / SCALE).to_i\n y -= y % 25\n x += 32 * @x_offset\n y += 25 * @y_offset\n x += 2\n @player = [x, y]\n elsif @current_selection == :door\n x = (mouse_x / SCALE).to_i\n x -= x % 32\n y = (mouse_y / SCALE).to_i\n y -= y % 25\n x += 32 * @x_offset\n y += 25 * @y_offset\n y += 2 \n @door = [x, y]\n elsif @current_type == :candies\n x = (mouse_x / SCALE).to_i\n y = (mouse_y / SCALE).to_i\n x += 32 * @x_offset\n y += 25 * @y_offset\n @candies.push(Object.const_get(@current_selection.to_s.capitalize).new(self, x, y))\n end\n end\n end", "def command_attack\n BattleManager.actor.input.set_attack\n select_target_selection\n end", "def get_action(type, status)\n case \"#{type}_#{status}\"\n when \"positive_ok\", \"negative_ok\"\n action = :do_nothing\n when \"positive_low\", \"negative_high\"\n # for positive metrics low 
is bad, ex: puma_capacity\n # for negative metrics high is bad, ex: kafka_consumer_lag\n action = :scale_up\n when \"positive_high\", \"negative_low\"\n # for negative metrics low is good, ex: kafka_consumer_lag\n # for positive metrics high is good, ex: puma_capacity\n action = :scale_down\n else\n # type or status is unknown, set action to unknown\n action ||= :unknown\n end\n return action\n end", "def hook_action\n if tool_canuse?(HweaponId) and not @showing_hook\n unless character_at_playerpos?\n @hookshot.each do |hook| \n hook.direction = @direction\n hook.moveto(@x, @y)\n hook.char_steps = hook.index\n end\n @showing_hook = true\n RPG::SE.new(HookActionSe, 80).play\n @tool_anime = 30\n else\n Sound.play_buzzer\n end\n end\n end", "def advanced_actor_action(char)\n # initialize skill action\n dmg, heal, neutral = [], [], []\n # iterate through all actions\n $BlizzABS.util.get_actor_skills(char.battler).each {|id|\n # if skill can be used\n if char.skill_can_use?(id)\n # if damaging skill\n if $data_skills[id].power > 0\n # add to array of damaging skills\n dmg.push(id)\n # if healing skill\n elsif $data_skills[id].power < 0\n # add to array of healing skills\n heal.push(id)\n else\n # add to array of neutral skills\n neutral.push(id)\n end\n end}\n # decide a target\n decide_target(char, dmg, heal, neutral, true, true, false)\n end", "def phase3_command_item\r\n # Set action\r\n @active_battler.current_action.kind = 2\r\n # Start item selection\r\n start_item_select\r\n end", "def select_kit_action(kit_id=nil)\n actions = { 'View Kit info' => :view_kit,\n 'Update Kit' => :prompt_kit_params, \n 'Delete Kit' => :delete_kit }\n \n action = @io.select_from_options(\"What would you like to do with this Kit?\", \n actions)\n\n # if the selected action is to view the kit info, add the :after_view operation\n @operation_stack.unshift :after_view if action == :view_kit\n @operation_stack.unshift action\n\n kit_id\n end", "def make_basic_action_result\n # If the current actor's weapon does not target all\n if @active_battler.current_action.basic != 128\n # Call the original method\n targetall_action\n else\n # Set animation ID\n @animation1_id = @active_battler.animation1_id\n @animation2_id = @active_battler.animation2_id\n # check restrictions and set targets\n @target_battlers = []\n # If attacking allies\n if @active_battler.restriction == 3\n # Attack all allies\n set_target_battlers(4)\n # If attacking enemies\n else\n # Attack all enemies\n set_target_battlers(2)\n end\n # Apply normal attack results\n for target in @target_battlers\n target.attack_effect(@active_battler)\n end\n end\n end", "def update_phase3_enemy_select\n pkmn = @actors[@actor_actions.size]\n skill = pkmn.skills_set[@atk_index]\n if skill.id == 174 and !pkmn.type_ghost? 
#> Malédiction\n return [pkmn]\n end\n #>Choix automatique en 1v1\n if $game_temp.vs_type == 1 or (@enemy_party.pokemon_alive==1 and $pokemon_party.pokemon_alive==1) or skill.is_no_choice_skill?\n return util_targetselection_automatic(pkmn, skill)\n #>Choix 2v2\n elsif $game_temp.vs_type == 2\n data = update_phase3_pokemon_select_2v2(pkmn, skill)\n return -1 if data == -1\n if data < 2\n return [@enemies[data]]\n end\n return [@actors[data-2]]\n else\n return -1\n end\n\n end", "def action\n case item.type\n when :switch, :bar\n toggle_item\n when :advanced\n process_method\n when :variable\n open_popup\n end\n end", "def determine_action(input)\n case (input.to_i)\n when 1 then TimeMenu::WeektimeMenu.new.print_menu\n when 2 then TimeMenu::MonthtimeMenu.new.print_menu\n when 3 then TimeMenu::CustomtimeMenu.new.print_menu\n when 4 then return false\n else\n handle_wrong_option\n end\n return true\n end", "def menu_action(action)\n if menu_action?(action)\n action.to_sym\n else\n \"#{action}#{SELECT_ACTION_SUFFIX}\".to_sym\n end\n end", "def right_key\n @choice = @choice < @max_level ? @choice + 1 : @max_level\n @choice_sound.play(1, 0.5, false)\n end", "def action\n @action ||= calculate_action\n end", "def select_view\n #redirect_to :action => params[:selection]\n if (SELECTABLE_ACTIONS_ADMIN.include? params[:selection].to_sym) || (SELECTABLE_ACTIONS.include? params[:selection].to_sym)\n redirect_to :action => params[:selection]\n else\n redirect_to :action => 'home'\n end\n end", "def select_botgarden_action_code(data_set)\n action = data_set[BOTGARDENCurrentLocationData::ACTION_CODE.name]\n if action\n logger.debug \"Entering action code '#{action}'\"\n action_code_options_locator = input_options_locator([], BOTGARDENCurrentLocationData::ACTION_CODE.name)\n hit_escape\n wait_for_options_and_select(botgarden_action_code_input_locator, action_code_options_locator, action)\n end\n end", "def actionNom\n\t\treturn \"Acheter potion (100or)\"\n\tend", "def show_possible_actions(options={})\n\t\[email protected] do |menu|\n\t\t\toptions.each do |key, value|\n\t\t\t\tmenu.choice(key) { send(*value) if value }\n\t\t\tend\n\t\tend\n\tend", "def update_phase3_enemy_select\r\n # If B button was pressed\r\n if Input.trigger?(Input::B)\r\n # Play cancel SE\r\n $game_system.se_play($data_system.cancel_se)\r\n # End enemy selection\r\n end_enemy_select\r\n return\r\n end\r\n # If C button was pressed\r\n if Input.trigger?(Input::C)\r\n # Play decision SE\r\n $game_system.se_play($data_system.decision_se)\r\n # Set action\r\n @active_battler.current_action.target_index = @enemy_arrow.index\r\n # End enemy selection\r\n end_enemy_select\r\n # If skill window is showing\r\n if @skill_window != nil\r\n # End skill selection\r\n end_skill_select\r\n end\r\n # If item window is showing\r\n if @item_window != nil\r\n # End item selection\r\n end_item_select\r\n end\r\n # Go to command input for next actor\r\n phase3_next_actor\r\n end\r\n end", "def make_basic_action_result\n # Gather the current Ammo Cost\n gather_ammo_cost = Ammo::Range_weapons_id[@active_battler.weapon_id]\n # Gather the Current Ammo\n gather_ammo = Ammo::Range_ammo_id[@active_battler.weapon_id]\n # Check if the Active Battler is attacking and if they are using a ranged weapon\n if @active_battler.current_action.basic == 0 and Ammo::Range_weapons_id.has_key?(@active_battler.weapon_id)\n # Check the Ammo Count\n if $game_party.item_number(gather_ammo) >= gather_ammo_cost\n # Sufficient Ammo, remove item\n 
$game_party.lose_item(gather_ammo,gather_ammo_cost)\n syn_scene_battle_range\n else\n # Insufficient Ammo\n @help_window.set_text(\"#{@active_battler.name} cannot attack due to insufficient Ammo\", 1)\n end\n # Call Default Code\n else\n syn_scene_battle_range\n end\n end", "def actions(enemy)\n puts \"What will #{@name} do? They may:\"\n @actions.each_with_index{|a, i| puts \"#{i+1}) #{a.unCamelize}\"}\n #maybe instead of this, look at all the defined action method names and list them?\n userMethod = Rules.userInput\n (userMethod.to_i.to_s == userMethod) ? userMethod = @actions[userMethod.to_i-1] : userMethod = userMethod.camelize\n (@actions.include? userMethod) ? Actions.method(userMethod).call(self, enemy) : (puts \"#{@name} attempts to #{userMethod.unCamelize}... Nothing happens.\") \n end", "def draw_required_level\n return if item.equip_level <= 1\n return if actor && actor.level >= item.equip_level\n color = actor.nil? ? normal_color : power_down_color\n text = sprintf('Richiede il livello %d', item.equip_level)\n draw_feature(text, color)\n end", "def charPressedInMenu\n char = pressKey\n case (char)\n when \"p\"\n #load level of choice\n loadArray\n #displayArray\n displayArray\n when \"q\"\n #stop game\n exit\n when \"c\"\n #request level\n selectLevel\n else\n menuScreen\n end\nend", "def select_botgarden_action_code(data_set)\n action = data_set[InventoryMovement::REASON.name]\n if action\n action_code_options_locator = input_options_locator([], InventoryMovement::REASON.name)\n hit_escape\n wait_for_options_and_select(botgarden_action_code_input_locator, action_code_options_locator, action)\n end\n end", "def multi_attack_actions(session, battle)\n end", "def attack_ui(entity, action, options = {})\n selected_targets = []\n\n target = @prompt.select(\"#{entity.name} targets\") do |menu|\n battle.valid_targets_for(entity, action, target_types: options[:target_types], range: options[:range]).each do |target|\n menu.choice target.name, target\n end\n menu.choice 'Manual'\n menu.choice 'Back', nil\n end\n\n return nil if target == 'Back'\n\n if target == 'Manual'\n targets = target_ui(validation: lambda { |selected|\n selected_entities = map.thing_at(*selected)\n\n return false if selected_entities.empty?\n\n selected_entities.detect do |selected_entity|\n battle.valid_targets_for(entity, action, target_types: options[:target_types], range: options[:range], include_objects: true).include?(selected_entity)\n end\n })\n\n if targets.size > (options[:num_select].presence || 1)\n loop do\n target = @prompt.select('multiple targets at location(s) please select specific targets') do |menu|\n targets.each do |t|\n menu.choice t.name.to_s, t\n end\n end\n selected_targets << target\n break unless selected_targets.size < options[:num_select]\n end\n else\n selected_targets = targets\n end\n\n return nil if target.nil?\n else\n selected_targets << target\n end\n\n selected_targets.flatten\n end", "def available_action?(action_name); end", "def actions\n @actions = []\n price = compute_price('level2')\n commission = (price * 0.3).to_i\n insurance_fee = (commission / 2).to_i\n assistance_fee = duration * 100\n owner_fee = price - (price * 0.3).to_i\n drivy_fee = commission - insurance_fee - assistance_fee\n @options.each do |option|\n case option.type\n when 'gps'\n price += option_price_per_day('gps') * duration * 100\n owner_fee += option_price_per_day('gps') * duration * 100\n when 'baby_seat'\n price += option_price_per_day('baby_seat') * duration * 100\n owner_fee += 
option_price_per_day('baby_seat') * duration * 100\n when 'additional_insurance'\n price += option_price_per_day('additional_insurance') * duration * 100\n drivy_fee += option_price_per_day('additional_insurance') * duration * 100\n end\n end\n if price != 0\n @actions << Action.new(who: 'driver', type: 'debit', amount: price)\n @actions << Action.new(who: 'owner', type: 'credit', amount: owner_fee)\n @actions << Action.new(who: 'insurance', type: 'credit', amount: insurance_fee)\n @actions << Action.new(who: 'assistance', type: 'credit', amount: assistance_fee)\n @actions << Action.new(who: 'drivy', type: 'credit', amount: drivy_fee)\n end\n @actions.collect { |action| action.convert_to_hash }\n end", "def phase3_command_skill\r\n # Set action\r\n @active_battler.current_action.kind = 1\r\n # Start skill selection\r\n start_skill_select\r\n end", "def command_use_point\r\r\n if $game_actors[@actor.id].skill_tree[0] == 0 || confirm_skill_add\r\r\n Sound.play_buzzer\r\r\n @confirm.close\r\r\n @confirm.active = false\r\r\n else\r\r\n @skills_icons[@skill_selected].opacity = 255\r\r\n $game_actors[@actor.id].skill_tree[0] -= 1\r\r\n $game_actors[@actor.id].lose_jp(Actor[@actor.id][@skill_selected]['JP'])\r\r\n $game_actors[@actor.id].skill_mult[Actor[@actor.id][@skill_selected]['Skill_id']] += Actor[@actor.id][@skill_selected]['Multiply']\r\r\n $game_actors[@actor.id].skill_tree[Actor[@actor.id][@skill_selected]['Skill_id']] += 1\r\r\n $game_actors[@actor.id].learn_skill(Actor[@actor.id][@skill_selected]['Skill_id'])\r\r\n @info_window.refresh(@actor, @tree)\r\r\n Audio.se_play(\"Audio/SE/Skill3\",75,100)\r\r\n @confirm.close\r\r\n @confirm.active = false\r\r\n if $game_switches[19] # achievement available?\r\r\n #------------------------------------------------------------------------------- \r\r\n # Trophic: Markspony\r\r\n #-------------------------------------------------------------------------------\r\r\n earn_trophic = true\r\r\n for i in 546..561\r\r\n if !$game_actors[@actor.id].skill_learned?(i) && !$ACH_markspony\r\r\n earn_trophic = false\r\r\n break\r\r\n end\r\r\n end\r\r\n \r\r\n if earn_trophic && !$ACH_markspony\r\r\n $ACH_markspony = true\r\r\n GameJolt.award_trophy(\"53491\")\r\r\n p sprintf(\"Achievement unlock - markspony\")\r\r\n $game_system.earn_achievement(:markspony)\r\r\n end\r\r\n #------------------------------------------------------------------------------- \r\r\n # Trophic: Elementalist\r\r\n #------------------------------------------------------------------------------- \r\r\n earn_trophic = true\r\r\n for i in 563..582\r\r\n next if i == 567 || i == 571 || i == 577 || i == 581 \r\r\n if !$game_actors[@actor.id].skill_learned?(i) && !$ACH_elementalist\r\r\n earn_trophic = false\r\r\n break\r\r\n end\r\r\n end\r\r\n \r\r\n if earn_trophic && !$ACH_elementalist\r\r\n $ACH_elementalist = true\r\r\n GameJolt.award_trophy(\"53485\") \r\r\n p sprintf(\"Achievement unlock - elementalist\")\r\r\n $game_system.earn_achievement(:elementalist)\r\r\n end\r\r\n #---------------------------------------------------\r\r\n end\r\r\n end\r\r\n end", "def action!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 10 )\n\n type = ACTION\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 141:10: 'action'\n match( \"action\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 10 )\n\n end", "def 
handle_action(username, actiontype)\n \n end", "def target!(battle,opponent)\n action = Action.new\n action.ability = self\n action.target = opponent\n action.battle = battle\n action.damage = self.damage\n if action.save\n return action\n else\n return nil\n end\n end", "def game_action(num)\n\t\t\t\n\t\t\tnet_ball = rand(5..15)\n\t\t\treturn \"net\" if num == net_ball\n\t\t\treturn \"oob\" if num <= 3\n\t\t\treturn \"hit\" if num > 3 && num <=17\n\t\t\treturn \"miss\" if num > 17\n\t\tend", "def menu\n puts \"Quelle action veux-tu effectuer ?\" #Presentation menu\n puts \"\"\n puts \"a - chercher une meilleure arme\"\n puts \"s - chercher à se soigner\"\n puts \"\"\n puts \"Attaquer un joueur en vue\"\n print \"0 - \"\n puts @human_player.show_state\n counter_of_ennemies = 0\n @enemies.each do |mechant|\n print \"#{counter_of_ennemies} - \" \n puts mechant.show_state\n counter_of_ennemies += 1\n end\n end", "def actions; end", "def action\n Actions[ self[:action] ]\n end", "def general_action(action_name)\n case action_name\n when 'resource_status'\n if record.onsite_monitoring\n record.resource_status || :not_available\n else\n nil\n end\n when 'stop'\n set_state(:terminating)\n @record.clean_up_database!\n nil\n when 'restart'\n @record.clean_up_database!\n nil\n else\n nil\n end\n end", "def do_action(action)\n\t\tcase action\n\t\twhen 'list'\n\t\t\tlist()\n\t\twhen 'find'\n\t\t\tputs 'Finding...'\n\t\t\tfind()\n\t\twhen 'sort'\n\t\t\tsort()\n\t\twhen 'add'\n\t\t\tadd()\n\t\twhen 'quit'\n\t\t\treturn :quit\n\t\telse\n\t\t\tputs \"\\nI don't understand that command.\\n\"\n\t\tend\n\tend", "def action_target()\n \n end", "def execute\n @last_level = @light.level\n @light.off\n end", "def taking_damage_action\n if @warrior.feel(:backward).empty? && @health < 10\n @warrior.walk!(:backward)\n elsif @warrior.feel.empty?\n @warrior.walk!\n elsif @warrior.feel.enemy?\n @warrior.attack!\n end\n end", "def do_action(action_name)\n\n action = @actions.find{|a| a.name.eql?(action_name.to_s)}\n \n raise ArgumentError, \"Action #{action.name} is not allowed from state #{@current_state.name}\", caller unless\n current_action_names.include?(action.name)\n \n @current_state = action.result_state\n @current_cost = action.get_cost(tick_count)\n @current_reward = @current_state.get_reward(tick_count)\n @cumulative_reward += @current_reward - @current_cost\n \n end", "def choice_a_menu\n item_id = @item_list[@index]\n return action_b if item_id.nil?\n return play_buzzer_se if item_id == 0\n play_decision_se\n show_shadow_frame\n # Prepare the choice info\n # Use option\n map_usable = proc { !GameData::Item[item_id].map_usable }\n # Give option\n giv_check = proc { $pokemon_party.pokemon_alive <= 0 || !GameData::Item[item_id].holdable }\n # Unregister / register\n if $bag.shortcuts.include?(item_id)\n reg_id = 14\n reg_meth = method(:unregister_item)\n else\n reg_id = 2\n reg_meth = method(:register_item)\n reg_check = map_usable\n end\n # Throw option\n thr_check = proc { !GameData::Item[item_id].limited }\n # Create the choice\n choices = PFM::Choice_Helper.new(Yuki::ChoiceWindow::But, true, 999)\n choices.register_choice(text_get(22, 0), on_validate: method(:use_item), disable_detect: map_usable)\n .register_choice(text_get(22, 3), on_validate: method(:give_item), disable_detect: giv_check)\n .register_choice(text_get(22, reg_id), on_validate: reg_meth, disable_detect: reg_check)\n .register_choice(text_get(22, 1), on_validate: method(:throw_item), disable_detect: thr_check)\n 
.register_choice(text_get(22, 7))\n # Show selection : item_name\n @base_ui.show_win_text(parse_text(22, 35, PFM::Text::ITEM2[0] => GameData::Item[item_id].exact_name))\n # Process the actual choice\n y = 200 - 16 * choices.size\n choices.display_choice(@viewport, 306, y, nil, on_update: method(:update_graphics), align_right: true)\n @base_ui.hide_win_text\n hide_shadow_frame\n end", "def skill_choice\n if @visual.show_skill_choice(@player_actions.size)\n # The player choosed a move\n @next_update = :target_choice\n else\n # The player canceled\n @next_update = :player_action_choice\n end\n ensure\n @skip_frame = true\n end", "def get_action(str)\n puts \"Received '#{str.strip}'\" unless @quiet\n ['f','c','r'].choice\n end", "def decision_for_new_turn(player)\n choices = @actions.select { |a| a.timing == :main_action }.map { |action|\n choice = Choice.new(action.name_and_effect, action.arguments) { |args| cb_action(player, action, args) }\n last_player = @turns.size >= 2 ? @turns[-2].active_player : nil\n\n if player.coins >= 10 && action != Action::Coup\n choice.unavailable!('You must Coup when you have at least 10 coins.')\n elsif player.coins < action.cost + tax_for(player, action)\n choice.unavailable!(\"Need #{format_costs(player, action).join(' and ')}\")\n elsif last_player == player && action.another_turn?\n choice.unavailable!('Would cause three turns in a row')\n end\n\n [action.slug, choice]\n }.to_h\n\n # If player has a disappear token, add the block option.\n if (disappear_action = @disappear_players[player])\n action_class = disappear_action.class\n tax = tax_for(player, action_class)\n name = action_class.flavor_name\n choice = Choice.new(\"Block #{name}#{\" (tax of #{tax} coin)\" if tax > 0}\") {\n cb_block_disappear(player, disappear_action, start_turn: true)\n }\n choice.unavailable!(\"Need #{format_costs(player, action_class)}\") if player.coins < tax\n choices['block'] = choice\n end\n\n Decision.single_player(\n current_turn.id, player, \"#{player}'s turn to choose an action\",\n choices: choices,\n )\n end", "def store_player_action(plname, action, *args)\r\n curr_giocata = @info_match[:giocate].last\r\n if curr_giocata\r\n curr_actions = {:pl_name => plname, :type => action, :arg => args}\r\n curr_giocata[:actions] << curr_actions\r\n end\r\n end", "def get_actions ()\n\n #acciones del personaje que esta jugando\n player_character.base_card.base_actions.each do |action|\n actions.create!(base_action_id: action.id)\n end\n\n #acciones por distritos morados\n\n purple_districts = districts_on_game.where(\"colour = 'purple' AND name not in ('lighthouse', 'bell_tower')\")\n purple_districts.each do |district|\n district.base_card.base_actions.each do |action|\n actions.create!(base_action_id: action.id)\n end\n end\n\n\n character = player_character.base_card.name\n\n case character\n when 'merchant'\n current_coins = coins\n update_attribute(:coins, current_coins + 1)\n when 'architect'\n card_list = party.cards.districts.where(\"player_id is NULL\").order('position').limit(2)\n card_list.each do |card|\n Card.update(card.id, :state => 'ONHAND', :player_id => id)\n end\n end\n\n\n\n end", "def level\r\n\tif $lvl == 1\r\n\t\tlevel2\r\n\telse \r\n\t\tif $lvl == 2\r\n\t\t\tlevel3\r\n\t\telse\r\n\t\t\tif $lvl == 3\r\n\t\t\t\tlevel4\r\n\t\t\tend\r\n\t\tend\t\r\n\tend\t\r\nend", "def draw_basic_actions\n y = line_height\n enemy.actions.each do |action|\n next unless action.kind == 0\n draw_basic_action(y, action)\n y += line_height\n end\n y\n end", "def pick_difficulty\n 
user_diff = $prompt.select(\"Pick a difficulty level\", [\"Easy\", \"Medium\", \"Hard\"])\n case user_diff\n when \"Easy\"\n $hide_speed = 3 \n $time_limit = 7\n when \"Medium\"\n $hide_speed = 2\n $time_limit = 6\n when \"Hard\"\n $hide_speed = 1.5\n $time_limit = 5\n end \n pre_game\nend", "def action!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 51)\n\n type = ACTION\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 547:4: NESTED_ACTION ( '?' )?\n nested_action!\n # at line 547:18: ( '?' )?\n alt_12 = 2\n look_12_0 = @input.peek(1)\n\n if (look_12_0 == ??) \n alt_12 = 1\n end\n case alt_12\n when 1\n # at line 547:20: '?'\n match(??)\n # --> action\n type = SEMPRED \n # <-- action\n\n end\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 51)\n\n end", "def item_choice\n item_id, target = @visual.show_item_choice\n if item_id\n # The player made a choice we store the action and we check if he can make other choices\n @player_actions << { type: :item, item_id: item_id, target: target, bag: @logic.bags[0] }\n log_debug(\"Action : #{@player_actions.last}\") if debug? # To prevent useless overhead outside debug\n @next_update = can_player_make_another_action_choice? ? :player_action_choice : :trigger_all_AI\n else\n # If the player canceled we return to the player action\n @next_update = :player_action_choice\n end\n end", "def action(name)\n actions.find { |action| action.name == name }\n end", "def action; end", "def action; end", "def action; end", "def action; end", "def action; end", "def action\n case item.type\n when :switch, :bar\n toggle_item\n when :advanced\n process_method\n when :variable\n open_popup\n else\n process_custom_method\n end\n end", "def dlp_action=(value)\n @dlp_action = value\n end", "def maintenance_actions\n\t\t\tif is_mortgaged?\n\t\t\t\t\t\tif @owner.decide(:consider_unmortgage, property: self).is_yes? and @owner.balance > cost\n\t\t\t\t\t\t\tunmortgage!\n\t\t\t\t\t\tend\n\t\t\tend\n\t\t\tsuper\n\t\tend", "def ai_action()\n # sleep 3\n player = get_player(self.current_player)\n action_info = ai_one_logic(player)\n if player.ai == \"2\"\n action_info = ai_two_logic(player)\n end\n action(action_info[0], action_info[1], player.id) # this progresses current player and may progress round\n self.save\n end", "def combat\n\t\tif @battleManager.actionUnitsEmpty\n\t\t\tsetMenu\n\t\telse\n\t\t\tunitChange = [email protected]\n\t\t\tif unitChange\n\t\t\t\t@phase = @battleManager.checkChanges\n\t\t\t\tputs @phase\n\t\t\tend\n\t\tend\n\tend", "def case_menu(selection)\n case selection\n when 'Playlist'\n @playlist.menu\n when 'Account Details'\n account_details\n when 'Exit'\n p \"Is this exiting?\"\n end\n end", "def initial_status\r\n @action.hit(@hand, @decks)\r\n return @hand\r\n end", "def actions(m, params)\n case params[:toggle]\n when 'on'\n @registry[m.sourcenick + \"_actions\"] = true\n m.okay\n when 'off'\n @registry.delete(m.sourcenick + \"_actions\")\n m.okay\n else\n if @registry[m.sourcenick + \"_actions\"]\n m.reply _(\"actions will be twitted\")\n else\n m.reply _(\"actions will not be twitted\")\n end\n end\n end" ]
[ "0.68396413", "0.676429", "0.6751523", "0.6472519", "0.6215509", "0.61443734", "0.6142044", "0.6060048", "0.6034971", "0.60257566", "0.6013959", "0.5978408", "0.59598726", "0.59454983", "0.5935239", "0.5887355", "0.5865783", "0.5809525", "0.5809156", "0.5795123", "0.5770813", "0.57608575", "0.5735441", "0.5733473", "0.5726138", "0.57190853", "0.5688702", "0.5685047", "0.5673246", "0.5673246", "0.5670432", "0.56620616", "0.5657765", "0.5610587", "0.5600218", "0.5592363", "0.55876225", "0.557996", "0.55795324", "0.55629086", "0.5545018", "0.55446726", "0.55341506", "0.55300784", "0.5529598", "0.55223006", "0.5518844", "0.55176073", "0.5515444", "0.5514261", "0.55126595", "0.55088913", "0.54879904", "0.5485594", "0.54849494", "0.5469673", "0.54629076", "0.54621", "0.5458249", "0.54450196", "0.5444208", "0.5443015", "0.5435853", "0.5423671", "0.54107195", "0.54048204", "0.5381416", "0.5377237", "0.5376853", "0.5375627", "0.53752136", "0.53748584", "0.5368491", "0.5362766", "0.53563243", "0.5354789", "0.53518325", "0.53508", "0.53494495", "0.5346617", "0.5346017", "0.53333104", "0.53288627", "0.53267866", "0.53266716", "0.53261405", "0.5325957", "0.5325075", "0.53227794", "0.53227794", "0.53227794", "0.53227794", "0.53227794", "0.5317189", "0.53154945", "0.53102016", "0.53093755", "0.5306735", "0.53060734", "0.5304954", "0.53003913" ]
0.0
-1
puts "hi" puts "how are you" puts "I'm fine"
def say(words) puts words + '.' end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hello\n puts \"Hello Dude!!!\"\n puts \"Hellod Douchebag!!!\"\nend", "def saymessage\n puts \"hi programers\"\n puts \"I am hungy\"\nend", "def say_hello(anything)\n # write code\n puts anything\n puts \"Hello World!\"\nend", "def introduceMyself\n puts \"I am handsome\"\n puts \"I am talented\"\n puts \"I am brilliant\"\nend", "def introduce_myself\n puts 'am handsome'\n puts 'i am talented'\n puts 'i am brilliant'\nend", "def introduce_myself\n puts \"I am handsome\"\n puts \"I am talented\"\n puts \"I am brilliant\"\nend", "def my_method\n puts \"ayyy\"\n puts \"joe\"\nend", "def introduce_myself\n #method body\n puts \"'I am handsome'\"\n puts \"i am talented\"\n puts \"i am briliant\"\n puts \"i am amazing\"\n puts 'is talented'\n puts \"is charming\"\n\n #method body\n end", "def hail_the_king\n\tputs \"hail king mark\"\nend", "def hail_the_king\n\tputs \"hail king mark\"\nend", "def sayMessage\r\n puts \"Hey Programmers!\"\r\n puts \"What's for lunch?\"\r\nend", "def introduce_myself\n puts \"My name is Anndony Quemag\"\n puts \"My age is 23\"\n puts \"Im work at kommit\"\nend", "def introduce_myself\n puts \"I am a world ruler\"\n puts \"I am filty rich\"\n puts \"I own a library\"\nend", "def another_greetings\n puts 'Hello'; puts 'Motherfucker'\nend", "def tie_my_shoes\n puts \"grab shoe laces\"\n puts \"twist and tie around\"\n puts \"end\"\nend", "def test_method message\n puts \"Hi there\"\n puts message\n 2\n \"ha\"\nend", "def say (one, two, three)\n\t\tputs one\n\t\tputs two\n\t\tputs three\n\tend", "def say_hi\n\t\tputs \"Hello!\"\n\tend", "def say_hi\n\t\tputs 'saying hi'\n\tend", "def say_hello_world_five_times\n puts \"Hello World!\"\n puts \"Hello World!\"\n puts \"Hello World!\"\n puts \"Hello World!\"\n puts \"Hello World!\"\nend", "def this_output_twice\n print \"Calling print or puts\"\nend", "def sayHello\n # output some text\n puts(\"hello, world\")\nend", "def greeting\nputs \"HELLO, BONJOUR, HOLA, GUTENTAG, HALLO, HOWDY, NAMASKAR, MERHABA\"\nend", "def joke1\n puts \"A peanut was walking down the street. He was a-salt-ed.\"\nend", "def say_hi\n puts \"hi\"\nend", "def test\r\nputs \"Hodor! Hodor!\"\r\nend", "def code_like_crazy\r\n puts \"I'm crushing some code!\"\r\n end", "def sayHello(str)\n puts 'Hey, good to see you ' + str\nend", "def say_hi\n puts \"hi\"\nend", "def exiter\n puts \"\"\n puts \"*****************************************************************************\"\n puts \"I can't believe you're leaving, I GUESS MY JUKES WEREN'T GOOD ENOUGH FOR YOU!\"\n puts \"*****************************************************************************\"\n puts \"\"\nend", "def meow #teaching the cat how to meow\n puts \"meow!\" \n end", "def say_hello_ten_times\n\nphrase = \"Hello World!\"\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nend", "def puts_two_again(arg1,arg2)\n\tputs \"arg1: #{arg1}, arg2:#{arg2}\"\nend", "def say_hi\n puts \"hi\"\n end", "def puts_two_again(arg1,arg2)\n\tputs \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def start_greetings\n puts \"This is the castle of the Ice King. At the end of the corridor there are two doors\"\n puts \"Thau shall choose to go through one door, the Fire Door or the Snow Door\"\n puts \"Press 1 for Fire Door. 
Press 2 for Snow Door\"\nend", "def say(output)\n puts \"===> #{output} <===\"\nend", "def puts_two_again(arg1, arg2)\n\tputs \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def puts_two_again(arg1, arg2)\n\tputs \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def puts_two_again(arg1, arg2)\n\tputs \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def puts_two_again(arg1, arg2)\n\tputs \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def puts_two_again(arg1, arg2)\n\tputs \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def puts_two_again(arg1, arg2)\n\tputs \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def puts_two_again(arg1, arg2)\n\tputs \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def hello(name, age)\n\tputs \"Welcome #{name}, #{age} is definitely not too old to learn how to code\" \nend", "def say_hi\n puts \"Hi!\"\n end", "def greetings\n\tputs \"Greetings friend!\"\nend", "def say_hi\n puts \"Hello\"\nend", "def me_happy\n puts \"ain't I happy\"\nend", "def say_hello_to(name)\n puts \"Hello, #{name}. It's good to see you.\"\n puts \" \"\n end", "def puts(str)\nend", "def puts_two_again(arg1, arg2)\r\n puts \"arg1: #{arg1}, arg2: #{arg2}\"\r\nend", "def puts(string)\n print \"#{string}\\n\"\nend", "def greet(name)\n print \"Hello, #{name} how are you doing today?\"\nend", "def say_hello(anything)\n #write code here\n puts anything\nend", "def say_hello(anything)\n #write code here\n puts anything\nend", "def say_hi \n expression = \"I am a genius\"\n puts expression\n end", "def hello_world \n puts \"This is a crazy part of the world\"\n end", "def say_hello(anything)\n# write code here\n puts anything\nend", "def puts_two_again(args1, args2)\n\tputs \"args1: #{args1}, args2: #{args2}.\"\nend", "def say_hello(anything)\n# write code here\n puts anything\nend", "def say_hello_world_ten_times\n phrase = \"Hello World\"\n \n \nphrase = \"Hello World!\"\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nputs phrase\nend", "def puts_letters\n puts \"a\"\n puts \"b\"\n puts \"c\"\nend", "def hello_user(name)\n\tputs \"\\nHi #{name}! It\\'s nice to meet you.\"\nend", "def greet name, name2\n puts \"Hello, #{name} and #{name2} how are you all doing today?\"\nend", "def sayHi\n\t\tputs(greet)\n\tend", "def second_method\n puts \"This is my second method\"\n \"Hello\"\nend", "def say_ruby_ten_times\n phrase = \"Hello World\"\n 10.times do puts phrase\n end\nend", "def my_favorite_musician(first_name, last_name)\r\n\tputs \"my favorite musition is \" + first_name + \" \" + last_name + \"!\"\r\nend", "def greet\n hello + \" \" + world # this format allows us to use puts on line 31\nend", "def p003rubystrings\n\tputs \"Hello World\"\n\t\n\t# CAn use \" or '. 
But, ' is more effective\n\tputs 'Hello WOrld'\n\n\t#String concatenate\n\tputs \"I like\" + \" Ruby\"\n\n\t#Escape Sequence\n\tputs 'It\\'s my Ruby'\n\n\t#New here displays the string 3 times\n\tputs 'Hello' * 3\n\n\tputs PI\n\n\t#USe of back-tick\n\tputs \"LIsting of files\"\n\tputs `ls`\n\n\tsystem (\"ls\")\n\nend", "def say\n puts \"Hi\"\nend", "def say\n puts \"Hi\"\nend", "def puts_two_again(arg1,arg2)\n puts \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def puts_two_again(arg1, arg2)\n puts \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def puts_two_again(arg1, arg2)\n puts \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def puts_two_again(arg1, arg2)\n puts \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def puts_two_again(arg1, arg2)\n puts \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def puts_two_again(arg1, arg2)\n puts \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def printing\n \n # \"print\" print data in console without linebrean by default\n print \"HELLO WORLD! \"\n # \"puts\" print data in console with linebreak\n puts \"Hello World!\"\n puts 2+2\n\n#\"end\" finalize the function \nend", "def do_something this , that ,other\n puts \"We , I mean I ,am doing ,er , something!\"\nend", "def function_a\n\tputs \"Hello hacker, what is your name?\"\n\tprint \"> \"\nend", "def say_hello_ten_times\n phrase = \"Hello World\"\n 10.times do\n puts phrase\n end \nend", "def puts(str='')\n output(str)\n end", "def greet_me\n puts \"Hello\"\nend", "def say_hello\n puts \"HELLOOOOO!\"\nend", "def captain_obvious(name, what_you_love)\n\tputs(\"My name is #{name} and I love #{what_you_love}!\")\nend", "def cheese_and_crackers(cheese_count, boxes_of_crackers)\n # We tell Ruby what to put when the method is called.\n # We insert parameters in to the strings.\n # We close the method with \\n and end.\n puts \"You have #{cheese_count} cheeses!\"\n puts \"You have #{boxes_of_crackers} boxes of crackers!\"\n puts \"Man that's enough for a party!\"\n puts \"Get a blanket.\\n\"\nend", "def say_moo\n puts 'mooooooo...'\nend", "def puts_two_again arg1, arg2\n\tputs \"arg1: #{arg1}, arg2: #{arg2}\"\nend", "def sayHiToBob\n puts \"Hello Bob\"\nend", "def say_hello\n\tputs \"Bonjour!\"\nend", "def puts\n end", "def sayMoo\n puts 'mooooooo...'\nend", "def speak\n puts \"ho, ho, ho! happy hollandaise!\"\n end", "def greet2\n puts \"\\n\n We've looked up some interesting\\n\n facts about characters from the films,\\n\n and we invite you to learn some \\n\n more about them.\\n\"\n sleep 4\n end", "def saysomething\n puts \"Hello\"\nend", "def talk\n puts 'Hello'\nend", "def meow\n puts \"meow!\"\nend", "def puts_done()\n puts \"I got nothin.\"\nend", "def meow\n puts \"meow!\"\n end" ]
[ "0.76101637", "0.7546645", "0.7523486", "0.74707955", "0.7434251", "0.7405326", "0.7393472", "0.73853785", "0.73527735", "0.73527735", "0.7350173", "0.73296875", "0.7326525", "0.7315331", "0.7311952", "0.72535115", "0.7166823", "0.7153316", "0.7135454", "0.7129923", "0.71186197", "0.7117268", "0.7108928", "0.7021676", "0.70073223", "0.69997567", "0.69961494", "0.696381", "0.6951722", "0.6936411", "0.6903899", "0.68977034", "0.6891249", "0.68865937", "0.68795526", "0.6878886", "0.68679786", "0.6866507", "0.6866507", "0.6866507", "0.6866507", "0.6866507", "0.6866507", "0.6866507", "0.68622994", "0.68561304", "0.6853314", "0.68485665", "0.6844391", "0.6837766", "0.6836399", "0.68236727", "0.6816429", "0.6814944", "0.6799504", "0.67918915", "0.6776819", "0.6761787", "0.67534965", "0.67515796", "0.67501634", "0.6731107", "0.672152", "0.6708081", "0.6705046", "0.6704498", "0.668202", "0.66731215", "0.6665646", "0.66616976", "0.66556597", "0.6655256", "0.6655256", "0.6651569", "0.66448206", "0.66435355", "0.66435355", "0.66435355", "0.66435355", "0.66348225", "0.6633958", "0.663233", "0.663189", "0.66087025", "0.6608272", "0.65967065", "0.6591865", "0.6591588", "0.6590656", "0.6586433", "0.65862185", "0.65838206", "0.6581055", "0.6579315", "0.65764314", "0.6574701", "0.65744495", "0.65743876", "0.657273", "0.6572128", "0.65711445" ]
0.0
-1
r = 20 gK = 10
def g2r(n) n*(Math::PI/180) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def guage(n) \n r = (255 * n) / 100\n g = (255 * (100 - n)) / 100 \n b = 0\n return { :r => r, :g => g, :b => b }\nend", "def calc_k(n, g)\n H(n, n, g)\n end", "def gravitational_acceleration(r)\n return self.gravitational_parameter / r**2\n end", "def rpkm(counts, total_mapped_reads, gene_length)\n if counts && gene_length\n sprintf(\"%.2f\",(1e9*counts.to_f)/(total_mapped_reads*gene_length)).to_f\n else\n 0.0\n end\nend", "def gear_inches\n\tratio * (rim + (tire * 2))\nend", "def t_r(r,size)\n r * Math::sqrt(((size)-2).to_f / (1 - r**2))\n end", "def rh_factor; end", "def calc_k(n, g)\n H(n, n, g)\n end", "def k_value(r_A)\n if !@adj || r_A < 2100\n K_VALUE\n elsif r_A > 2400\n 16\n else\n 24\n end\n end", "def grasa\n\t\t1.2*imc+0.23*@edad-10.8*@sexo-5.4\n\tend", "def kcalglucidos\n\t\t\t@carbohidratos * 4\n\t\tend", "def recessive k, m, n\n all = k + m + n\n mix = m + n\n total = 4.0 * triangle(all) # 2 * squareish all = 2 * 2 * triangle all\n\n lhs = triangle n\n mid = n * mix - n\n rhs = triangle mix\n\n 1 - (lhs+mid+rhs) / total\n end", "def base_radius\n 200\n end", "def valorenergeticoKcal\n veKJ=(cgrasas * 9) + (cgrasassa * 9) + (grasasmono * 9) + (grasaspoli * 9) + (hcarbono * 4) + (polialcoholes * 2.4) + (almidon * 4) + (fibra * 2) + (proteinas * 4) + (sal * 6)\n veKJ.round(2)\n end", "def rerlang(k, beta)\n -(beta.to_f / k) * Math.log((k.times.collect { rand }).prod)\nend", "def s_radius\n modulus\n end", "def grasaIR\n\t\t((valorEnergeticoKJ.to_f*70)/8400).round(2)\n\tend", "def rg2ekn(rg, a, z)\n momentn = rg * z / a\n Math.sqrt(MP * MP + momentn * momentn) - MP\n end", "def rms\n Math.sqrt(energy / size)\n end", "def kaprekar?(k)\n=begin\n sqr = k**2\n digits = k.to_s.length\n right_n_digits = sqr.to_s[(0-digits)..-1].to_i\n left_digits = sqr.to_s[0..(1-digits)].to_i\n left_digits + right_n_digits == k\n=end\n n = Math.log10(k).to_i + 1\n value = k**2 \n k == value % (10**n) + value / (10**n) \nend", "def gear_inches\n\tratio * diameter\nend", "def valorenergeticoKJ\n\t\tveKJ=(cgrasas * 37) + (cgrasassa * 37) + (grasasmono * 37) + (grasaspoli * 37) + (hcarbono * 17) + (polialcoholes * 10) + (almidon * 17) + (fibra * 8) + (proteinas * 17) + (sal * 25)\n\t\tveKJ.round(2)\n\tend", "def prop_g\n bases_g / length.to_f\n end", "def gravitational_constant\n Constants.ratio(6, -5) / 8 * PI\n end", "def calc_generator_size(val1= chp_salable_elec, val2 = 364 * 24)\n\t\t(val1 / val2).round 3\n\tend", "def volume_of_sphere(r)\nreturn 4/3 * 3.141592653 * ( r ** 3 )\nend", "def gear_inches\n ratio * diameter\nend", "def k(n); 5 * n * n; end", "def dopri\n a21 = 1.0/5.0\n a31 = 3.0/40.0\n a32 = 9.0/40.0\n a41 = 44.0/45.0\n a42 = -56.0/15.0\n a43 = 32.0/9.0\n a51 = 19372.0/6561.0\n a52 = -25360.0/2187.0\n a53 = 64448.0/6561.0\n a54 = -212.0/729.0\n a61 = 9017.0/3168.0\n a62 = -355.0/33.0\n a63 = 46732.0/5247.0\n a64 = 49.0/176.0\n a65 = -5103.0/18656.0\n a71 = 35.0/384.0\n a72 = 0.0\n a73 = 500.0/1113.0\n a74 = 125.0/192.0\n a75 = -2187.0/6784.0\n a76 = 11.0/84.0\n\n c2 = 1.0 / 5.0\n c3 = 3.0 / 10.0\n c4 = 4.0 / 5.0\n c5 = 8.0 / 9.0\n c6 = 1.0\n c7 = 1.0\n\n b1order5 = 35.0/384.0\n b2order5 = 0.0\n b3order5 = 500.0/1113.0\n b4order5 = 125.0/192.0\n b5order5 = -2187.0/6784.0\n b6order5 = 11.0/84.0\n b7order5 = 0.0\n\n b1order4 = 5179.0/57600.0\n b2order4 = 0.0\n b3order4 = 7571.0/16695.0\n b4order4 = 393.0/640.0\n b5order4 = -92097.0/339200.0\n b6order4 = 187.0/2100.0\n b7order4 = 1.0/40.0\n\n @x[0] = @xmin\n @u[0] = @yini\n @fx[0] = self.f(@x[0], @u[0])\n h = @dx \n i = 0\n\n 
0.upto(@maxiter) do |iter|\n # Compute the function values\n k1 = @fx[i] \n k2 = self.f(@x[i] + c2*h, @u[i] + h*(a21*k1))\n k3 = self.f(@x[i] + c3*h, @u[i] + h*(a31*k1+a32*k2))\n k4 = self.f(@x[i] + c4*h, @u[i] + h*(a41*k1+a42*k2+a43*k3))\n k5 = self.f(@x[i] + c5*h, @u[i] + h*(a51*k1+a52*k2+a53*k3+a54*k4))\n k6 = self.f(@x[i] + h, @u[i] + h*(a61*k1+a62*k2+a63*k3+a64*k4+a65*k5))\n k7 = self.f(@x[i] + h, @u[i] + h*(a71*k1+a72*k2+a73*k3+a74*k4+a75*k5+a76*k6))\n\n error = (b1order5 - b1order4)*k1 + (b3order5 - b3order4)*k3 + (b4order5 - b4order4)*k4 + \n (b5order5 - b5order4)*k5 + (b6order5 - b6order4)*k6 + (b7order5 - b7order4)*k7\n error = error.abs\n\n # error control\n if error < @tol then\n @x[i+1] = @x[i] + h\n @u[i+1] = @u[i] + h * (b1order5*k1 + b3order5*k3 + b4order5*k4 + b5order5*k5 + b6order5*k6)\n @fx[i+1] = self.f(@x[i+1], @u[i+1])\n i = i+1\n end\n\n delta = 0.84 * (@tol / error)**0.2\n if delta <= 0.1 then\n h = h * 0.1\n elsif delta >= 4.0 then\n h = h * 4.0\n else \n h = delta * h\n end\n\n # set h to the user specified maximal allowed value\n h = @dx if h > @dx \n\n if @x[i] >= @xmax then\n break\n elsif @x[i] + h > @xmax then\n h = @xmax - @x[i]\n end\n end\n\n @mx = @x.length # Number of x steps\n\n raise(RuntimeError, \"Maximal number of iterations reached \n before evaluation of the solution on the entire x interval \n was completed (try to increase maxiter or use a different method\") if @x.last < @xmax\n end", "def kaprekar(num)\n kaprekars_constant = 6174\n iterations = 0\n\n until num == kaprekars_constant\n num = (desc_digits(num) - asce_digits(num))\n iterations = iterations + 1\n end\n iterations\nend", "def weight r, h\n len_r = r.magnitude\n\n if len_r.xbetween? 0, h\n 315.0 / (64 * Math::PI * h**9) * (h**2 - len_r**2)**3\n else\n 0.0\n end\n end", "def quantum_number\n @l + @m + @n\n end", "def gear_inches\n ratio * wheel.diameter\nend", "def pmt(pv, n, r)\n (pv*r)/(1.0 - 1/(1.0+r)**n)\nend", "def cigaret_tar\n Unitwise(0.00001, 'kilogram')\n end", "def zrdo\n\n # we seed a good b and keep estimating up\n b = 120\n prevb = 21\n\n calcb = 0\n while prevb < 10 ** 12 do\n\n # this part will find us the next good b\n calcb = Math.sqrt(1 + 2 * b ** 2 - 2 * b)\n while calcb.round != calcb do\n b += 1\n calcb = Math.sqrt(1 + 2 * b ** 2 - 2 * b)\n end\n\n c = b\n # this is our estimation phase... 
since b / prevb\n # approaches a limit, i think it is quite accurate\n b = (b**2 / prevb.to_f).floor\n prevb = c\n # prevb directly correlates to calcb at this point,\n # that is why we have while prevb < 10**12\n end\n 1/2.0 + calcb / 2.0\nend", "def rate_scale; end", "def grass_to_kilogram(value)\n value / 1000.0\n end", "def weight; end", "def geometric_factor\n hourly_beam_radiation_on_tilted_surface / hourly_beam_radiation_on_horizontal_surface\n end", "def max_radius\n 4000\n end", "def knight_radiant; end", "def valormonograsasp\n\t\tvag=(grasasmono * 70) / 100\n vag.round(2)\n\tend", "def calc_B(b, k, v, n, g)\n (modpow(g, b, n) + k * v) % n\n end", "def weight\n 2 # ounces\n end", "def calc_FPKM(weight_per_kb, total_num_read)\n\treturn weight_per_kb * 1_000_000 / total_num_read\nend", "def circleArea(r)\n PI * r ** 2 \nend", "def construct_ms_per_r(gm, rd, md)\n ms_per_r = Array.new(rd[:num_regions]) {|e| e = Array.new}\n for y in 0...gm[:y] do\n for x in 0...gm[:x] do\n if md[:moves][y][x] != 0\n r = rd[:regions][y][x]\n ms_per_r[r].push(md[:moves][y][x])\n end\n end\n end\n # puts \"ms_per_r = \" + ms_per_r.to_s #NICE TO HAVE\n return ms_per_r\nend", "def grams\n gram_equivalent / amount\n end", "def gear_inches\n ratio * wheel.diameter\n end", "def gear_inches\n ratio * wheel.diameter\n end", "def gear_inches\n ratio * wheel.diameter\n end", "def r\n Math::sqrt(r2)\n end", "def valorenergeticoKJp\n\t\tvag=(valorenergeticoKJ * 70) / 100\n\t\tvag.round(2)\n\tend", "def birds_eye(n, k)\n return ((n-k)/(k+1));\nend", "def kilogram_to_grass(value)\n value * 1000.0\n end", "def gravitational_parameter\n Constants::G * self.mass\n end", "def construct_ms_per_r(gm, rd, md)\n ms_per_r = Array.new(rd[:num_regions]) {|e| e = Array.new}\n for y in 0...gm[:y] do\n for x in 0...gm[:x] do\n if md[:moves][y][x] != 0\n r = rd[:regions][y][x]\n ms_per_r[r].push(md[:moves][y][x])\n end\n end\n end\n puts \"ms_per_r = \" + ms_per_r.to_s\n return ms_per_r\nend", "def step_divisor\n 10 + features_sum(:anger_mantain)\n end", "def rk2(gpna,dt,n=1,&f)\n\t\t t = gpna[0].coord(-1) # 時間軸(長さ1)を取り出す\n\t\t var_num = gpna.length\n\t\t n.times{\n\t\t\t k1na = f.call(gpna,t)*dt\n\t\t\t k2na = f.call(gpna+k1na,t+dt)*dt\n\t\t\t gpna = gpna + (k1na+k2na)/2\n\t\t\t t = t + dt\n\t\t\t}\n\t\t\tvar_num.times{|v| gpna[v].axis(-1).set_pos(t) }\n\t\t return gpna\n\t\tend", "def ln_rek(x,n)\r\n\r\nend", "def refined_super_digit(n, k)\n \nend", "def pv(fv, n, r)\n fv*(1.0/((1+r)**n))\nend", "def refined_super_digit(n, k)\n \nend", "def refined_super_digit(n, k)\n \nend", "def g_width \n Config.ground_width\n end", "def krr_short()\n\tputs 'KRR_short'\n\n\twidth=0.8; tau=1e-6\n# *** \tkrr=KernelRidgeRegression(tau, GaussianKernel(0, width), RegressionLabels(label_train))\n\tkrr=Modshogun::KernelRidgeRegression.new(tau, GaussianKernel(0, width), RegressionLabels(label_train))\n\t#krr.set_features(tau, GaussianKernel(0, width), RegressionLabels(label_train))\n\tkrr.train(RealFeatures(fm_train))\n\tout = Modshogun::RegressionLabels.obtain_from_generic(krr.apply(RealFeatures(fm_test)).get_labels())\n\n\treturn krr,out\n\nend", "def irenergeticoKJ\n vag=(valorenergeticoKJ() * 100) / 8400\n vag.round(2)\n end", "def calories_per_liter\n 672\n end", "def irenergeticoKJp\n vag=(valorenergeticoKJp() * 100) / 8400\n vag.round(2)\n end", "def ctof (degres)\n return ((degres * 1.8) + 32)\nend", "def angl(hrs, mnts)\n 0.5 * (60 * hrs - 11 * mnts)\nend", "def ftks\n limit = 50\n result = 0\n # these are the correct right angles\n result 
+= limit * limit * 3\n # these are the ones on their side\n (1..limit).each do |i|\n result += i / 2 * 2\n end\n # there are some on their little angles\n (2..limit).each do |long|\n (1..(long - 1)).each do |tall|\n gcd = intGCDInts long, tall\n longd, talld = long / gcd, tall / gcd\n maxn = ((limit - long) / talld > tall / longd ? tall / longd : (limit - long) / talld)\n minn = (long / talld > (limit - tall) / longd ? (limit - tall) / longd : long / talld)\n # p \"(#{long}, #{tall}) => (#{longd}, #{talld}) : #{maxn} + #{minn}\"\n result += (maxn + minn) * 2\n end\n end\n result\nend", "def porcentajegrasa\n\t\t1.2 * imc + 0.23 * @edad - 10.8 * @sexo - 5.4\n\tend", "def display_size\n # (1.9 ** @magnitude) / 3.0 + 2.5\n (2.15 ** @magnitude) / 3.6 + 2.5\n end", "def r2 \n 0.5 #apparently not 0.5 * sqrt(2)\n end", "def volume_of_sphere(r)\n return (((4*(MATH::PI))/3)*r**3).round()\nend", "def rm_edgy_x() (ppu_x * reg_radius).round + 5 end", "def irmonograsas\n vag=(grasasmono * 100) / 70\n vag.round(2)\n end", "def flamegraph_sample_rate; end", "def ener_kj \n\t\t@ener_kj = @saturadas * 37 + @monoinsaturadas * 37 + @polinsaturadas * 37 + @azucares * 17 + @polialcoles * 10 + @almidon * 17 + @fibra * 8 + @proteinas * 17 + @sal * 25\n\t\treturn @ener_kj\n\tend", "def v(r,c) (f(r)/f(c)/f(r-c)).to_s end", "def calc_r_range\n R_RANGE\n end", "def radian_multiplier\n { \n :feet => 364491.8,\n :meters => 111170,\n :kms => 111.17,\n :miles => 69.407,\n :radians => 1\n }\n end", "def volumeOfSphere( r )\n return (4.0/3.0) * Math::PI * (r**3)\nend", "def gamma_linear\n a = (0..31).each.map { |v| v.chr }.join\n gamma_ctl 61697, a\n end", "def radius\n \twidth / 2\n end", "def volume (l, w, h)\n\tl * w * h\nend", "def calc_upg_gigjoules_ch4 (val1 = 0.03778, val2 = calc_upg_ch4)\n\t\t(val1 * val2).round 2 \n\tend", "def kaprekar_step(n)\n\n # TODO: Solve it!\n\n end", "def grasa\n\t\t1.2 * imc + 0.23 * @edad - 10.8 * ( sexo ? 1 : 0) - 5.4\n\tend", "def problem_80(size = 100)\n total = 0\n (2..100).each do |n|\n n,d = n.sqrt_frac(2*size)\n next unless n\n r = n * (10 ** (size * 1.1).to_i) / d\n r = r.to_s[0,size].split(//).map(&:to_i).reduce(&:+)\n total += r\n# puts r.inspect\n end\n total\nend", "def gear_inches\n #... some complex code\n ratio * diameter\n #... some complex code\n end", "def valorenergeticoKcalp\n\t\tvag=(valorenergeticoKcal * 70) / 100\n\t\tvag.round(2)\n\tend", "def radius=(r); @@R = r; end", "def rek(n)\n if n == 1\n return 1/3.to_f\n end\n if n == 0\n return 1.to_f\n end\n return 13.0*rek(n-1)/3.0 - 4.0*rek(n-2)/3.0\nend", "def smoothing\n 0.9\n end", "def t(k)\n k = k.to_f\n output = 0.0\n if k < @M\n output = 1/(k*@M)\n elsif k == @M\n r = @N/@M\n output = Math.log(r/DELTA)/@M\n else\n output = 0.0\n end\n return output\n end", "def escapeVel( r )\n m = massFromRadius( r )\n return Math::sqrt( (2*G*m)/r )\nend", "def gravy\n @gravy ||= begin\n hydropathy_sum = 0.0\n each_aa do |aa|\n hydropathy_sum += HYDROPATHY[aa]\n end\n round(hydropathy_sum / @seq.length.to_f, 3)\n end\n end" ]
[ "0.6338533", "0.6126679", "0.6123701", "0.60982937", "0.6056641", "0.60554034", "0.6044585", "0.60369277", "0.59697884", "0.59556246", "0.5948858", "0.59208816", "0.58816075", "0.5877764", "0.5877089", "0.5855692", "0.58093685", "0.5778032", "0.5773195", "0.5755947", "0.5745404", "0.56854296", "0.56705725", "0.5667178", "0.5659165", "0.5646764", "0.5644743", "0.5626099", "0.5617691", "0.56085855", "0.5602414", "0.55993354", "0.55605054", "0.5559687", "0.55410135", "0.5538425", "0.5528322", "0.55261934", "0.5522685", "0.5509817", "0.550276", "0.54996485", "0.5496703", "0.5485012", "0.54787076", "0.5464468", "0.5462751", "0.5457698", "0.54575205", "0.5444635", "0.5444635", "0.5444635", "0.5433611", "0.5433554", "0.5429222", "0.5428698", "0.54139584", "0.5409199", "0.5396926", "0.53952533", "0.53775764", "0.537607", "0.537511", "0.5373534", "0.5373534", "0.5372667", "0.5364364", "0.535818", "0.53426164", "0.53407234", "0.53374016", "0.5335504", "0.5332897", "0.5330544", "0.53279257", "0.53271085", "0.5324746", "0.5316191", "0.531386", "0.5304395", "0.5304386", "0.5300129", "0.52993816", "0.52963424", "0.5292429", "0.52918994", "0.52824765", "0.52797496", "0.5278181", "0.5272575", "0.52714086", "0.5269393", "0.5267868", "0.52666163", "0.5258325", "0.52535653", "0.52358645", "0.52356666", "0.5231607", "0.5229407" ]
0.61902887
1
If token has existed for more than 6000 seconds, refresh it.
def refresh_token? if (@token_timer + 6000) < Time.now self.get_token true else false end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def refreshToken\n # is there a token? (and is it's timestamp not older than 24h?)\n if @token.nil? or @tokenTimeStamp < Time.now - 86400\n @token = getToken(@email,@password)\n @tokenTimeStamp = Time.now\n end\n end", "def refresh_token\n get_comm_token if Time.now.to_i - @comm_token_ttl > TOKEN_TTL\n end", "def refresh_token\n get_comm_token if Time.now.to_i - @comm_token_ttl > TOKEN_TTL\n end", "def refresh_token_if_needed\n token_timestamp = decoded_jwt['exp']\n current_timestamp = DateTime.now.to_i\n return unless token_timestamp - current_timestamp <= 0\n\n refresh_token\n end", "def refresh_access_token()\n\n\t\tif(Time.now - @start_time) >=3000\n\t\t\tputs \"Access Token Expired .......Creating a new one\"\n\t\t\t@access_token = @new_token.get_access_token\n\t\t\t@start_time = Time.now\n\t\tend\n\tend", "def refresh_token\n return if token\n refresh_token!\n end", "def refresh_token\n authorize current_user\n original = current_user.api_token\n current_user.generate_token!\n @success = current_user.api_token != original\n end", "def fresh_token\n refresh! if token_expired?\n access_token\n end", "def fresh_token\n refresh! if expired?\n access_token\n end", "def refresh_session\n self.with_lock(timeout: 60, retries: 120, retry_sleep: 0.5) do\n if self.token_expires_at <= 10.seconds.from_now\n begin\n session.refresh_tokens\n self.refresh_token = session.refresh_token\n self.access_token = session.access_token\n self.token_expires_at = session.expires_at\n self.save\n rescue ExactOnlineLib::Api::Sdk::AuthError\n @session = nil\n end\n else\n @session = nil\n end\n @client = nil\n end\n end", "def refresh_auth_token\n generate_token(:auth_token)\n save!\n end", "def refresh_time\n self.update_column( :expires, Time.zone.now + TOKEN_LIFE )\n end", "def refresh \n decoded_refresh_token = JWT.decode(params[:refreshToken], 's3cr3t', true, algorithm: 'HS256')\n # Check if token was decoded\n if decoded_refresh_token\n refresh_token_record = Blacklist.find_by(jwt: params[:refreshToken])\n if refresh_token_record && Time.now < refresh_token_record.expiration \n @user = User.find_by(id: decoded_refresh_token[0]['user_id'])\n if @user # user exists\n Blacklist.find_by(jwt: params[:refreshToken]).delete\n # update the device_key for the user\n new_access_token = create_access_token(@user.id)\n new_refresh_token = create_refresh_token(@user.id)\n render json: {status: \"Refreshed Tokens\", auth: {accessToken: new_access_token.jwt, accessTokenExpiration: new_access_token.expiration, refreshToken: new_refresh_token.jwt, refreshTokenExpiration: new_refresh_token.expiration }}\n else\n render json: {error: \"Invalid User\"}\n end\n else\n render json: {status: \"Token Expired\"}\n end \n else # token is null\n render json: {error: \"Invalid Token\"}\n end\n end", "def with_fresh_token\n retried = false\n\n begin\n token = File.exist?(TOKEN_PATH) && File.read(TOKEN_PATH) || refresh_token()\n yield(token)\n rescue StaleTokenError\n raise if retried # Avoid endless loops.\n retried = true\n\n FileUtils.rm(TOKEN_PATH)\n retry\n end\nend", "def refresh_access_token!\n self.save! if refresh_access_token\n end", "def refresh_token\n if remember_token?\n self.remember_token = self.class.make_token \n save(false) \n end\n end", "def token\n refresh_token! 
if token_expired?\n super\n end", "def refresh_token\n if remember_token?\n self.remember_token = self.class.make_token\n save(false)\n end\n end", "def refresh_access_token\n self.expires_at = Time.now + 3600 \n save\n end", "def refresh!\n now = Time.now\n raise RefreshTokenExpired if refresh_token_expires_at&.<= now\n\n data = refresh_token_request!\n\n @access_token = data[\"access_token\"]\n @access_token_expires_at = (now + data[\"expires_in\"])\n\n on_refresh&.call(@access_token, @access_token_expires_at)\n end", "def refresh!(params={})\n @token = token.refresh!(params)\n end", "def refresh_token\n self.generate_token\n end", "def refresh_token!\n create_token = Kickit::API::CreateToken.new\n resp = create_token.execute(:username => username,\n :developerKey => Kickit::Config.developerKey)\n @token = resp\n end", "def refresh_token\n if remember_token?\n self.remember_token = make_token \n save(:validate => false) \n end\n end", "def refresh_access_token\n new_token = FireCloudClient.generate_access_token(self.service_account_credentials)\n new_expiry = Time.zone.now + new_token['expires_in']\n self.access_token = new_token\n self.expires_at = new_expiry\n new_token\n end", "def refresh_token\n if remember_token?\n self.remember_token = self.class.create_token\n save(:validate => false)\n end\n end", "def renew\n req_body = { grant_type: 'refresh_token', refresh_token: @token.refresh_token }\n\n response = JSON.parse(request_token(req_body).body)\n\n @token.update!(response['access_token'], response['expires_in'], response['refresh_token'])\n\n save\n rescue StandardError => e\n puts \"Unable to refresh token\\n#{e.message}\"\n end", "def refresh!\n response = request_token_from_google\n data = JSON.parse(response.body)\n if data[\"access_token\"].present?\n update_attributes(access_token: data['access_token'], expires_at: Time.now + (data['expires_in'].to_i).seconds)\n else\n puts data[\"error_description\"]\n end\n end", "def refreshToken(token)\n makeToken(:refresh_token, :refresh_token => token.refresh_token)\n end", "def token_refresh!\n self.access_token = access_token.refresh!\n end", "def refresh_token\n end", "def update_token\n client.authorization.update_token!(oauth_data)\n if client.authorization.refresh_token && client.authorization.expired?\n client.authorization.fetch_access_token!\n end\n end", "def refresh_access_token\n return false unless @oauth_access_token.expired?\n\n @oauth_access_token = @oauth_access_token.refresh!\n write_attribute :access_token, @oauth_access_token.token\n write_attribute :access_token_expires_at, @oauth_access_token.expires_at\n true\n end", "def renew_token\n body_params = token_request_body\n body_params << [\"refresh_token\", current_user.refresh_token]\n body_params << [\"grant_type\", \"refresh_token\"]\n\n get_token(body_params)\n redirect_to sage_accounting_data_path\n end", "def refresh_token\n return nil unless (temp_refresh_token = read_attribute(:refresh_token))\n # logger.debug2 \"temp_refresh_token = #{temp_refresh_token}\"\n encrypt_remove_pre_and_postfix(temp_refresh_token, 'refresh_token', 45)\n end", "def refresh_token\n return nil unless (temp_refresh_token = read_attribute(:refresh_token))\n # logger.debug2 \"temp_refresh_token = #{temp_refresh_token}\"\n encrypt_remove_pre_and_postfix(temp_refresh_token, 'refresh_token', 45)\n end", "def update_token\n\trequire 'date'\n\ttil = Time.at(settings.exp) - Time.now\n\tleft = (til/60).to_i\n\tp left\n\tif left < 5\n\t\tres = RestClient.post( 
\"https://auth.exacttargetapis.com/v1/requestToken\",\n\t\t\t\t\t\t\t {\n\t\t\t\t\t\t\t\t:clientId => settings.clientId,\n\t\t\t\t\t\t\t\t:clientSecret => settings.clientSecret,\n\t\t\t\t\t\t\t\t:refreshToken => settings.refreshToken,\n\t\t\t\t\t\t\t\t:accessType => \"offline\"\n\t\t\t\t\t\t\t })\n\t\t@res = JSON.parse(res)\n\t\tsettings.oauthToken = @res[\"accessToken\"]\n\t\tsettings.exp = Time.now + @res[\"expiresIn\"]\n\t\tsettings.refreshToken = @res[\"refreshToken\"]\n\tend\t\nend", "def refresh_token\n @token = @authenticator.refresh_token\n end", "def refresh_token\n @agent.shutdown\n @agent = Mechanize.new\n\n get_token\n set_headers\n end", "def refresh_token(token = @token)\n raise \"invalid token\" unless token\n @access_token ||= OAuth2::AccessToken.new(@oauth_client, token, :refresh_token => @refresh_token, :expires_at => @token_expires_at)\n if @access_token.expired?\n @access_token.refresh!\n @access_token_refreshed = true\n end\n end", "def refresh_token\n @refresh_token ||= nil\n end", "def fresh_hubstaff_access_token\n raise 'Organiaztion not connected to HB. Please edit hubstaff start auth code.' if hubstaff_token_will_end.blank?\n\n # still fresh\n return hubstaff_access_token if hubstaff_token_will_end - 2.hours > DateTime.now\n\n res = HubstaffClient.new.refresh_access_token_request hubstaff_refresh_token\n\n raise StandardError, res.parsed_response['error'] if res.code != 200\n\n self.hubstaff_access_token = res.parsed_response['access_token']\n self.hubstaff_refresh_token = res.parsed_response['refresh_token']\n self.hubstaff_token_get_at = DateTime.now\n self.hubstaff_token_will_end = DateTime.now + (res.parsed_response['expires_in'].to_i / 60.0 / 60.0).round.hours\n\n self.save\n\n hubstaff_access_token\n end", "def maybe_reauthenticate\n if @keystone_token_expiration < Time.now + 2*@timeout\n @logger.info \"Permanent token will expire soon. Re-authenticating...\"\n authenticate\n end\n end", "def refresh_single_use_oauth2_token!\n assign_single_use_oauth2_token\n save!\n end", "def refresh_tokens\n @token = @token.refresh!\n tokens\n end", "def initialize(token)\n @token = token\n if Time.now.to_i > @token.get_info['token_expiry']\n self.refresh\n end\n end", "def refresh_oauth2_token!\n ensure_oauth2_token(true)\n save!\n end", "def access_token\n refresh! 
if access_token_expires_at&.<= Time.now + 60 # time drift margin\n @access_token\n end", "def refresh_token\n QueryHelper::HeaderValidationChecker.new(request.headers, 'RefreshToken', 'Authorization').check_null_headers\n TokenHelper.check_blacklist(request.headers['RefreshToken'])\n # access token validation check\n payload = TokenHelper.jwt_decode(request.headers['Authorization'])\n unless payload && !payload['refresh']\n raise Exceptions::AuthenticationError.new(\"invalid access_token\")\n end\n # refresh_token validation and expiry check\n payload = TokenHelper.jwt_decode(request.headers['RefreshToken'])\n unless payload && payload['refresh'] && payload['expiry_time'] > Time.now\n raise Exceptions::AuthenticationError.new(\"invalid refresh_token\")\n end\n # user validation\n user = User.find(payload['user_id'])\n unless user\n Rails.logger.error \"no user found with the given user_id -- #{payload['user_id']}\"\n raise Exceptions::AuthenticationError.new(\"invalid token\")\n end\n res = user.generate_auth_token\n # Adding both the tokens to blacklist in redis, disabling its use again\n TokenHelper.add_blacklist(request.headers['Authorization'],\"access\")\n TokenHelper.add_blacklist(request.headers['RefreshToken'],\"refresh\")\n render json: res, status: 200, scope: nil\n rescue Exceptions::AuthenticationError, Exceptions::InvalidHeaderError, Exceptions::WrongHeaderError, JWT::DecodeError, JWT::VerificationError => e\n render json: {error: e}, status: 401, scope: nil\n end", "def refresh_token\n debug { \"Performing token refresh\" }\n return false if token.nil?\n return false if token['refresh_token'].nil?\n uri = URI.parse(token_account['token_endpoint'])\n endpoint_data = { path: uri.path }\n endpoint_data[:host] = uri.host if uri.host\n endpoint_data[:port] = uri.port if uri.port\n\n debug { \"Token refresh endpoint: #{endpoint_data.inspect}\" }\n\n return false unless endpoint_data[:path]\n\n response = request(\n {\n http_method: token_account['token_method'].downcase.to_sym,\n body: refresh_request_params,\n headers: {\n CONTENT_TYPE => token_account['token_post_content_type']\n }.merge(\n token_account['code_requires_basic_auth'] ? basic_auth_header : {}\n ),\n expects: [200, 201, 400, 401, 403],\n auth: false\n }.merge(endpoint_data)\n )\n\n if response && response['access_token']\n debug { \"Got response to refresh request\" }\n token['access_token'] = response['access_token']\n token['refresh_token'] = response['refresh_token']\n token['expires_at'] = in_to_at(response['expires_in'])\n token.config.write if token.respond_to?(:config)\n true\n else\n debug { \"Got null or bad response to refresh request: #{last_response.inspect}\" }\n false\n end\n rescue => ex\n error { \"Access token refresh exception\" }\n error { ex }\n false\n end", "def refresh_tokens!\n update(Twitch.new_tokens!(refresh_token))\n rescue RestClient::Unauthorized, RestClient::BadRequest\n # If the refresh got 401 Unauthorized, we were probably de-authorized from using the user's Twitch account. If it\n # got 400 Bad Request, we probably have a nil refresh token, perhaps because the authorization was created before we\n # started saving refresh tokens to the database.\n #\n # Ideally we'd destroy the TwitchUser here, but that may leave the user with no way to sign in. Instead, force a\n # sign out so we can get some fresh tokens. 
Until that happens we technically have no way to verify this Twitch user\n # and this Splits.io user are the same person, only that they once were in the past.\n #\n # Once we have linkless accounts, change this to destroy the TwitchUser.\n user.sessions.destroy_all\n end", "def refresh_token\n raise NotImplementedError\n end", "def access_token_was_refreshed; end", "def refresh\n begin\n oauth_attrs = source.create(params: refresh_token_params)\n oauth_attrs.each do |attr, value|\n send(\"#{attr}=\", value)\n end\n rescue My::Oauth::Unauthorized => e\n errors << 'Error refreshing token'\n end\n self\n end", "def valid_access_token\n\t\t\t\t# The token we have stored is expired - fetch a new one using the refresh token\n\t\t\t\tself.refresh_access_token if self.access_token_expired?\n\n\t\t\t\tself.access_token\n\t\t\tend", "def refresh_session_token\n session_signature = Digest::MD5.hexdigest(@toodle_uid + Babar::Base.toodle_app_token) \n session_token_url = \"http://api.toodledo.com/2/account/token.php?\" + self.parse_params({:userid => @toodle_uid, :appid => Babar::Base.toodle_app_id , :sig => session_signature,})\n puts session_signature, session_token_url\n @session_token = JSON.parse(Typhoeus::Request.get(session_token_url).body)[\"token\"]\n @toodle_token_death = Time.now + Babar::Base.toodle_app_token_lifetime\n [@session_token, @toodle_token_death]\n end", "def update_token(token)\n DB[:Token].where(Token: token).update(Timestamp: Time.now.to_i + 600)\n end", "def refresh\n new_token = EsdlSuiteService.setup.refresh(to_access_token)\n return delete if new_token.empty?\n\n update(new_token)\n end", "def refresh_access_token(refresh_token = @config.REFRESH_TOKEN)\r\n get_new_access_token(refresh_token)\r\n end", "def refresh_tokens\n\t\tbegin\n\t\t\t\n\t\t\tresp = $c.initiate_auth({\n\t\t\t auth_flow: \"REFRESH_TOKEN\",\n\t\t\t auth_parameters: {\n\t\t\t \"REFRESH_TOKEN\" => self.refresh_token,\n\t\t\t \"SECRET_HASH\" => calculate_cognito_hmac\n\t\t\t },\n\t\t\t client_id: ENV[\"COGNITO_CLIENT_ID\"]\n\t\t\t})\n\t\t\t\n\t\t\tself.access_token = resp[:authentication_result][:access_token]\n\t\t\tself.refresh_token = resp[:authentication_result][:refresh_token]\n\t\t\tself.token_expires_at = Time.now + (resp[:authentication_result][:expires_in]).seconds\n\t\t\tself.save\n\t\t\t#session[:user] = self\n\t\trescue => e\n\t\t\te.to_s\n\t\tend\t\n\tend", "def refresh_token()\n if @access_token_obj\n @access_token_obj.refresh_token\n else\n return nil\n end\n end", "def refresh(access_token)\n new_access_token = access_token.client.access_token!\n expires = Time.zone.at(decode_id_token(new_access_token.id_token).exp).to_datetime\n\n {\n access_token: new_access_token.access_token,\n expires_at: expires\n }\n rescue Rack::OAuth2::Client::Error\n # Refresh token was expired\n {}\n end", "def is_token_expired?\n (Time.now - self.updated_at) > 3300\n end", "def refreshed!(body)\n @access_token = body[:access_token]\n @expires_at = Time.now + body[:expires_in]\n end", "def refresh_token(oauth_drive_token)\n begin\n @client.authorization.fetch_access_token!\n rescue\n # Error when updating, so lets remove token and user will have to re-autrhorize\n oauth_drive_token.destroy\n return false\n end\n \n saved = OauthDriveToken.create_or_update(:user => oauth_drive_token.user, \n :access_token => @client.authorization.access_token, \n :refresh_token => @client.authorization.refresh_token,\n :expires_at => Time.now + @client.authorization.expires_in.seconds,\n :provider_number => 
APP_CONFIG['oauth']['google']['provider_number'])\n\n # Hasn't been updated, so lets remove token\n oauth_drive_token.destroy if !saved\n \n saved\n end", "def refresh_if_near_expiration; end", "def refresh_token\n @auth.refresh_token\n end", "def get_new_token\n auth = 'Basic ' + Base64.strict_encode64( session[:client_id] + \":\" + session[:client_secret]).chomp\n \n rcResultJson = RestClient.post(\n session[:token_url],\n {\n grant_type: 'refresh_token', \n refresh_token: session[:refresh_token], \n },\n {\n :Authorization => auth\n }\n )\n rcResult = JSON.parse(rcResultJson)\n\n session[:patient_id] = rcResult[\"patient\"]\n session[:access_token] = rcResult[\"access_token\"]\n session[:refresh_token] = rcResult[\"refresh_token\"]\n session[:token_expiration] = (Time.now.to_i + rcResult[\"expires_in\"].to_i )\n rescue StandardError => exception\n # binding.pry \n err = \"Failed to refresh token: \" + exception.message\n redirect_to root_path, alert: err\n end", "def cache_refresh_token(config, token)\n File.write(config.refresh_token_filename, token)\n end", "def get_new_token\n binding.pry \n auth = 'Basic ' + Base64.strict_encode64( session[:client_id] +\":\"+session[:client_secret]).chomp\n \n rcResultJson = RestClient.post(\n session[:token_url],\n {\n grant_type: 'refresh_token', \n refresh_token: session[:refresh_token], \n },\n {\n :Authorization => auth\n }\n )\n rcResult = JSON.parse(rcResultJson)\n\n session[:patient_id] = rcResult[\"patient\"]\n session[:access_token] = rcResult[\"access_token\"]\n session[:refresh_token] = rcResult[\"refresh_token\"]\n session[:token_expiration] = Time.now.to_i + rcResult[\"expires_in\"].to_i \n rescue => exception\n binding.pry \n err = \"Failed to refresh token\"\n redirect_to root_path, flash: { error: err }\n end", "def refresh_from_store\n ext_token = store.read\n raise \"Cannot refresh token : Unable to read store data\" if !ext_token&.is_a?(Hash) || ext_token.empty?\n client.update_token!(ext_token)\n end", "def refresh_access_token\n\t\t\t\trequire \"open-uri\"\n\t\t\t\trequire \"net/http\"\n\t\t\t\trequire \"openssl\"\n\t\t\t\trequire \"base64\"\n\n\t\t\t\turi = URI.parse(\"#{self.add_on.jive_url}/oauth2/token\")\n\t\t\t\thttp = Net::HTTP.new(uri.host, uri.port)\n\t\t\t\thttp.use_ssl = true\n\n\t\t\t\trequest = Net::HTTP::Post.new(uri.request_uri)\n\t\t\t\trequest.basic_auth self.add_on.client_id, self.add_on.client_secret\n\t\t\t\trequest.set_form_data({\n\t\t\t\t\t\"refresh_token\" => \"#{self.refresh_token}\",\n\t\t\t\t\t\"grant_type\" => \"refresh_token\",\n\t\t\t\t})\n\n\t\t\t\tresponse = http.request(request)\n\t\t\t\tjson_body = JSON.parse(response.body)\n\n\t\t\t\tif (response.code.to_i != 200)\n\t\t\t\t\traise RuntimeError, json_body[\"error\"].to_s.upcase\n\t\t\t\tend\n\n\t\t\t\tself.access_token = json_body[\"access_token\"]\n\t\t\t\tself.expires_in = json_body[\"expires_in\"]\n\t\t\t\tself.expires_at = json_body[\"expires_in\"].to_i.seconds.from_now\n\t\t\t\tself.save\n\t\t\tend", "def test_feature_refresh_token_for_expired_oauth_token\n # Setup\n opts = {\n 'email' => @user.email,\n 'expires_in' => 3\n }\n\n @user = setup_user(opts)\n\n sleep(opts['expires_in'])\n\n # Step 1\n headers = { 'Authorization' => \"Bearer #{@user.oauth_token}\" }\n\n get '/me', {}, headers\n assert_response(@response, :client_error)\n\n # Step 2\n @user.acquire_refreshed_oauth_token\n\n # Step 3\n headers = { 'Authorization' => \"Bearer #{@user.oauth_token}\" }\n\n get '/me', {}, headers\n assert_response(@response, :success)\n end", "def 
refresh_access_token!\n @access_token = access_token.refresh!\n end", "def renew_access_token!\n @access_token = nil\n true\n end", "def call\n api_key = ApiKey.find_by(access_token: @expired_token)\n\n if api_key.present?\n api_key.refresh\n api_key.save\n\n {\n token: api_key.access_token\n }\n else\n raise ExceptionService.new('Invalid Token.')\n end\n end", "def refresh_user_access_token(user_refresh_token)\r\n get_new_access_token(user_refresh_token)\r\n end", "def refresh_token\n @connection.refresh_token\n end", "def needs_refresh?\n @access_token.nil? ||\n (Time.now + TOKEN_BUFFER) > @expires_at\n end", "def refresh!(params = {})\n raise('A refresh_token is not available') unless refresh_token\n params[:grant_type] = 'refresh_token'\n params[:refresh_token] = refresh_token\n new_token = @client.get_token(params)\n new_token.options = options\n new_token.refresh_token = refresh_token unless new_token.refresh_token\n new_token\n end", "def update_refresh_tokens(mrrt)\n raise ArgumentError, 'Token must contain an MRRT.' unless mrrt.mrrt?\n\n @token_cache.find.each do |entry|\n entry.refresh_token = mrrt.refresh_token if mrrt.can_refresh?(entry)\n end\n end", "def isTokenExpired\n if Time.now.to_i - @access_token_timestamp >= access_token_expires\n puts \"had to reauthenticate\"\n self.authenticate\n end\n end", "def regenerate_token\n self.auth_token = nil\n generate_token\n save!\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end", "def set_RefreshToken(value)\n set_input(\"RefreshToken\", value)\n end" ]
[ "0.82410866", "0.8219974", "0.8219974", "0.81419677", "0.8070416", "0.798323", "0.78873384", "0.7586039", "0.75629735", "0.7454076", "0.74383026", "0.7424787", "0.73904794", "0.73677343", "0.73658806", "0.73493737", "0.7342838", "0.73345065", "0.7309849", "0.73093146", "0.729579", "0.7274203", "0.72617555", "0.7251843", "0.7251719", "0.72360486", "0.72040606", "0.71881956", "0.7161756", "0.71366245", "0.7136112", "0.7135805", "0.70443434", "0.7028926", "0.702799", "0.702799", "0.7024655", "0.7021689", "0.7015985", "0.7014677", "0.70025957", "0.70021534", "0.69941455", "0.699193", "0.69793266", "0.6961913", "0.69458085", "0.69369596", "0.6924863", "0.6905448", "0.6890146", "0.6862692", "0.683899", "0.6835844", "0.68210775", "0.6818916", "0.6802717", "0.6786323", "0.6781513", "0.67794263", "0.6756485", "0.6747906", "0.67357147", "0.67165333", "0.6704383", "0.670367", "0.66781044", "0.666648", "0.6657135", "0.6653784", "0.66465634", "0.6627293", "0.6623577", "0.66227734", "0.6622586", "0.6616746", "0.66045535", "0.659244", "0.65922344", "0.6586447", "0.6584838", "0.65735096", "0.65702647", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208", "0.656208" ]
0.83003277
0
Conditional recursive function ends if nextPageToken is not available in response. Writes results to CSV.
def rest_lead_stream url, list_id response = self.get url json = JSON.parse(response.body, :symbolize_names => true) results = json[:result] if results results.each { |result| @csv << [result[:id]] } @csv.flush puts "#{Time.now} => REST:leadList:#{list_id}:Results:#{results.length}" if GoodDataMarketo.logging end next_page_token = json[:nextPageToken] # If there is another page, remember it and then attempt the next load. if next_page_token self.remember_next_page :token => token, :list => list_id domain = @marketo_domain parameters = "&fields=#{@fields}" endpoint= "/rest/v1/list/#{list_id}/leads.json" url = domain + endpoint + @token_uri + parameters + "&nextPageToken=" + next_page_token rest_lead_stream url, list_id else # Update the local and remote lead lists File.open('lead_list_ids.json','w'){ |f| JSON.dump(@lead_list_ids, f) } @webdav.upload('lead_list_ids.json') end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def export_csv\n\n group_array = []\n @page = 1\n @per_page = 50\n\n groups = @context.get(:groups, :page => @page, :per_page => @per_page, :access_token => ENV[\"API_TOKEN\"])\n group_array << groups\n group_array, group_hash = check_paging(groups, group_array, \"groups\", @context, true)\n\n group_array.each_with_index do |group, index|\n is_new = index == 0 ? true : false\n membership_array = []\n @page = 1\n\n group_model = Group.find(group['id'], :params => { :access_token => ENV[\"API_TOKEN\"] })\n memberships = group_model.get(:memberships, :page => @page, :per_page => @per_page, :access_token => ENV[\"API_TOKEN\"])\n membership_array << memberships\n membership_array, @membership_hash = check_paging(memberships, membership_array, \"memberships\", group_model, is_new)\n end\n\n export_data = [group_array, @membership_hash]\n perform_export(export_data) \n\n respond_to do |format|\n format.html { render :inline => \"<a href=<%= @download_url %>>Download CSV</a>\" }\n format.json { render :json => @download_url.to_json }\n end\n end", "def end_paginate! results\n # set the link headers\n link = \"\"\n link += \"<#{@next_page}>; rel=\\\"next\\\"\" unless results.empty? or results.count < @limit\n headers['X-Next-Page'] = @next_page unless results.empty? or results.count < @limit\n if not results.empty? and @page > 1\n link += \", \"\n end\n link += \"<#{@prev_page}>; rel=\\\"prev\\\"\" unless @page == 1\n headers['X-Prev-Page'] = @prev_page unless @page == 1\n headers['Link'] = link\n headers['X-Total-Count'] = @count.to_s\n end", "def start_scraping(page_limit, output_file_path)\n\n # add header in csv file before loop starts\n CSV.open(output_file_path, 'w') do |csv|\n csv << ['Artist Name', 'City', 'Venue', 'Date', 'Time', 'Ticket Price']\n end\n\n # loop for retrieving data for pages upto page_limit\n page_no = 1\n loop do\n url = \"http://www.wegottickets.com/searchresults/page/#{page_no}/all#paginate\"\n parse_page = self.parse_data(url)\n \n puts \"Processing url:\", url\n\n concert_div_arr = parse_page.css('.content.block-group.chatterbox-margin')\n if concert_div_arr.length == 0 # no concert elements are found\n break\n end\n concert_details_array = self.extract_concert_details_data(concert_div_arr)\n\n self.store_concert_details_data(concert_details_array, output_file_path)\n \n # add up pagginations results\n if page_no >= page_limit && page_limit != -1 \n break\n end\n page_no = page_no + 1\n end\n end", "def make_recursive_call\n offset = 0\n p_args = set_pagination(offset, page_size, args)\n api_caller.verbosity(conn, method, *p_args)\n\n return if api_caller.opts[:noop]\n\n ret = api_caller.respond(conn.public_send(method, *p_args))\n\n return ret unless ret.more_items?\n\n loop do\n offset += page_size\n p_args = set_pagination(offset, page_size, p_args)\n api_caller.verbosity(conn, method, *p_args)\n resp = api_caller.respond(conn.public_send(method, *p_args))\n raise StopIteration unless resp.ok?\n\n ret.response.items += resp.response.items\n return finalize_response(ret) unless resp.more_items?\n end\n end", "def each(&block)\n \n # first time each() is called\n if (@test_response == false)\n @test_response = true\n return\n end\n \n # if we have already generated content, return, otherwise generate\n return if @generated\n \n # generate a CSV header\n yield generate_header()\n \n # generate the CSV content\n # in our production code, we have this method being called in a X.times {} cycle\n # where the block passes the iteration number into the method, calculates 
and returns data\n # X.times { |t| generate_content(t) }\n yield generate_content()\n \n # generate a CSV footer\n yield generate_footer()\n \n # this lets the renderer know we are done\n yield \"\\r\\n\"\n \n @generated = true\n end", "def write_remaining_call_numbers\n cursor_mark = '*'\n CSV.open(filename, 'ab') do |csv|\n loop do\n body = solr_cursor_page_body(cursor_mark)\n body['response']['docs'].each do |record|\n next unless record[facet_field]\n record[facet_field].each do |cn|\n sort_cn = StringFunctions.cn_normalize(cn)\n next if multi_cn_lookup.key?(sort_cn)\n csv << parse_cn_row(record, cn, sort_cn)\n end\n end\n next_cursor_mark = body['nextCursorMark']\n break if cursor_mark == next_cursor_mark\n cursor_mark = next_cursor_mark\n end\n end\n end", "def to_csv\n initialize_generator\n csv_report_generator.records = pull_join\n csv_report_generator.generate_report\n end", "def index\n get_paginated_departments\n respond_to do |format|\n format.html\n format.js\n format.xls\n if(params[:a] == \"a\")\n format.csv { send_data Department.where(\"parent_id IS NULL\").to_csv2 }\n else\n format.csv { send_data Department.where(\"parent_id IS NULL\").to_csv }\n end\n end\n end", "def process_page_results(output_file_name, result_items)\n file = output_file(output_file_name)\n\n result_items.each do |result|\n file.write(\"#{result['id']},#{result['full_name']},#{result['language']}\\n\")\n end\nend", "def search_github_repos(search_names, language, output_file_name)\n search_names.each do |name|\n # get results for first page\n response = query_api(name, language, 1)\n\n # figure out how many pages exist\n last_page = results_last_page(response[:headers])\n\n # process first page\n # response[:results] is JSON object\n # method is responsible for writing repo data to csv\n process_page_results(output_file_name, response[:results]['items'])\n\n # process results for each successive page\n (2..last_page).each do |page|\n # query api for results for the specified page\n response = query_api(name, page)\n\n # process results from the page\n # response[:results] is JSON object\n # method is responsible for writing repo data to csv\n process_page_results(output_file_name, response[:results]['items'])\n end\n end\nend", "def process_result_dirs(parent_dir)\n # get all the result dirs in the parent dir\n results_dirs = Dir.glob(\"#{parent_dir}/*/\").sort\n puts \"processing #{results_dirs.length} results directories...\"\n\n # process each result dir\n results_dirs.each do |dir|\n dirname = File.basename(dir)\n puts \"processing results dir: #{dirname}\"\n\n csv_path = \"#{dir}#{dirname}.csv\"\n puts \"csv_path: #{csv_path}\"\n\n # create a summary csv with the headings and last 2 rows\n heading = \"<h1>#{dirname}</h1>\"\n table = extract_table_from_results_csv(csv_path)\n @tables += heading + table unless table.nil?\n end\nend", "def csv(section = 'main', q='google',date='ytd',geo='')\n trend_params = {\"graph\"=>\"all_csv\", \"sa\" => \"N\"}\n trend_params[\"q\"] = q\n trend_params[\"date\"] = date\n if !geo || geo != ''\n trend_params[\"geo\"] = geo\n end\n\n data = @client.get_content(URI.parse(@url_Export), trend_params)\n # empty to return all data\n if section == ''\n return CSV.parse(data)\n end\n # split data into sections\n segments = data.split(\"\\n\\n\\n\")\n if section == 'main'\n section = ['Week', 'Year', 'Day','Month']\n else\n section = [section]\n end\n\n for x in segments do\n if section.include? 
x.split(',')[0].strip\n maindata = CSV.parse(x)\n return maindata\n end\n end\n end", "def done(project, api_key, limit=1, skip=0)\n\treturn _iterations('done', project, api_key, limit, (skip*-1)-1)\nend", "def csv_download\n filename = sprintf(\"%s-%s.csv\",\n model_name,\n Time.now.strftime(\"%Y%m%d-%H%M%S\"))\n file = Tempfile.new(filename, Settings.edgarj.csv_dir)\n csv_visitor = EdgarjHelper::CsvVisitor.new(view_context)\n file.write CSV.generate_line(model.columns.map{|c| c.name})\n for rec in user_scoped.where(page_info.record.conditions).\n order(\n page_info.order_by.blank? ?\n nil :\n page_info.order_by + ' ' + page_info.dir) do\n array = []\n for col in model.columns do\n array << csv_visitor.visit_column(rec, col)\n end\n file.write CSV.generate_line(array)\n end\n file.close \n File.chmod(Settings.edgarj.csv_permission, file.path)\n send_file(file.path, {\n type: 'text/csv',\n filename: filename})\n #file.close(true)\n end", "def download_csv_file_most_seached_merchants\n if (params[:start_date]&&params[:end_date]).blank?\n header = 'Merchant Id, Merchant Name, Created On, last time Searched'\n file_name = \"Most_searched_merchant.csv\"\n File.open(file_name, \"w\") do |writer|\n writer << header\n writer << \"\\n\"\n Merchant.all.order('view_counter desc').each do |merchant|\n csv_value = merchant.id, merchant.try(:name), merchant.created_at\n writer << csv_value.map(&:inspect).join(', ')\n writer << \"\\n\"\n end\n end\n send_file(file_name)\n else\n start_date = Date.strptime(params[:start_date], \"%m/%d/%Y\")\n end_date = Date.strptime(params[:end_date], \"%m/%d/%Y\")\n header = 'Merchant Id, Merchant Name, Created On, last time Searched'\n file_name = \"Most_searched_merchant#{start_date}_to_#{end_date}.csv\"\n File.open(file_name, \"w\") do |writer|\n writer << header\n writer << \"\\n\"\n Merchant.where(\"DATE(created_at) >= ? AND DATE(created_at) <= ?\", start_date, end_date).order('view_counter desc').each do |merchant|\n csv_value = merchant.id, merchant.try(:name), merchant.created_at, merchant.updated_at\n writer << csv_value\n writer << \"\\n\"\n end\n end\n \n send_file(file_name)\n end\n end", "def fill_pages(site, csv)\r\n site.pages.each do |page|\r\n if !excluded?(site, page.path_to_source)\r\n if File.exists?(page.path)\r\n url = fill_url(site, page)\r\n csv << [page.title, 100, page.title, url, \"\", \"page\"] \r\n end\r\n end\r\n end\r\n end", "def index\n @trainers = Trainer.paginate(:page => params[:page]).order(email_dirigeant: :desc, crawled_for_email: :desc)\n respond_to do |format|\n format.html\n format.csv { send_data Trainer.all.to_csv}\n end\n end", "def paginate_all_data(url:, more_params: nil)\n query_params = \"since=#{@start_date}&until=#{@end_date}#{more_params ? 
\"&#{more_params}\" : ''}\"\n total_count = parse_json_from_api(\"#{url}?#{query_params}\")['page']['total_elements']\n puts \"[#{Time.new.strftime('%k:%M')}] total count for #{url}?#{query_params}: #{total_count}\"\n idx = 0\n result = []\n while idx * PER_PAGE < total_count\n result += parse_json_from_api(\"#{url}?page=#{idx}&per_page=#{PER_PAGE}&#{query_params}\")['data']\n idx += 1\n end\n puts \"[#{Time.new.strftime('%k:%M')}] ERROR: result.length #{result.length} != total_count #{total_count}\" if result.length != total_count\n result\nrescue MyApiError\n []\nend", "def collect_results\n while collect_next_line; end\n end", "def export\n @transactions = Transaction.find_all_by_user_id(current_user.id)\n csv = \"\"\n i = 0\n @transactions.each do |trans|\n if (i==0)\n csv += trans.to_csv(true)\n else\n csv += trans.to_csv(false)\n end\n i += 1\n end\n\n respond_to do |format|\n format.csv { send_data csv }\n end\n end", "def get_results\n\n # An internal counter to get the next\n # set of results from the API\n @result_count = 0\n\n # An array into which the API results can\n # be collected\n @results = []\n\n # Get the first set of results from the API\n json_response = self.query\n\n while true\n\n # Exit the loop if the API doesn't return\n # any results and set the \"skip\" attribute\n # to nil\n if json_response['result_count'] == 0\n self.skip= nil\n break\n end\n\n # Add the count of the returned results to the\n # internal result counter's current value\n @result_count += json_response['result_count']\n\n # Append the current results to the results\n # array\n @results << json_response['results']\n\n # Set the \"skip\" attribute to the value\n # on the internal result counter\n self.skip= @result_count\n\n # Get the next set of results from the API\n json_response = self.query\n\n # A simple progress bar\n print \"#\"\n\n end\n\n # Print the total result count to the console\n puts \"\\nFound #{@result_count} results.\"\n\n return @results\n\n end", "def main()\n request_url = \"#{$canvas_url}/api/v1/users/#{$canvas_user_id}/page_views?per_page=100&start_time=#{$start_time}&end_time=#{$end_time}\"\n method = \"get\"\n options = {}\n data = canvasApiRequest(method,request_url,options)\n compiledHash = []\n data.each do |hash|\n hashData = flattenHash(hash)\n compiledHash.push(hashData)\n end\n outputToCSV(compiledHash)\nend", "def csv\n @records = Reply.all\n csv_string = FasterCSV.generate do |csv|\n csv << %w{Code Name Email Engagement Engagement_Adults Engagement_Children Wedding Wedding_Adults Wedding_Children Camping Diet Notes Updated}\n @records.each do |line|\n csv << [\n line['code'],\n line['name'], \n line['email'],\n line['engagement'],\n line['engagement_adults'],\n line['engagement_children'],\n line['wedding'],\n line['wedding_adults'],\n line['wedding_children'],\n line['camping'],\n line['diet'],\n line['notes'],\n line['updated_at']\n ]\n end\n end\n filename = \"rsvp_download\" + Time.now.strftime(\"%d%m%y-%H%M\").to_s + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end", "def csv\n @resource[:count].times do |retry_number|\n csv = get_csv\n return csv if csv\n debug \"Could not get CSV. 
Retry: '#{retry_number}'\"\n sleep @resource[:step]\n end\n fail \"Could not get CSV after #{@resource[:count] * @resource[:step]} seconds!\"\n end", "def generate_CSV\n CSV.open(\"./veterans.csv\", \"w\", \n write_headers: true,\n headers: [\"login\", \"name\", \"location\", \"repo count\"]\n ) do |csv|\n @top_ten.each { |row| csv << row }\n end\n end", "def print_csv_results\n FileUtils.mkdir_p(\"data\")\n File.open(\"data/results.csv\", \"w\") do |f|\n @data.sort_by{|id, values| values[\"sum_dose\"]}.each do |id, values|\n f.puts \"%s\\t%d\\t%.4f\" % [id, values[\"ddays\"], dose_unit(values[\"sum_dose\"])]\n end\n end\n end", "def data_results(data)\n #data search p row. Creates a tree pattern that mirrors the html.\n results = data.search('p.row')\n\n apartments = []\n results.each do |listing| #listing is a bunch of data for each p row.\n apartments.push(create_apartment(listing))\n end\n create_csv(apartments) #list of apartments here.\nend", "def write_multiple_call_numbers\n # This request takes a minute or so\n resp = conn.get \"#{facet_request}#{facet_field}&facet.mincount=2\"\n req = JSON.parse(resp.body)\n CSV.open(filename, 'wb') do |csv|\n req['facet_counts']['facet_fields'][facet_field].each_slice(2) do |mcn, record_count|\n sort_cn = StringFunctions.cn_normalize(mcn)\n multi_cn_lookup[sort_cn] = record_count\n csv << [sort_cn, mcn, 'ltr', '', \"#{record_count} titles with this call number\", '', '', \"?f[#{facet_field}][]=#{CGI.escape(mcn)}\", '', 'Multiple locations']\n end\n end\n end", "def finalize_response(resp)\n resp.tap do |r|\n r.response[:limit] = r.response.items.size - 1\n r.response[:moreItems] = false\n end\n end", "def export\n @entries = []\n lines = []\n lines << \"Rating Diff,Score Diff\"\n @games = Game.where('home_score >= 0')\n stat_entries = StatEntry.where(stat_id: @stat.id)\n @games.each do |game|\n home_team = Team.where(short_name: game.home_team)[0]\n away_team = Team.where(short_name: game.away_team)[0]\n\n home_stat_entry = StatEntry.where(stat_id: @stat.id, week: game.week-1, year: game.year, team_id: home_team.id)[0]\n away_stat_entry = StatEntry.where(stat_id: @stat.id, week: game.week-1, year: game.year, team_id: away_team.id)[0]\n\n if home_stat_entry and away_stat_entry\n if home_stat_entry.rating\n if home_stat_entry.rating - 2*home_stat_entry.rating_deviation > away_stat_entry.rating + 2*away_stat_entry.rating_deviation\n lines << \"#{home_stat_entry.rating - away_stat_entry.rating},#{game.home_score - game.away_score}\"\n end\n if away_stat_entry.rating - 2*away_stat_entry.rating_deviation > home_stat_entry.rating + 2*home_stat_entry.rating_deviation\n lines << \"#{away_stat_entry.rating - home_stat_entry.rating},#{game.away_score - game.home_score}\"\n end\n end\n end\n end\n\n csv_string = lines.join(\"\\n\")\n\n respond_to do |format|\n format.html { render :export }\n format.json { render json: @entries }\n format.csv { send_data(csv_string, :filename => \"#{@stat.name.camelize}.csv\", :type => \"text/csv\") }\n end\n\n end", "def generate_csv\n\n fields = @resource[:class].typus_fields_for(:csv).collect { |i| i.first }\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = \"#{Rails.root}/tmp/export-#{@resource[:self]}-#{Time.now.utc.to_s(:number)}.csv\"\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', 
:col_sep => ';') do |csv|\n csv << fields\n @resource[:class].find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map { |f| record.send(f) }\n end\n end\n end\n\n send_file filename\n\n end", "def next\n return nil unless next?\n ensure_service!\n gapi = service.job_query_results job_id, token: token\n QueryData.from_gapi gapi, service\n end", "def next_batch\n summoner = Summoner.by_name(params[:name])\n matches = $redis.get(\"#{summoner.id}#{params[:limit]}\")\n\n unless matches\n matches = Match.get(\n summoner,\n params[:offset].to_i,\n params[:limit].to_i\n ).to_json\n $redis.set(\"#{summoner.id}#{params[:limit]}\", matches)\n end\n @matches = JSON.parse matches\n\n data_ids = get_ids\n @champions = Champion.in_match(data_ids[:champions])\n @spells = Spell.in_match(data_ids[:spells])\n @items = Item.in_match(data_ids[:items])\n\n render :index\n end", "def index\n # @association_pages, @associations = paginate(:association,\n # { :order => \"created_at DESC\", :per_page => 20 })\n \n @associations = Association.paginate(:page => params[:page], \n :conditions => ['nom like ?', \"%#{params[:search]}%\"],\n :per_page => 20,\n :order => \"created_at DESC\")\n @nb_to_verify = Association.to_verify.size\n @asso_incomplete = Association.incomplete.size\n \n respond_to do |format|\n format.html # index.rhtml\n format.xml { render :xml => @associations.to_xml }\n format.csv { send_data @associations.to_csv(:columns => [\"nom\", \"adresse_siegesocial\", \"code_postal\", \"ville\", \"email\"]), :type => \"text/csv\" }\n end\n end", "def list_of_placements_csv\n @events = Event.find(:all)\n\n @report_name = \"List of Placements\"\n \n\n csv_string = FasterCSV.generate do |csv|\n csv << [\"Class\", \"Class Name\", \"Result\", \"Horse\", \"Horse Number\", \"Rider\", \"Owner\" ]\n\n @events.each do |event|\n eps = event.event_participants.reject{|ep| ep.result.blank?}\n eps.sort_by{ |ep| ep.result}.each do |ep|\n csv << [event.event_class,\n event.name,\n ep.result,\n ep.horse.name,\n ep.horse.number,\n ep.rider.name,\n ep.owner.name\n ]\n \n end\n eps = event.event_participants.select{|ep| ep.result.blank?}\n eps.each do |ep|\n csv << [event.event_class,\n event.name,\n ep.result,\n ep.horse.name,\n ep.horse.number,\n ep.rider.name,\n ep.owner.name\n ]\n \n end\n end\n end\n\n filename = \"Horseshow_#{ENV['YEAR_OF_HORSESHOW']}_list_of_placements.csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n \n end", "def index\n @search = Claim.order('claims.created_at desc').search(params[:q])\n @claims = @search.result(:distinct => true).paginate(:per_page => 50, :page => params[:page])\n respond_to do |format|\n format.html{}\n format.csv {\n send_data generate_csv, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=claims_list.csv\" \n }\n end\n end", "def get_characters_from_api\n #establish initial url from api\n url = 'http://www.swapi.co/api/people/'\n #get all data from first page of API\n page_of_characters = JSON.parse(RestClient.get(url))\n all_characters = page_of_characters\n \n next_call = page_of_characters[\"next\"]\n\n while next_call do\n next_page = next_call\n next_page_of_characters = JSON.parse(RestClient.get(next_page))\n (all_characters[\"results\"] << next_page_of_characters[\"results\"])\n next_call = next_page_of_characters[\"next\"]\n end\n\n all_characters\nend", "def nextResults\n # Update results\n updateResults(@nextURI)\n\n # Update nextURI\n 
updateNextURI\n end", "def export\n params[:page] = 1 unless params[:page]\n \n case params[:type]\n when 'stop'\n get_start_and_end_date\n return export_events(StopEvent.find(:all, {:order => \"created_at desc\", :conditions => get_device_and_dates_with_duration_conditions}))\n when 'idle'\n get_start_and_end_date\n return export_events(IdleEvent.find(:all, {:order => \"created_at desc\", :conditions => get_device_and_dates_with_duration_conditions}))\n when 'runtime'\n get_start_and_end_date\n return export_events(RuntimeEvent.find(:all, {:order => \"created_at desc\", :conditions => get_device_and_dates_with_duration_conditions}))\n when 'maintenance'\n return export_maintenance\n end\n\n get_start_and_end_date\n event_type = params[:type] == 'geofence' ? '%geofen%' : '%'\n readings = Reading.find(:all,:order => \"created_at desc\",:offset => ((params[:page].to_i-1)*ResultCount),:limit=>MAX_LIMIT,\n :conditions => \"#{get_device_and_dates_conditions} and event_type like '#{event_type}'\")\n\n # Name of the csv file\n @filename = params[:type] + \"_\" + params[:id] + \".csv\"\n csv_string = FasterCSV.generate do |csv|\n csv << [\"Location\",\"Speed (mph)\",\"Started\",\"Latitude\",\"Longitude\",\"Event Type\"]\n readings.each do |reading|\n local_time = reading.get_local_time(reading.created_at.in_time_zone.inspect)\n csv << [reading.short_address,reading.speed,local_time,reading.latitude,reading.longitude,reading.event_type]\n end\n end\n\n send_data csv_string,\n :type => 'text/csv; charset=iso-8859-1; header=present',\n :disposition => \"attachment; filename=#{@filename}\"\n rescue\n flash[:error] = $!.to_s\n redirect_to :back\n end", "def index\n @responses = Response.paginate(:page=>params[:page],per_page:30).order('id')\n @all_responses = Response.all.order('id')\n respond_to do |format|\n format.html\n format.csv { send_data @all_responses.to_csv }\n format.xls\n end\n end", "def export(organization_name)\n CSV.open(\"/tmp/#{organization_name}.csv\", \"w\") do |csv|\n csv << [\"Name\", \"Upload Date\", \"Steps\", \"Aerobic Steps\", \"Calories\", \"Miles\", \"Device Serial\", \"Input Method\"]\n self.uploads.includes(:person).except(:order).find_each(batch_size: 6000) do |upload|\n if upload.is_device_input == 1\n input_method = \"Synced from Device\"\n else\n input_method = \"Manually Uploaded\"\n end\n csv << [\"#{upload.person.first_name} #{upload.person.last_name}\", upload.date.try(:strftime, \"%b %d %Y\"), upload.total_steps, upload.aerobic_steps, upload.calories, ('%.2f' % upload.distance), upload.device_serial, input_method]\n end\n end\n end", "def save_results\n\t\t# save search results\n\t\tCSV.open('results.csv', 'wb') {|csv|\n\t\t\t@checked_links.each {|link|\n\t\t\t\tcsv << [link[0], link[1][:res], link[1][:time]]\n\t\t\t}\n\t\t}\n\t\t# save list of external links\n\t\tCSV.open('external-links.csv', 'wb') {|csv|\n\t\t\t@external_links.each do |link|\n\t\t\t csv << [link[0], link[1][:res], link[1][:time]]\n\t\t\tend\n\t\t}\n\t\t# save list of invalid links\n\t\tCSV.open('invalid.csv', 'wb') {|csv|\n\t\t\t@error_links.each do |link|\n\t\t\t csv << link\n\t\t\tend\n\t\t}\n\tend", "def next_page_token\n report[\"nextPageToken\"]\n end", "def output_to_csv\n if (@values[:result] != nil)\n @additions << calculate_worktime\n filename = get_entry(\"Specify output file: \")\n p = Menu.data_handler.find_person_by_id(@values[:id])\n CSVWriter.output(filename, p, @values[:result], @additions)\n else\n puts \"Nothing to write right now.\"\n end\n end", "def index \n @q = 
Task.accessible_by(current_ability).search(params[:q])\n @tasks = @q.result(distinct: true).page params[:page]\n @accounts = Account.accessible_by(current_ability)\n @opportunities = Opportunity.accessible_by(current_ability)\n respond_to do |format|\n format.html\n format.csv { send_data @tasks.to_csv }\n format.xls { send_data @tasks.to_csv(col_sep: \"\\t\") }\n end\n end", "def index\n # no pagination for CSV export\n per_page = request.format.to_s.eql?('text/csv') ? 10000 : Person.per_page\n @results = if index_params[:q]\n Person.search index_params[:q], per_page: per_page, page: (index_params[:page] || 1)\n elsif index_params[:adv]\n Person.complex_search(index_params, per_page) # FIXME: more elegant solution for returning all records\n else\n []\n end\n @tags = index_params[:tags].blank? ? '[]' : Tag.where(name: index_params[:tags].split(',').map(&:strip)).to_json(methods: [:value, :label, :type])\n\n respond_to do |format|\n format.json { @results.map { |r| r['type'] = 'person' }.to_json }\n format.html {}\n format.csv do\n fields = Person.column_names\n fields.push('tags')\n output = CSV.generate do |csv|\n # Generate the headers\n csv << fields.map(&:titleize)\n\n # Some fields need a helper method\n human_devices = %w(primary_device_id secondary_device_id)\n human_connections = %w(primary_connection_id secondary_connection_id)\n\n # Write the results\n @results.each do |person|\n csv << fields.map do |f|\n field_value = person[f]\n if human_devices.include? f\n human_device_type_name(field_value)\n elsif human_connections.include? f\n human_connection_type_name(field_value)\n elsif f == 'tags'\n if person.tag_values.blank?\n ''\n else\n person.tag_values.join('|')\n end\n else\n field_value\n end\n end\n end\n end\n send_data output\n end\n end\n end", "def after_pagination\n end", "def main \n skip_first = true\n headers = []\n CSV.foreach('to_email.csv', :encoding => \"UTF-8\") do |row|\n if skip_first\n headers = row\n skip_first = false\n next\n else\n cur = File.open(\"#{row[5].to_s}.txt\", 'a')\n cur << \"Hi,\n\nBelow is the feedback you received from your customer:\" << \"\\n\\n\"\n headers.zip(row).each do |header, val|\n if header == \"emails\"\n tmp = val.split\n val = tmp.join(\",\")\n cur << \"Customer Feedback, Iteration 2\\n\"\n cur << val << \"\\n\\n\"\n next\n end\n if header == \"Which project is this feedback for?\"\n next\n end\n cur << header << \"\\n\"\n cur << val << \"\\n\\n\"\n end\n cur.close\n end\n end\n\nend", "def perform\n get_all_email_of_department_townhalls\n CSV.open(\"thp_free_com/db/db.csv\",\"a+\") {|csv| get_all_email_of_department_townhalls.to_a.each {|elem| csv << elem} }\n puts \"done envoyé vers un csv\"\n end", "def csv_export(config, patients, inner_batch_size)\n # Get all of the field data based on the config\n field_data = get_field_data(config, patients)\n\n # Determine selected data types for export\n data_types = CUSTOM_EXPORT_OPTIONS.keys.select { |data_type| field_data.dig(data_type, :checked).present? 
}\n\n files = []\n csvs = {}\n packages = {}\n\n data_types.each do |data_type|\n # Create CSV with column headers\n package = CSV.generate(headers: true) do |csv|\n csv << field_data.dig(data_type, :headers)\n csvs[data_type] = csv\n end\n packages[data_type] = package\n end\n\n # NOTE: in_batches appears to NOT sort within batches, so explicit ordering on ID is also done deeper down.\n # The reorder('') here allows this ordering done later on to work properly.\n patients.reorder('').in_batches(of: inner_batch_size).each do |batch_group|\n # Get export data in batches to decrease size of export data hash maintained in memory\n exported_data = get_export_data(batch_group.order(:id), config[:data], field_data)\n data_types.each do |data_type|\n exported_data[data_type]&.each { |record| csvs[data_type] << record }\n end\n end\n\n data_types.each do |data_type|\n files << { filename: build_export_filename(config, nil), content: StringIO.new(packages[data_type]) }\n end\n files\n end", "def index \n @phones = Phone.paginate :per_page => 20, \n :page => params[:page],\n :conditions => ['number like ?', \"%#{params[:search]}%\"], \n :order => 'updated_at DESC'\n respond_to do |format|\n format.html # index.rhtml\n format.xml { render :xml => @phones.to_xml }\n format.csv {\n @phones = Phone.find :all, :order => 'updated_at DESC'\n render :text => @phones.to_csv \n response.headers['Content-Type'] = 'text/csv; charset=utf-8; header=present'\n response.headers['Content-Disposition'] = \"attachment; filename=phones_#{Time.now.strftime(\"%d-%m-%Y-%H-%M-%S\")}.csv\"\n }\n end\n end", "def dump_results\n progress('END')\n t = total\n\n if config.live_logging || false === config.print_limit || t > config.print_limit.to_f\n log_header(true)\n @buffer.each { |message| log_line(message) }\n log_footer(t)\n end\n end", "def index\n # @registrations = Registration.all.page params[:page]\n # @people = Registration.all.page params[:page] \n result = Registry.search(params[:fromDate],\n params[:toDate],\n params[:purchaseDate],\n params[:dealerAccount],\n params[:dealerName],\n params[:flagship], \n params[:model], \n params[:serialNumber], \n params[:territory], \n params[:region],\n params[:firstName], \n params[:lastName])\n\n @people = result.page(params[:page]).per(10)\n\n # @people = Registration.find( {first_name: \"Angel\"});\n if request.xhr?\n @people = result; \n render(:print_view, :layout => false)\n return \n end\n respond_to do |format|\n \tformat.html\n \tformat.csv { \n \t\tRails.logger.debug(\"Search params: #{params[:dealerAccount]}\")\n \t\t# @people = Registry.search(params[:fromDate],params[:toDate],params[:purchaseDate],\n # params[:dealerAccount],params[:dealerName],params[:flagship], params[:model], params[:serialNumber], \n # params[:territory], params[:region],\n # params[:firstName], params[:lastName]);\n \t\tsend_data result.to_csv }\n \t# format.xls { send_data @people.to_csv(col_sep: \"\\t\") }\n end\n end", "def attach_results\n # if @asq.result && @asq.result.is_json? && @email_delivery.attach_results\n if @asq.result && @asq.result.is_json? 
&& @email_delivery.attach_results\n attachments[@asq.get_processed_filename] = @asq.to_csv\n end\n end", "def generate_payer_bulk_data\n p \"There are no payer resources available to generate bulk data.\" if PAYERS.empty?\n\n PAYERS.each do |payer|\n NDOUTS.clear\n request = \"#{FHIR_SERVER}/fhir/InsurancePlan/#{payer[:id]}/$export\"\n output_directory = File.join(\"bulk_export\", payer[:id])\n FileUtils.mkdir_p(output_directory)\n related_formularies_id = []\n payer[:coverage].each do |coverage|\n formulary = coverage[:extension].find { |ext| ext[:url] == \"http://hl7.org/fhir/us/davinci-drug-formulary/StructureDefinition/usdf-FormularyReference-extension\" }\n unless formulary.nil?\n related_formularies_id << formulary[:valueReference][:reference].split(\"/\").last\n end\n end\n\n related_formularies = FORMULARIES.find_all { |formulary| related_formularies_id.include?(formulary[:id]) }\n related_formularies.prepend(payer)\n generate_ndjson(\"InsurancePlan\", related_formularies, output_directory)\n\n related_formularies_id.each { |id| get_related_basic_and_medicationknowledge(id, output_directory) }\n location_ids = extract_location_id_from_reference(payer[:coverageArea])\n get_related_locations(location_ids, output_directory)\n generate_export_json(output_directory, request, NDOUTS)\n end\nend", "def getCSV(restid)\n # get all ordered items of users who ordered in restaurant \"restid\"\n userItems=OrderMng.getAllUsersItemsByRestID(restid)\n csv=[]\n for userItem in userItems\n tmp=Array.new\n for item in userItem\n tmp.push(item)\n end\n csv.push(tmp)\n end\n puts(\"B4 P\")\n p csv\n p userItems\n return userItems\nend", "def index\n @memberships = @website.memberships.includes(:user).paginate(page: params[:page], per_page: 30)\n \n respond_to do |format|\n format.html\n format.json\n format.csv { send_data @memberships.to_csv(current_user.id) }\n format.js\n end\n end", "def scg_report\n @scg_csv_array = []\n get_resource_list('compute', 'storage-connectivity-groups', 'storage_connectivity_groups', name = 'display_name', id = 'id')\n @resource_id_list.each do |scgid|\n scg = rest_get(\"#{@resource_url}/storage-connectivity-groups/#{scgid}\", @token_id)\n scg_array = JSON.parse(scg)['storage_connectivity_group']\n scg_name = scg_array['display_name']\n scg_auto_add_vios = scg_array['auto_add_vios']\n scg_fc_storage_access = scg_array['fc_storage_access']\n scg_ports_per_fabric_npiv = scg_array['ports_per_fabric_npiv']\n @scg_host_list = []\n @scg_host_array = scg_array['host_list']\n @scg_host_array.each do |host|\n @scg_host_list.push(host['name'])\n end\n @scg_vios_array = scg_array['host_list'][0]['vios_list']\n @scg_vios_names = []\n @scg_vios_array.each do |vios|\n @scg_vios_names.push(vios['name'])\n end\n @scg_csv_headers = %w(SCG_Name SCG_Auto_Add_VIOs SCG_FC_Storage_Access SCG_Ports_per_Fabric SCG_Host_List SCG_VIOs_List)\n @scg_csv_array << [scg_name, scg_auto_add_vios, scg_fc_storage_access, scg_ports_per_fabric_npiv, @scg_host_list, @scg_vios_names]\n end\n end", "def print_google_map_data(options={})\n options={ header: false,\n }.merge(options)\n \n puts CSV.generate_line(@output_fields.values) if options[:header]\n \n \n @final_data.each do |record|\n if @command_options[:elders_only]\n # remove spouse name if possible to make elder easier to find..\n name = record[:name].gsub(/& \\w+ /, \"\")\n # check against 2 lists of elders in case names don't match\n if [email protected]?(name) and [email protected]?(name)\n next\n end\n end\n if record[:name].nil?\n next\n end\n \n 
record_values=[]\n @output_fields.each_key do |field|\n record_values.push(record[field])\n end\n line = CSV.generate_line(record_values)\n \n # print line to output\n puts line\n end\nend", "def generate_csv\n\n fields = @resource.typus_fields_for(:csv)\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = Rails.root.join(\"tmp\", \"export-#{@resource.to_resource}-#{Time.now.utc.to_s(:number)}.csv\")\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields.keys\n @resource.find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map do |key, value|\n case value\n when :transversal\n a, b = key.split(\".\")\n record.send(a).send(b)\n when :belongs_to\n record.send(key).to_label\n else\n record.send(key)\n end\n end\n end\n end\n end\n\n send_file filename\n\n end", "def export_csv\n correct\n CSV.open('output.csv', 'a') do |csv|\n @suggestion.each_with_index{|(name,score), index|\n word = @typed_name.capitalize #typed name\n sugges = name.capitalize #suggestion name\n score = (@suggestion[name].round(3)).to_s #score of this suggestion\n years = @year #which years are inserted to dictionary\n ed = \"NA\" #which ED this name belongs to\n ed_count = \"NA\" #how many candidates fall in ED of this name\n\n # Check frequency for the words in suggestion list\n # Typed word, not in the dictionary has zero frequency\n if @working_dictionary.has_key?(name)\n freq = (@working_dictionary[name]).to_s \n else\n freq = 0\n end\n \n # Check which edit distance the word belongs to.\n\n # Check if the word is from @ed1\n if [email protected]?\n if @ed1.include?(name)\n ed = 1.to_s\n ed_count = @count_ed1.to_s\n end\n end\n\n # Check if the word is from @ed2\n if [email protected]?\n if @ed2.include?(name)\n ed = 2.to_s\n ed_count = @count_ed2.to_s\n end \n end\n\n # Build a string to insert in csv file\n if index == 0 # years are displayed only in first line for this @typed_name\n line = [word, sugges, score, freq, ed, ed_count, year] \n else\n line = [word, sugges, score, freq, ed, ed_count] \n end\n\n csv << line\n }\n csv << [] # insert blank line as last line\n end\n end", "def index\n @q = Servidor.ransack(params[:q])\n @q.sorts = 'nome'\n @servidores = @q.result.page params[:page]\n respond_to do |format|\n format.html\n format.csv do\n send_data @servidores.except(:limit, :offset).to_csv, filename: \"concentradores -#{Date.today}.csv\"\n end\n end\n end", "def index\n @q = SubTask.ransack(params[:q])\n @sub_tasks = @q.result\n\n @sub_tasks = @sub_tasks.order(id: :desc).page(params[:page]).per(10)\n respond_to do |format|\n format.html\n format.csv { send_data sub_task.to_csv(@sub_tasks), filename: \"sub_tasks-#{Date.today}.csv\" }\n end\n end", "def index\n get_paginated_promotions\n respond_to do |format|\n format.html\n format.js\n format.xls\n if(params[:a] == \"a\")\n format.csv { send_data Promotion.all.to_csv2 }\n else\n format.csv { send_data Promotion.all.to_csv }\n end\n end\n end", "def fetch_billing_results\n previous_response = nil\n begin\n page = get_page_number\n\n response = Select.fetch_billing_results(@start_timestamp, @end_timestamp,\n page, @page_size)\n unless !response.is_a?(Array)\n process_response(response)\n previous_response = response\n end\n end until !response.is_a?(Array)\n 
reset_page_number\n\n set_empty_last_fetch_soap_id(response, previous_response)\n end", "def generate_csv\n @project = Project.find(params[:id])\n \n content_type = if request.user_agent =~ /windows/i\n ' application/vnd.ms-excel '\n else\n ' text/csv '\n end\n \n project_net = @project.find_all_connections(friend = true, follower = false) \n \n CSV::Writer.generate(output = \"\") do |csv|\n csv << [\"DL n=\" + @project.persons.count.to_s ]\n csv << [\"format = edgelist1\"]\n csv << [\"labels embedded:\"]\n csv << [\"data:\"]\n project_net.each do |entry|\n csv << [entry[0], entry[1], \"1\"]\n end\n @project.persons.each do |person|\n csv << [person.username]\n end\n end\n send_data(output,\n :type => content_type,\n :filename => @project.name.to_s + \"_FF_SNA.csv\")\n end", "def raw_export\n return unless has_permission :can_do_billing\n case request.method\n when :get\n @page_title = 'Generate Raw Data Report'\n @from = Time.now.beginning_of_month\n @to = @from.next_month\n when :post\n from = Date.new(params[:from][:year].to_i,params[:from][:month].to_i)\n to = Date.new(params[:to][:year].to_i,params[:to][:month].to_i)\n @records = BillingCharge.find(:all,:conditions=>['billing_charges.created_at > ? and billing_charges.created_at < ?',from,to],\n :order=>['billing_charges.id'],:include=>[:pilot,:instructor,:aircraft])\n report = StringIO.new\n CSV::Writer.generate(report, ',') do |csv|\n csv << %w(Timestamp Pilot Amount Total Type Note Aircraft Aircraft_Rate Instructor Instructor_Rate Hobbs_Start Hobbs_End Tach Ground_Instruction)\n @records.each do |r|\n csv << [r.created_at, r.pilot.full_name_with_id, r.charge_amount, r.running_total, \n r.type.to_s == 'FlightRecord' ? 'flight/ground instruction' : r.type, r.notes, r.aircraft_id.nil? ? nil : r.aircraft.identifier, \n r.aircraft_rate, r.instructor_id.nil? ? 
nil : r.instructor.full_name_with_id,\n r.instructor_rate, r.hobbs_start, r.hobbs_end, r.tach_end, r.ground_instruction_time]\n end\n end\n\n report.rewind\n send_data(report.read,\n :type => 'text/csv; charset=iso-8859-1; header=present',\n :filename => 'report.csv')\n end\nend", "def get_character_movies_from_api(character)\n all_characters = RestClient.get('http://www.swapi.co/api/people/')\n character_hash = JSON.parse(all_characters)\n\n results = []\n still_searching = true\n\n while still_searching\n results << character_hash[\"results\"] # results pushed into results array on every loop iteration\n if character_hash[\"next\"]\n next_page = RestClient.get(character_hash[\"next\"])\n character_hash = JSON.parse(next_page)\n else\n still_searching = false\n end\n end\n\n results = results.flatten\n results = results.select {|chara| chara[\"name\"].downcase == character}\n # results is an array containing a hash\n\n if results.length > 0\n film_hash = results[0][\"films\"].map do |film_url|\n film = RestClient.get(film_url)\n film = JSON.parse(film)\n end\n end\nend", "def csv_report\n tire_cols = params[:tire] || {}\n ar_cols = params[:activerecord] || {}\n assocs_to_include = params[:assoc][:include] || {}\n params[:assoc][:max] ||= {}\n klass = model_class.constantize\n @filename = \"#{model_class.humanize}.csv\"\n\n response.headers['Content-Type'] ||= 'text/plain'\n response.headers['Content-Disposition'] = \"attachment; filename=#{@filename}\"\n response.headers['Content-Transfer-Encoding'] = 'binary'\n response.headers['Last-Modified'] = Time.now.to_s\n\n # Right, try to define a header:\n header = []\n tire_cols.keys.each { |x| header.push(x.humanize) }\n ar_cols.keys.each { |x| header.push(x.humanize) }\n assocs_to_include.keys.each do |assoc|\n if params[:assoc][:max][assoc] == 'join' # Is a has_many with only one real column\n header.push params[:assoc][assoc.to_sym].keys.first\n elsif params[:assoc][:max][assoc] # has_many\n (1 .. (params[:assoc][:max][assoc].to_i)).each do |i|\n params[:assoc][assoc.to_sym].keys.each do |k|\n header.push(\"#{assoc.singularize.humanize} #{i} #{k.humanize}\")\n end\n end\n else # has_a\n params[:assoc][assoc.to_sym].keys.each do |k| # Each key requested from the associated record\n header.push \"#{assoc.humanize} #{k.humanize}\"\n end\n end\n end\n\n results = klass.search({ per: TireSearch::INFINITY }, 1, '')\n self.response_body = Enumerator.new do |y|\n results.each_with_index do |result, i|\n y << header.to_csv if i == 0\n\n line = []\n tire_cols.keys.each { |x| line.push(result[x]) }\n\n result = result.load if ar_cols.count > 0 || assocs_to_include.keys.count > 0\n\n ar_cols.keys.each { |x| line.push(result.send(x)) } if ar_cols.count > 0\n\n assocs_to_include.keys.each do |assoc|\n related = result.send(assoc)\n if params[:assoc][:max][assoc] == 'join' # Is a has_many with only one real column\n col = params[:assoc][assoc.to_sym].keys.first\n line.push related.map { |x| x.send(col) }.join(' // ')\n elsif params[:assoc][:max][assoc]\n (0 .. (params[:assoc][:max][assoc].to_i - 1)).each do |j|\n params[:assoc][assoc.to_sym].keys.each do |k|\n line.push(related[j] ? related[j].send(k) : nil)\n end\n end\n else\n params[:assoc][assoc.to_sym].keys.each do |k| # Each key requested from the associated record\n line.push related ? 
related.send(k) : nil\n end\n end\n end\n y << line.to_csv\n GC.start if i % 500 == 0\n end\n end\n end", "def write_csv_report\n unless @csv.nil?\n @csvexport.write(@csv)\n end\n end", "def response_time_entries\n response_time_entries_raw = wrapper(@user).time_entries(1, harvest_project_id)\n\n # Getting the number of pages of the paginated response from projects API\n number_of_pages = response_time_entries_raw['total_pages']\n\n response_time_entries_per_project = []\n\n if number_of_pages == 1\n response_time_entries_per_project = response_time_entries_raw.dig('time_entries')\n else\n # for loop to loop through all the pages and fetch all and put into the variable response_time_entries_per_project\n\n (1..number_of_pages).each do |i|\n time_entries_raw = wrapper(@user).time_entries(i, harvest_project_id)\n\n next_array = time_entries_raw['time_entries']\n\n # add projects array to complete array\n response_time_entries_per_project += next_array\n end\n\n end\n response_time_entries_per_project\n end", "def write_csv(type, final_arr)\n csv_data = CSV.generate do |csv|\n val = []\n final_arr[0][0].each do |title|\n val << title\n end\n csv << val\n final_arr[1].each do |elem|\n elem.each do |doc|\n data = []\n json_doc = JSON.generate(doc)\n json_doc = JSON.parse(json_doc)\n final_arr[0][0].each do |key|\n key_array = key.split('.')\n data << get_nested_val(key_array,json_doc)\n end\n csv << data\n end\n end\n end\n File.write(\"#{type}_details.csv\",csv_data)\n end", "def index\n @q = Bairro.ransack(params[:q])\n @bairros = @q.result(order: :nome).page params[:bairros_page]\n respond_to do |format|\n format.html\n format.csv { send_data @bairros.except(:limit, :offset).to_csv, filename: \"bairros-#{Date.today}.csv\" }\n end\n @params = {}\n end", "def index\n @api_calls = ApiCall.search(params[:search]).order(sort_column + \" \" + sort_direction).paginate(per_page: 10,page: params[:page])\n @api_calls_temp = ApiCall.search(params[:search]).order(sort_column + \" \" + sort_direction)\n puts @api_calls\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @api_calls }\n format.csv { send_data @api_calls_temp.to_csv(session[:user_id].to_s ) }\n format.xls #{ send_data @api_calls.to_csv(col_sep: \"\\t\")}\n format.js\n\n end\n end", "def render_csv_works_response( works )\n @records = works\n headers['Content-Disposition'] = 'attachment; filename=\"work-list.csv\"'\n headers['Content-Type'] ||= 'text/csv'\n render 'csv/v1/works'\n end", "def get_harvest_users(harvest_webapp, dbg = 0)\n\n member_page = \"/team\"\n pt = get_config(\"COMMON\", \"CSVPath\")\n um = get_config(\"Harvest\", \"MUsers\")\n\n site = harvest_webapp\n\n begin\n \n # move to User list\n site.Go(member_page)\n \n # save user list to csv\n data = site.RetrieveList(\"li.manage-list-item\", method(:proc_split_list_to_array))\n\n p data if dbg\n \n # append email and dept_code\n data.each do |member|\n if (member[0] != \"Admin\")\n site.Go(\"/people/\" + member[1] + \"/edit#profile_base\")\n member.push(site.GetItem(\"#user_email\").attribute(\"value\").value.strip);\n member.push(site.GetItem(\"#user_department\").attribute(\"value\").value.strip);\n else\n member.push(\"\");\n member.push(\"\");\n end\n end\n \n # sort by dept_code\n sorted = data.sort { |a, b|\n a[3] <=> b[3]\n }\n \n # flush to file\n flush_to_csv(sorted, pt + um)\n \n p (pt + um) if dbg\n \n rescue => e\n\n p e\n p e.backtrace\n \n end\n \nend", "def index\n get_paginated_categories\n respond_to do |format|\n 
format.html\n format.js\n format.xls\n if(params[:a] == \"a\")\n format.csv { send_data Category.all.to_csv2 }\n else\n format.csv { send_data Category.all.to_csv }\n end\n end\n end", "def index\n profile_id = current_user.account_summary.default_profile\n @url_builders = current_user.url_builders.where(profile: profile_id).order(id: :desc).page params[:page]#UrlBuilder.all\n\n #TODO 每天取一次就好 不要每次都取\n # @url_builders.each(&:fetch_and_save_short_url_analytics)\n @url_builders.each do |ub|\n ub.fetch_and_save_short_url_analytics if((ub.url_analytics.empty? || ub.url_analytics.last.created_at < (DateTime.now - 1.hours)) && !ub.short_url.nil?)\n end\n\n ic = Iconv.new(\"big5\", \"utf-8\")\n respond_to do |format|\n format.html\n format.csv {send_data(ic.iconv(send_csv(@url_builders)))} #{ send_data @url_builders.to_csv }\n # format.xls # { send_data @products.to_csv(col_sep: \"\\t\") }\n end\n end", "def index\n @samples = Sample.approved_or_rejected.page(params[:page]).per(100)\n respond_to do |format|\n format.html\n format.csv do\n render_csv\n end\n end\n end", "def parse_results(raw_results)\n results = []\n results << ['Name', 'URL', 'Price', 'Neighborhood']\n\n raw_results.each do |result|\n link = result.css('a')[1] #mechanize library\n name = link.text.strip\n url = \"http://sfbay.craigslist.org\" + link.attributes[\"href\"].value\n price = result.search('span.price').text\n neighborhood = result.search('span.pnr').text[3..-13]\n\n puts \"This apartment is located in #{neighborhood}\"\n\n a = Apartment.new(name,url,price,neighborhood)\n\n results << [a.name, a.url, a.price, a.neighborhood]\n create_csv(results)\n end\nend", "def index\n # @articles = Article.includes(:criminal_code)\n @articles = Article.with_current_and_all_prisoner_count\n\n respond_to do |format|\n format.html\n format.json\n format.csv do\n send_data Article.to_csv,\n filename: GeneratedFile.clean_filename('articles', 'csv'),\n type: 'text/csv'\n end\n end\n end", "def each_row_for_next_chunk\n return nil if finished?\n raise \"#{self.class}: instance not prepared before running the iteration\" unless @prepared\n\n select_sql = @select_sql.present? ? 
@select_sql : '*'\n sql = \"SELECT #{select_sql} FROM #{data_table_name} WHERE #{data_where_scope} ORDER BY id ASC LIMIT #{Import::CHUNK_ROWS_COUNT} OFFSET #{@iteration_number * Import::CHUNK_ROWS_COUNT}\"\n pg_result = postgres.copy(\"COPY (#{sql}) TO STDOUT WITH CSV DELIMITER ','\") do |row|\n yield row\n end\n\n @iteration_number += 1\n check_if_finished\n end", "def next\n return nil unless next?\n ensure_service!\n\n gapi = @service.list_files @bucket, prefix: @prefix,\n delimiter: @delimiter,\n token: @token,\n max: @max,\n versions: @versions,\n user_project: @user_project\n File::List.from_gapi gapi, @service, @bucket, @prefix,\n @delimiter, @max, @versions,\n user_project: @user_project\n end", "def export_csv(csv_data)\n\t\tFile.write(\"kfit_partners.csv\", csv_data.map(&:to_csv).join)\n\tend", "def export_csofeed\n # Create header row #\n header = ['Record Type', 'Device Key', 'IP Addresses', 'MAC Addresses', 'System Name', 'FQDN', 'Status', 'Function', 'Runs MOTS/PRISM Apps', 'MOTS/PRISM IDs', 'Runs Non-MOTS/PRISM Apps', 'Internet Facing', 'Device Criticality', 'Device Owner', 'Operating System', 'Operating System Version', 'Administrator\\'s ATTUID', 'Support Group', 'Serial Number', 'Asset Tag Number', 'Location', 'Location CLLI', 'Comments' \"\\n\"]\n csvdoc = [header.join(',')]\n Node.all.each do |node|\n result = make_csoline(node)\n csvdoc << result.join(',') if result\n end\n fname = \"public/csvexports/csofeed_#{Time.now.strftime(\"%d%m%Y\")}.csv.gz\"\n File.open(fname, 'w') do |f|\n gz = Zlib::GzipWriter.new(f)\n gz.write csvdoc\n gz.close\n end\n end", "def report_activities\n @project = Project.find params[:project_id]\n ###\n retrieve_query\n # sort_init(@query.sort_criteria.empty? ? [['id', 'desc']] : @query.sort_criteria)\n # sort_update(@query.sortable_columns)\n \n if @query.valid?\n @limit = Setting.issues_export_limit.to_i\n\n @issue_count = @query.issue_count\n @issue_pages = Paginator.new self, @issue_count, @limit, params['page']\n @offset ||= @issue_pages.current.offset\n @issues = @query.issues(:include => [:assigned_to, :tracker, :priority, :category, :fixed_version],\n # :order => sort_clause,\n :offset => @offset,\n :limit => @limit)\n\n @iss = []\n @issues.each{|i|\n issue = Issue.find i.id\n @iss << issue\n }\n respond_to do |format|\n format.html { send_data(statuses_to_csv_activities(@iss, @project), :type => 'text/csv; header=present', :filename => 'export.csv') }\n end \n end\n ###\n end", "def do_csv_search(params, download)\n s = do_search(params.merge({:limit => self.count, :offset => 0}))\n \n # any possible 'speed up' would need to be done here:\n results = s.results.map do |obj|\n obj.search_result_format\n end\n\n headers = results.first.keys\n filename = download.filename\n user = download.user\n id = download.id\n path = \"tmp/#{id}_#{user}_#{filename}\"\n \n csv_file = CSV.open(path, \"wb\") do |csv|\n csv << headers\n results.each do |r|\n csv << r.values \n end\n end\n\n Zip::File.open(\"#{path}.zip\", Zip::File::CREATE) do |zipfile|\n zipfile.add(filename, path)\n end\n\n File.delete(path) if File.exist?(path)\n\n download.update({status: 1, filename: \"#{filename}.zip\"})\n #download.created_by.notify(\"Your download '#{download.filename}' is ready.\")\n end", "def search_results(all_pages)\n formatted_list = []\n all_pages.each do |show_hash|\n formatted_list << \"id. 
#{show_hash[\"id\"]} - #{show_hash[\"name\"]}\"\n end\n if formatted_list.count != 1\n self.print_search_results(formatted_list)\n else\n fetch_show_by_id(all_pages[0][\"id\"].to_s)\n end\nend", "def index\n @experiments = policy_scope Experiment.where(user_id: current_user.id).order(created_at: :desc).page(params[:page])\n respond_to do |format|\n format.html\n format.csv {\n send_data( @experiments.to_csv,\n filename: \"CO2_by_experiment_#{Time.zone.now}.csv\", \n disposition: 'inline', type: \"multipart/related\")\n }\n end\n end", "def seek #DESC by date.here\n \n args = {sort: \"-contribution_receipt_date\", api_key: API_KEY[:fec], committee_id: id, per_page: 100, last_index: @last_index, last_contribution_receipt_date: @last_date}\n json = JSONByURL.new(\"https://api.open.fec.gov/v1/schedules/schedule_a?\" + args.build_params)\n\n res = json.snag.json #fix \n\n #log page info, number results, last index retrieved\n donations = res[\"results\"]\n @record_count = res[\"pagination\"][\"count\"] if @record_count.nil? #initial population of instance variable that knows total records in dataset for user experience info\n if @save_record_info_to_db\n @committee.update(num_records_available: @record_count) #update committee with total available on first download this instance\n @save_record_info_to_db = false\n end\n\n @num_accessed += donations.count #Log how many records we have accessed so far so we don't download Nancy Pelosi's donor base from 1987 and blow our API KEY\n donations.select! {|d| d[\"is_individual\"]} #MUTATES ARRAY!!!! KEEPS ONLY WHERE FIELD is_individual = true, AVOIDING DUPLICATE RECORDS FROM INTERNAL MEMOS\n \n #build an array of hashes of 2-element hashes (:donation & :donor) to pass to save_donation method\n page = donations.map {|d| {:donation=> {amount: d[\"contribution_receipt_amount\"], date: d[\"contribution_receipt_date\"]}, :donor=> {zip: d[\"contributor_zip\"], name: d[\"contributor_name\"], street_1: d[\"contributor_street_1\"], street_2: d[\"contributor_street_2\"], employer: d[\"contributor_employer\"], state: d[\"contributor_state\"], city: d[\"contributor_city\"], occupation: d[\"contributor_occupation\"], line_number: d[\"line_number\"]}}} \n @last_index = res[\"pagination\"][\"last_indexes\"][\"last_index\"] #set pagination\n @last_date = res[\"pagination\"][\"last_indexes\"][\"last_contribution_receipt_date\"] #set pagination part 2\n \n # puts \"#{last_index} #{last_date}\"\n page.each {|item| save_donation(item)}\n pct_done = (@num_accessed.to_f / @stop_after * 100).round(1) #xx.x% format for progress downloading records per flags [flags to:do]\n puts \"#{pct_done}% complete. Downloaded #{@num_accessed} of #{@stop_after} from a total of #{@record_count} records.\"\n @num_accessed < @stop_after ? 
seek : @committee.update(last_date: @last_date, last_index: @last_index, num_records_downloaded: @num_accessed + (@committee.num_records_downloaded || 0)) #keep seeking.....if done, push last record retrieved into db\n # res #un-comment to view JSON data for this page\n end", "def get_csv\n CSV.generate do |csv|\n csv << ['Name','Student ID','User ID','Role','Email Address','Sections']\n get_feed[:students].each do |student|\n name = student[:last_name] + ', ' + student[:first_name]\n user_id = student[:login_id]\n student_id = student[:student_id]\n email_address = student[:email]\n role = ENROLL_STATUS_TO_CSV_ROLE[student[:enroll_status]]\n sections = sections_to_name_string(student[:sections])\n csv << [name, student_id, user_id, role, email_address, sections]\n end\n end\n end", "def sales\n @orders = Order.where(seller: current_user).order(\"created_at DESC\").paginate(:page => params[:page], :per_page => 48)\n respond_to do |format|\n format.html\n format.csv { send_data @orders.to_csv(@orders) } \n end\n end", "def each(&block)\n current_page = self\n while current_page\n current_page.results.each(&block)\n current_page = current_page.next_page\n end\n end", "def index\n @parts = Part.paginate(:page => params[:page])\n #@q = Part.ransack(params[:q])\n #@parts = @q.result.paginate(:page => params[:page])\n respond_to do |format|\n format.html\n format.csv { send_data @parts.to_csv }\n end\n end", "def get_report_list_by_next_token(next_token)\n operation('GetReportListByNextToken')\n .add('NextToken' => next_token)\n\n run\n end", "def export_contacts_to_csv\n start_time = Time.now\n \n contacts = RESULT_COLL.find()\n \n FasterCSV.open(\"../output/healthcare_sg_export.csv\", 'w') {|csv|\n csv << [\"contact_name\", \"company_name\", \"address\"]\n contacts.each do |row|\n csv << row[\"contact\"]\n end\n }\n p Time.now - start_time\nend", "def each\n current_result = self\n begin \n last_result = current_result\n current_result.elements[:results].each do |result|\n\t# The collection of refs we are holding onto could grow without bounds, so dup\n\t# the ref\n\tyield result.dup\n end\n current_result = current_result.next_page if current_result.more_pages?\n end while !last_result.equal? 
current_result\n end", "def write_csv(pages)\n CSV.open('pages.csv', 'w' ) do |writer|\n writer << [\"title\", \"path\", \"hero\", \"hero_tagline\", \"body\"]\n $pages.each do |c|\n writer << [c['title'], c['path'], c['hero'], c['hero_tagline'], c['body']]\n end\n end\nend", "def download_csv\n case current_user.role.name\n when 'Edutor Admin'\n usages = Usage\n when 'Institute Admin'\n usages = current_user.usages\n when 'Center Representative'\n usages = current_user.usages\n when 'Teacher'\n class_contents = current_user.class_contents\n section_students = current_user.sections.map{|section| section.students.select('id')}.flatten\n student_group_students = current_user.sections.map{|section| section.student_groups.select('id')}.flatten\n total_students = (section_students + student_group_students).uniq\n usages_ids = class_contents.map(&:uri).map{|uri|\n Usage.where('uri like ?',\"%#{uri}%\").where(:user_id=>total_students).map(&:id)\n }\n usages = Usage.where(:id=>usages_ids)\n end\n filename =\"usages_#{Date.today.strftime('%d%b%y')}\"\n csv_data = FasterCSV.generate do |csv|\n csv << Usage.csv_header\n usages.each do |c|\n csv << c.to_csv\n end\n end\n send_data csv_data, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{filename}.csv\"\n end", "def export_contacts_to_csv\n start_time = Time.now\n \n contacts = RESULT_COLL.find()\n \n FasterCSV.open(\"../output/healthcare_sg_export.csv\", 'w') {|csv|\n csv << [\"contact_name\", \"country\", \"auction_time\"]\n contacts.each do |row|\n csv << row[\"contact\"]\n end\n }\n p Time.now - start_time\nend" ]
[ "0.59054226", "0.5886693", "0.57631433", "0.56273973", "0.55369157", "0.53110886", "0.5273082", "0.5225724", "0.5205569", "0.5189812", "0.51852566", "0.5183687", "0.5146786", "0.50919545", "0.5083705", "0.50778383", "0.50744", "0.5055878", "0.5038904", "0.50206846", "0.5017838", "0.49945036", "0.49716586", "0.494892", "0.49240077", "0.49051362", "0.490099", "0.4897726", "0.48862562", "0.4879672", "0.48779216", "0.48582757", "0.4840778", "0.48405167", "0.48329514", "0.4831738", "0.48258328", "0.48254675", "0.48242074", "0.48233035", "0.4819576", "0.48070633", "0.48046798", "0.48025247", "0.4798108", "0.47875196", "0.47780797", "0.4774065", "0.47723523", "0.4770657", "0.47705394", "0.476575", "0.4760388", "0.47547942", "0.47541597", "0.4748617", "0.47443053", "0.474176", "0.47386816", "0.4730627", "0.47148344", "0.4711302", "0.47090316", "0.4708378", "0.47082338", "0.47061515", "0.4697549", "0.4690701", "0.4685953", "0.46792227", "0.4677973", "0.46765715", "0.4674812", "0.4667796", "0.46654794", "0.46550062", "0.46534085", "0.46466997", "0.46419787", "0.46402133", "0.46287793", "0.46268362", "0.46244827", "0.46228182", "0.4613765", "0.46133095", "0.4608257", "0.46040845", "0.46006122", "0.45999056", "0.4595914", "0.4590049", "0.45863754", "0.45861098", "0.4582205", "0.4582046", "0.45806614", "0.4580104", "0.45713726", "0.4568047" ]
0.4776956
47
endpoint to send a friend request
def add_friend # byebug #we get user_id from jwt! user = User.find(decode_jwt(cookies.signed[:jwt])["user_id"]) #we get friend_id from frontend if !Block.where(blocker_id: user.id, blockee_id:follow_params[:user2]).empty? return render json: {error: "There was a problem! (Ya been blocked!)"} end followee = User.find(follow_params[:user2]) #insert the one way relation in db! friend_request = Follow.new(follower_id: user.id, followee_id: followee.id) if friend_request.save render json: {friend_request: followee} else render json: {error: "There was a problem!"} end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def friend_request\n\nend", "def send_friend_request\n request_response = post(\"/users/#{id}/request\")[\"response\"]\n @user = Foursquared::Response::User.new(client, request_response[\"userrequest_\"])\n end", "def send_friend_request user_id\n response = post(\"/users/#{user_id}/request\")[\"response\"]\n @user = Foursquared::Response::User.new(self,response[\"user\"])\n end", "def send_friend_request(other_user)\n friend_requests << other_user\n end", "def send_friend_request(id)\n get(\"users/#{id}\").user\n end", "def befriend\n \t#friend = User.find(params[:friend_id])\n respond_to do |format|\n \t\tif Friendship.befriend(@user, @friend)\n \t\t\tmessage = \"Friend request succeed\"\n format.json{render :json => {:message => message, :status => \"200\"}}\n \t\telse\n \t\t\terror = \"No friendship request from #{@friend.nick_name}.\"\n format.json{render :json => {:error => error, :status => \"400\"}}\n \t\tend\n end\n end", "def friend_request(id)\n deprecated('friend_request', 'send_friend_request')\n send_friend_request(id)\n end", "def friend_requests\n # Requires authorization\n raise PutioError::AuthorizationRequired if authentication_required!\n\n make_get_call('/friends/waiting-requests').friends\n end", "def add_friend\n # If there is no pending connection between persons,\n # add pendind/requested connections between them.\n # If there is already a pending connection requested from the other direction,\n # change friendship status to accepted.\n\n if (params['user_id'] == params['friend_id'])\n render_json :messages => \"Cannot add yourself to your friend.\", :status => :bad_request and return\n end\n\n if ! ensure_same_as_logged_person(params['user_id'])\n render_json :status => :forbidden and return\n end\n\n @person = Person.find_by_guid(params['user_id'])\n if ! @person\n render_json :status => :not_found and return\n end\n @friend = Person.find_by_guid(params['friend_id'])\n if ! @friend\n render_json :status => :not_found and return\n end\n\n if @person.association? or @friend.association?\n render_json :messages => \"Association users cannot have friends.\", :status => :bad_request and return\n end\n\n if @person.pending_contacts.include?(@friend) #accept if pending\n Connection.accept(@person, @friend)\n else\n unless @person.requested_contacts.include?(@friend) || @person.contacts.include?(@friend)\n Connection.request(@person, @friend) #request if didn't exist\n end\n end\n\n render_json :status => :ok\n end", "def friend_requests\n get(\"users/requests\").requests\n end", "def friends(options={})\n get('/friends', options)\n end", "def friend_request(type, options)\n options = options.clone\n\n if options[:subreddit]\n options[:r] = options[:subreddit]\n options.delete :subreddit\n end\n\n post(\"api/#{type}\", options)\n end", "def request_friend(friend)\n self.friendships.create!(friend_id: friend.id, status: 'requested')\n friend.friendships.create!(friend_id: self.id, status: 'pending')\n end", "def requests_sent\n friends = Friend.where(user_id: params[:user_id], accepted: false)\n\t render json:friends\n end", "def make_friend_with(username)\n # Requires authorization\n raise PutioError::AuthorizationRequired if authentication_required!\n\n make_post_call('/friends/%s/request' % [username]).status == \"OK\"\n end", "def create_request\n Friendship.request(@user, @friend)\n flash[:notice] = \"Friend request sent.\"\n redirect_to :back\n end", "def friend_wrapper opts = {}\n logged_in?\n params = {uh: @modhash, api_type: 'json'}\n params.merge! 
opts\n post('/api/friend', body: params)\n end", "def create\n\t\tFriendship::request(@user, @friend)\n\t\tUserMailer::friend_request(\n\t\t\t:user => @user,\n\t\t\t:friend => @friend,\n\t\t\t:user_url => profile_for(@user),\n\t\t\t:accept_url => url_for(:action => \"accept\", :id => @user.screen_name),\n\t\t\t:decline_url => url_for(:action => \"decline\", :id => @user.screen_name)\n\t\t).deliver_now\n\t\tflash[:notice] = \"Friend request sent.\"\n\t\tredirect_to profile_for(@friend)\n\tend", "def request_friendship_with(friend)\n\t\t\t\t\tunless is_friends_or_pending_with?(friend) || self == friend\n \t \t::Friendship.create!(:friendshipped_by_me => self, :friendshipped_for_me => friend)\n\t\t\t\t\tend\n \t end", "def invite_facebook_friends\n end", "def requests_received\n friends = Friend.where(friend_id: params[:user_id], accepted: false)\n\t render json:friends\n end", "def sent_friend_requests\n self.followed_users.where(\"friend_status = 'PENDING'\")\n end", "def friend_request_notification(user, friend)\n @user = user\n @friend = friend\n @friend_url = \"http://www.lacquerloveandlend.com/friendships/new?friend_id=#{@user.id}\"\n\n mail(to: @friend.email, subject: \"#{@user.name} wants to be friends with you on Lacquer Love&Lend!\")\n\n headers['X-MC-Track'] = \"opens, clicks_all\"\n end", "def send_friendship_request(other_user)\n friendship_requests.create!(requested_user: other_user) if !self.friends_with?(other_user)\n end", "def friends(id)\n friends_data(request(\"users/friends/#{id}.xml\", :auth => true))\n end", "def index\n # byebug\n if params[:requester] == \"sent\"\n @friend_requests = current_user.friend_requests_as_requester\n else\n @friend_requests = current_user.friend_requests_as_requested\n end\n filter_friend_requests\n end", "def accept_friend\n \tif @user.pending_friends.include?(@friend)\n Friendship.accept(@user, @friend)\n message = \"Friendship with #{@friend.nick_name} accepted!\"\n format.json{render :json => {:message => message, :status => \"200\"}}\n else\n error = \"No friendship request from #{@friend.nick_name}.\"\n format.json{render :json => {:error => error, :status => \"400\"}}\n\t\tend\n #redirect_to :back\n end", "def friend_params\n params.require(:friend).permit(:requester_id, :destiny_id, :status)\n end", "def add_friend\n # check if this friend isnt't to our list friend\n if params[:user_id].to_i == params[:friend_id].to_i \n render json: {\n message: \"Not yourself\"\n }, status: 400\n else\n @current_user = User.find(params[:user_id])\n if @current_user.friend.find_by(friend: params[:friend_id])\n render json: {\n message: \"#{User.find(params[:user_id]).name} can't be added, You are friend with this user\",\n errors: {\n error_code: \"\",\n error_description: \"\"\n }\n }, status: 401\n else\n # add friend\n puts \"Starting adding friend ...\"\n @new = @current_user.friend.new(friend: params[:friend_id], status: :pending)\n if @new.save\n render json: {\n message: \"#{User.find(params[:user_id]).name} added as friend\"\n }, status: 201\n else\n render json: {\n message: @new.errors.messages\n }, status: 401\n end\n end\n end\n\n end", "def set_friendship\n end", "def request_friendship(user_2)\n \tself.friendships.create(friend: user_2)\n end", "def add_friend_request(user_id)\n friend_suggestions.where(user_id: user_id).delete_all\n req = pending_friends.where(id: user_id).first\n if req.present?\n req.accept!\n else\n req = UserFriendRelationship.between(user_id, id).first || user_friend_relationships.create(user_to_id: user_id)\n 
PubSub::Publisher.new.publish_for([req.user_to], 'friend_request', {source: self.as_basic_json}, {title: full_name(false), body: 'wants to be your friend'})\n end\n # reset_cache('suggested_friends')\n end", "def create\n user = User.find(params[:user_id])\n friend_request = user.incoming_requests.build(sender_id: current_user.id)\n if friend_request.save\n if friend_request.sender == current_user\n respond_to do |format|\n format.html { redirect_back(fallback_location: root_path, notice: \"Friend request sent to #{user.name}.\") }\n format.json do\n render json: {\n request: {\n id: friend_request.id,\n name: friend_request.sender.name\n },\n message: \"Friend request sent to #{user.name}.\"\n }\n end\n end\n else\n redirect_back(fallback_location: root_path, notice: \"Friend request sent to #{user.name}.\")\n end\n else\n redirect_back(fallback_location: root_path, alert: 'Friend request could not be sent.')\n end\n end", "def send_request; end", "def friends\n\t\t@user = User.includes(:owned_friendships => :friend, :sent_requests => :requestee,\n\t\t\t\t\t\t\t :received_requests => :requestor).find(params[:id])\n\tend", "def request_friendship_with(friend)\n Friendship.create!(:friendshipped_by_me => self, \n :friendshipped_for_me => friend) unless self.is_friends_or_pending_with?(friend) || self == friend\n end", "def friend_request(user, other_user)\n @friendship = Friendship.where(other_friend_id: other_user).where(friend_id: user).first\n @user = user\n @other_user = other_user\n\n mail to: @other_user.email, subject: \"Friend request.\"\n end", "def sent_friend_requests\n Friendship.where(:sender_uid => self.uid, :accepted => false)\nend", "def is_friend\n friend_request_exists? && @friend.accepted?\n end", "def friend_request(other_user)\n\t\trequested_friends.push(other_user)\n\tend", "def add_buddies_to_friends_list\n if user_id.blank?\n render :status=>401,\n :json=>{:Message=>\"The user id cannot be blank for this api.\",\n :Response => \"Fail\",\n :Data => nil} \n end\n friend = User.find_by_id(user_id)\n friend.follow(requested_user)\n render :status=>200,\n :json=>{:Message=>\"Added #{friend.name} to buddy list!\",\n :Response => \"Success\",\n :Data => nil} \n end", "def list_pending_friend_request\n @current_user = User.find(params[:user_id])\n render json: {\n pending_friend: @current_user.friend.where(status: :pending).map do |friend|\n {\n id: friend.friend,\n name: User.find(friend.friend).name,\n avatar: \"#{request.base_url}#{Rails.application.routes.url_helpers.rails_blob_path(User.find(friend.friend).avatar, only_path: true)}\",\n status: friend.status\n }\n end\n }, status: :ok\n end", "def friend_params\n params.require(:friend).permit(:user_id, :to_id, :establish, :apply)\n end", "def create_request\n friendship = Friend.new(:user_id => params[:user_id], \n\t\t\t\t\t\t\t :friend_id => params[:friend_id])\n\t if friendship.save\n\t render text: friendship.id \n\t else\n\t render text:'-1'\n\t end\n end", "def create\n user = User.find(params[:friend_id])\n if current_user == user\n redirect_to root_path, notice: \"You can't send request to yourself\"\n return\n elsif Friendship.where(friend_id: user.id, user_id: current_user, confirm: false).exists?\n redirect_to root_path, notice: \"Friend request already sent\"\n return\n elsif Friendship.where(friend_id: current_user, user_id: user.id, confirm: false).exists?\n redirect_to root_path, notice: \"This user already sent friend request to you. 
Respond to it!\"\n return\n end\n @friendship = current_user.friendships.build(friend_id: user.id)\n\n respond_to do |format|\n if @friendship.save\n format.html { redirect_to root_path, notice: \"Friends request sent\" }\n format.json { render :show, status: :created, location: @friendship }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @friendship.errors, status: :unprocessable_entity }\n end\n end\n end", "def friend_params\n params[:friend]\n end", "def request_friendship (user_2)\n #self will be current user (user_1)\n self.friendships.create(friend: user_2)\n end", "def friend_requests\n friends = current_user.friends.where accepted: false\n profiles = friends.map{ |friend| Profile.find(friend.profile_id)}\n render json: profiles\n end", "def friend(action, value)\n raise ArgumentError, \"Invalid friend action provided: #{action}\" unless @@FRIENDSHIP_URIS.keys.member?(action)\n value = value.to_i unless value.is_a?(String)\n uri = \"#{@@FRIENDSHIP_URIS[action]}/#{value}.json\"\n response = http_connect {|conn| create_http_post_request(uri) }\n bless_model(Twitter::User.unmarshal(response.body))\n end", "def request_friendship(user_2)\n self.friendships.create(friend: user_2)\n end", "def sent_friend_requests\n friendships = Friendship.where(:user_id => self.id, :confirmed => false)\n users = []\n friendships.each do |friendship|\n users.push User.find friendship.friend_id\n end\n\n users\n end", "def create_friend\n\n \t\tif params[:friends].blank? || params[:friends].size != 2\n \t\t\tlogger.info(\"[FriendUserController]create_friend == params is error ,params ==#{params[:friends]}\")\n \t\t\treturn render :json => {:success => false}\n \t\tend\n \t\temail = params[:friends][0]\n \t\tfriend_email = params[:friends][1]\n\n \t\t# block updates from an email address,if they are not connected as friends, then no new friends connection can be added\n \t\tif BlockUser.exists?(:email=>email,:block_email=>friend_email) || BlockUser.exists?(:email=>friend_email,:block_email=>email)\n \t\t return render :json => {:success => false}\n \t\tend\n \t\t#if they are connected as friends,retrun success is true\n \t\tif FriendUser.exists?(:email=>email,:friend_email=>friend_email) || FriendUser.exists?(:email=>friend_email,:friend_email=>email)\n \t\t\treturn render :json => {:success => true}\n \t\tend\n\n \t\t#create a friend connection between two email addresses.\n \t\tfriend_user = FriendUser.create(:email=>email,:friend_email=>friend_email)\n\n \t\tif friend_user.save\n \t\t\tlogger.info(\"[FriendUserController]create_friend == create a friend connection between two email addresses ,params ==#{params[:friends]}\")\n \t\t\treturn render :json => {:success => true}\n \t\telse\n \t\t\tlogger.info(\"[FriendUserController]create_friend == can not create a friend connection between two email addresses,error=#{friend_user.errors.full_messages} ,params ==#{params[:friends]}\")\n \t\t\treturn render :json => {:success => false}\n \t\tend\n \t\t\n \tend", "def subscribe\n if Friend.subscribe_friend(params[:requestor], params[:target])\n render json: {\n success: true\n }\n else\n render json: {\n success: false\n }\n end\n end", "def invite_friends (owner_name, friend_name, friend_email)\n @greeting = \"Hi\"\n @owner_name=owner_name\n @friend_name=friend_name\n mail to: friend_email\n end", "def friend_params\n params.require(:friend).permit(:user_id, :friend_id, :facebook_id, :friend_confirm)\n end", "def invite_friend\n friend = 
params[:invite_friend]\n profile = User.find_by_id(friend[:user_id])\n\n UserMailer.send_invitation(friend, profile).deliver\n redirect_to profile_path(profile)\n end", "def confirm_follow\n req = Net::HTTP::Post.new(@uri, initheader = {'Content-Type' => 'application/json'})\n req.body = {'queue': 'USER', 'method': 'confirm_follow', 'user_id': '2', '_id': ''}.to_json\n res = Net::HTTP.start(@uri.hostname, @uri.port) do |http|\n http.request(req)\n end\n return res.body\nend", "def friend_params\n params.require(:friend).permit(:user_id, :friend_id, :status)\n end", "def send_request(req); end", "def create\n Friendship.request(@user, @friend)\n @friend = Friendship.find_by_user_id_and_friend_id(@user, @friend)\n render :json => { :success => true, :new_friend => @friend }\n end", "def friend_request(other_user)\n\tif other_user.present?\n\t\trelationship = get_relationship(other_user)\n\t\t\n\t\tif !relationship\n\t\t\trelationships.create(followed_id: other_user.id, friend_status: 'PENDING', follow1: false, follow2: false)\n\t\tend\n\tend\n end", "def friend_params\n params.require(:friend).permit(:name, :email, :phone, :instagram, :user_id)\n end", "def friend_params\n params.require(:friend).permit(:name, :address, :user_id)\n end", "def create\n friend = Friend.create user_id: current_user.id, other_id: params[:id]\n o = friend.other\n datum = {id: friend.id, email: o.email, own: true}\n reciprocal = friend.mutual\n if reciprocal\n datum[:subscribed_to_me] = reciprocal.subscribed\n datum[:mutual] = true\n datum[:other_id] = reciprocal.id\n end\n notify friend, 1\n render json: datum\n end", "def friend_params\n params.require(:friend).permit(:user_id, :friend_id)\n end", "def new\n #Find the current user and the requested friend\n @user=current_user\n friend=User.find_by_email(params[:email])\n #make sure the friend exists\n if(friend)\n #Check to see if the friendship already exists\n friendCheck=Friendship.find_by_user_id_and_friend_id(@user.id, friend.id)\n if(!friendCheck)\n #If there is no friendship between the two users, continue as normal\n @friendship = @user.friendships.build(:friend_id => friend.id)\n\n respond_to do |format|\n #Do it again for the reverse relationship (a friends with b and b friends with a are two separate relationships)\n if @friendship.save\n @friendship=friend.friendships.build(:friend_id => @user.id)\n if @friendship.save\n #Send an email to the friend so they can confirm that they want to be friends\n UserMailer.confirmation_email(@user,friend).deliver\n format.html { redirect_to @friendship, notice: 'Friendship was successfully created.' }\n format.json { render json: {:created => 'true', :exists => 'true', :friends => 'false'}}\n else\n format.html { render action: \"new\" }\n format.json { render json: {:created => 'false', :friends => 'false', :exists => 'true'}}\n end\n else\n render json: {:created => 'false', :friends => 'false', :exists => 'true'}\n end\n end\n else\n #If the friendship exist, return this fact to the app. 
It will notify the user.\n render json: {:friends => 'true', :exists => 'true', :created => 'false'}\n end\n else\n #If the user does not exist, let the app know.\n render json: {:friends => 'false', :exists => 'false', :created => 'false'}\n end\n end", "def create\n if current_user.friends.exists?(params[:friend_request][:receiver_id])\n redirect_to users_show_path(current_user), flash: {notice: \"You are already friend\"}\n else\n @friend_request = FriendRequest.new(friend_request_params.merge(sender_id: current_user.id))\n respond_to do |format|\n if @friend_request.save\n format.html { redirect_to @friend_request, notice: 'Friend request was successfully created.' }\n format.json { render :show, status: :created, location: @friend_request }\n else\n format.html { render :new }\n format.json { render json: @friend_request.errors, status: :unprocessable_entity }\n end\n end\n\n end\n\n\n end", "def accept_request\n @user = User.friendly.find(params[:user_id])\n @follow = Follow.find_by followable_id: current_user.id, follower_id: @user.id\n @follow.status = 1\n if @follow.save\n @user.add_notifications(\" has accepted your <strong>friend request</strong>.\", \n \" a accepté ta <strong>demande d'ami</strong>.\",\n current_user , nil, nil, Notification.type_notifications[:friend_request], nil)\n render :json => {:success => true}\n else \n render :json => {:success => false}\n end \n \n end", "def friend_request_params\n params.require(:friend_request).permit(:sender_id, :receiver_id, :status_id)\n end", "def send_request(method, params, &block); end", "def friends\n # Requires authorization\n raise PutioError::AuthorizationRequired if authentication_required!\n\n make_get_call('/friends/list').friends\n end", "def friend_params\n params.permit(:user_id, :friend_id)\n end", "def friend_params\n params.permit(:user_id, :friend_id)\n end", "def friend_params\r\n params.require(:friend).permit(:user_id, :friend_id)\r\n end", "def friend(user)\n friend_name = extract_string(user, :username)\n friend_request 'friend', :container => current_user.full_name, :name => friend_name, :type => :friend\n end", "def get_friend_id \n uri = \"https://api.twitter.com/1/friends/ids.json\" \n \n #http request for friend search start\n req = Typhoeus::Request.new(uri,\n :method =>\"get\",\n :params =>{:cursor=>\"-1\", :screen_name=>crt_user.handler}) \n #sign_request(req,uri)\n hydra = Typhoeus::Hydra.new\n hydra.queue(req)\n hydra.run\n JSON.parse(req.response.body)[\"ids\"]\n end", "def friend(note = nil)\n name = get_attribute(:name)\n body = JSON.generate(note ? { name: name, note: note } : { name: name })\n @client.request(:put, \"/api/v1/me/friends/#{name}\", body: body)\n end", "def friend(note = nil)\n name = get_attribute(:name)\n body = JSON.generate(note ? 
{ name: name, note: note } : { name: name })\n @client.request(:put, \"/api/v1/me/friends/#{name}\", body: body)\n end", "def create\n @friend = User.find(params[:friend_id])\n if @friend.super_user?\n redirect_to(root_url)\n return\n end\n current_user.require_friend(@friend)\n msg = { resource: current_user.name+\" has just sent you a request friend\" ,\n friend_id: params[:friend_id],\n user_id: current_user.id\n } \n $redis.publish 'rt-change', msg.to_json\n redirect_to request.referrer\n \n end", "def find_friend\n\n \t\tif params[:email].blank?\n \t\t\tlogger.info(\"[FriendUserController]find_friend == email is blank !!!\")\n \t\t\treturn render :json => {:success => false}\n \t\tend\n\t\n \t\temail = params[:email]\n\n \t\t#get a list of friends\n \t\tfriend_emails = get_friend_email(email)\n\n \t\t#get a list of block friends by email\n \t\tblock_mails = BlockUser.where(:email=>email).map(&:block_email)\n\n \t\tfriend_emails = friend_emails - block_mails\n \t\t\n \t\tlogger.info(\"[FriendUserController]find_friend == friends list =======>#{friend_emails}\")\n\n \t\treturn render :json => {:success => true,:friends=>friend_emails,:count=>friend_emails.size}.to_json\n \t\t\n \tend", "def add_friend_verification_params\n if User.exists?(id: params[:user_id]) && User.exists?(id: params[:friend_id])\n # good\n else\n\n render json: {\n message: \"Unknow user or friend\"\n }, status: 401\n\n end\n end", "def my_friends\n @current_user = User.find(params[:user_id])\n render json: {\n friends: @current_user.friend.where(status: 'accept').map do |friend|\n {\n id: friend.id,\n name: User.find(friend.friend).name.upcase,\n avatar: \"#{request.base_url}#{Rails.application.routes.url_helpers.rails_blob_path(User.find(friend.friend).avatar, only_path: true)}\",\n date: friend.created_at\n }\n end\n }\n end", "def friend_request\n @friend_requests = FriendRequest.where(:friend_id=>current_user.id,:status=>\"friend request pending\")\n end", "def friends(options={})\n perform_get(\"statuses/friends.#{Twitter.format}\", options)\n end", "def friendOnline( user )\n photoRequest( user[\"uid\"] ) + raw(\"<br />\") +\n name( user )\n end", "def create\n @user.make_friend_with! params[:id]\n render_created\n end", "def pending_friend_requests_to\n \tself.friendships.where(state: \"pending\")\n end", "def create\n #@friendrequest = Friendrequest.new(params[:friendrequest])\n \n @user = User.find(params[:user_id])\n #@friend = Friend.new(params[:friend])\n @friendreq = @user.friendrequests.create(params[:friendrequest].permit(:futurefriend))\n\n respond_to do |format|\n if @friendrequest.save\n format.html { redirect_to @friendrequest, notice: 'Friendrequest was successfully created.' }\n format.json { render json: @friendrequest, status: :created, location: @friendrequest }\n else\n format.html { render action: \"new\" }\n format.json { render json: @friendrequest.errors, status: :unprocessable_entity }\n end\n end\n end", "def friendships_outgoing(options = {})\n @req.get(\"/1.1/friendships/outgoing.json\", options)\n end", "def friend_request_accepted_notification(user, friend)\n @user = user\n @friend = friend\n @friend_url = \"http://www.lacquerloveandlend.com/users/#{@friend.id}\"\n\n mail(to: @user.email, subject: \"#{@friend.name} accepted your friendship on Lacquer Love&Lend!\")\n\n headers['X-MC-Track'] = \"opens, clicks_all\"\n end", "def friend(note = nil)\n name = read_attribute(:name)\n body = JSON.generate(note ? 
{ name: name, note: note } : { name: name })\n client.request(:put, \"/api/v1/me/friends/#{name}\", body: body)\n end", "def give_gift_friends\t\n @gift_template = GiftTemplate.find(params[:gift_template_id])\n @organization = @gift_template.organization\n @related_trinkons = @gift_template.similar_trinkons\n if (@organization.display_type == \"unbranded\")\n\t @postTitle = @gift_template.title\n else\n\t @postTitle = @organization.display_name + \" \" + @gift_template.title\n end\n\t @app_id = APP_ID\n\t @link = FACEBOOK_URL\n\n @gift_state = create_gift_state\n @redirect_uri = GIVE_CALLBACK\n\t\t\n\t current_user = IndividualUser.find(session[:individual_user_id])\n\t\t\n\t friend_list = []\n\t\n\t current_user.friends_from_api(session[:access_token]).each do |friend| \n\t friend_list << Hash[\"label\" => friend[\"name\"], \n \"value\" => friend[\"name\"], \"id\" => friend[\"id\"].to_i]\n\t end\n\t @friend_list_json = friend_list.to_json\n\t\n\t # assures the gift can be sent (see gift_template.rb, can_be_sent), and that its quantity is > 0. Provides proper error message if not. -TS April 27, 2012\n\t unless (@gift_template.can_be_sent(current_user.facebook_id) && \n @gift_template.quantity_available > 0)\n\t\t redirect_to(:action => \"show_organization\", \n :organization_id => @gift_template.organization_id)\n\t\t if @gift_template.status == \"admin_visible\"\n\t\t flash[:notice] = \"Sorry, that gift can only be sent by authorized senders.\"\n\t\t else\n\t\t flash[:notice] = \"Sorry, that gift can't be sent right now.\"\n\t end\n\t end\n end", "def friends\n @person = Person.find(params[:id]) \n @project = Project.find(params[:project_id]) \n @persons = @person.get_all_friends\n \n respond_to do |format|\n format.html # friends.html.erb\n format.xml { render :xml => @persons }\n end\n \n end", "def create\n @pending_friend_request = PendingFriendRequest.new(pending_friend_request_params)\n\n respond_to do |format|\n if @pending_friend_request.save\n format.html { redirect_to '/profile', notice: 'Friend request sent!' }\n format.json { render action: 'show', status: :created, location: @pending_friend_request }\n else\n format.html { render action: 'new' }\n format.json { render json: @pending_friend_request.errors, status: :unprocessable_entity }\n end\n end\n end", "def friend_params\n params.require(:friend).permit(:user_id, :name)\n end", "def invite_friend(opts)\n unless opts[:email] || opts[:user_id]\n raise InvalidArgumentError.new \"invite_friend hash argument must include :email or :user_id\"\n end\n translated_options = {}\n translated_options[:invitedUserEmail] = opts[:email] if opts[:email]\n translated_options[:invitedUserId] = opts[:user_id] if opts[:user_id]\n post(\"/user/#{@user_id}/friends/invitations.json\", translated_options)\n end", "def add_friend(sender_id)\n Logger.d(\"Got install referrer : sender_id:#{sender_id}\")\n \n return if sender_id.nil? or sender_id.length == 0\n\n json = {\n :id => sender_id,\n :auth_token => get(:auth_token)\n }.to_json\n\n network_post(CONFIG.get(:add_friend), nil, json, @on_api_call_failed) do |user_object|\n if is_valid_network_user_object?(user_object)\n @data = user_object\n Logger.d(\"Success in @user.add_friend => \" + user_object.to_s)\n serialiaze() # Write the object to persistent storage\n request_ui_refresh \n end\n end \n end", "def can_request\n return true if is_requested\n\n return true if both_user_verified? 
&& !friend_request_exists?\n\n false\n end", "def request_friendship_params\n params.require(:request_friendship).permit(:message, :status, :sender_id, :recipient_id)\n end", "def approve_friend_request user_id\n response = post(\"/users/#{user_id}/approve\")[\"response\"]\n @user = Foursquared::Response::User.new(self,response[\"user\"])\n end" ]
[ "0.7959861", "0.7613023", "0.72850055", "0.70780396", "0.6974054", "0.6882777", "0.6867623", "0.6754856", "0.6735797", "0.66879493", "0.66366", "0.663658", "0.66163576", "0.66040903", "0.6525826", "0.6514932", "0.64763224", "0.6474545", "0.6456232", "0.64350164", "0.6428833", "0.63875514", "0.6368195", "0.6330509", "0.6327456", "0.6325852", "0.6320175", "0.6316257", "0.6311573", "0.62994605", "0.6253265", "0.6232369", "0.62206906", "0.62177676", "0.6192685", "0.6180167", "0.6175592", "0.6167323", "0.61620706", "0.6150475", "0.61476994", "0.61431086", "0.61423784", "0.6136769", "0.6122168", "0.61098313", "0.61074674", "0.60974306", "0.6097363", "0.6092107", "0.6087734", "0.6082792", "0.607825", "0.60744214", "0.60709924", "0.60492516", "0.6035347", "0.6026399", "0.6026018", "0.60206467", "0.60154647", "0.6000665", "0.599962", "0.59969276", "0.59904945", "0.59875333", "0.5986528", "0.5984267", "0.5983639", "0.59797466", "0.5973905", "0.5964855", "0.5964855", "0.5963989", "0.5958355", "0.59457046", "0.5942065", "0.5942065", "0.59399885", "0.5937863", "0.5934604", "0.5933797", "0.5929117", "0.5929018", "0.59118843", "0.58931535", "0.5889112", "0.5889012", "0.58836806", "0.58807844", "0.5880381", "0.58675337", "0.5861153", "0.58573425", "0.5856644", "0.5853304", "0.58492064", "0.5847699", "0.5847396", "0.58449703" ]
0.64390707
19
endpoint to accept friend request
def reply_pos puts 'aw yeah!!!!' # byebug # we've got user from cookie # we're passing in the 2nd user from follow_params[:user2] ret_errors=[] half_follow_a = Follow.new(follower_id: decode_jwt(cookies.signed[:jwt])["user_id"], followee_id: follow_params[:user2]) if !half_follow_a.save ret_errors.push("Failure in the first half of follow. ") end if !!ret_errors render json: {ret_errors: ret_errors} else render json: {friend_request: "friendship established"} end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def friend_request\n\nend", "def requests_received\n friends = Friend.where(friend_id: params[:user_id], accepted: false)\n\t render json:friends\n end", "def send_friend_request\n request_response = post(\"/users/#{id}/request\")[\"response\"]\n @user = Foursquared::Response::User.new(client, request_response[\"userrequest_\"])\n end", "def befriend\n \t#friend = User.find(params[:friend_id])\n respond_to do |format|\n \t\tif Friendship.befriend(@user, @friend)\n \t\t\tmessage = \"Friend request succeed\"\n format.json{render :json => {:message => message, :status => \"200\"}}\n \t\telse\n \t\t\terror = \"No friendship request from #{@friend.nick_name}.\"\n format.json{render :json => {:error => error, :status => \"400\"}}\n \t\tend\n end\n end", "def accept_friend\n \tif @user.pending_friends.include?(@friend)\n Friendship.accept(@user, @friend)\n message = \"Friendship with #{@friend.nick_name} accepted!\"\n format.json{render :json => {:message => message, :status => \"200\"}}\n else\n error = \"No friendship request from #{@friend.nick_name}.\"\n format.json{render :json => {:error => error, :status => \"400\"}}\n\t\tend\n #redirect_to :back\n end", "def add_friend\n # If there is no pending connection between persons,\n # add pendind/requested connections between them.\n # If there is already a pending connection requested from the other direction,\n # change friendship status to accepted.\n\n if (params['user_id'] == params['friend_id'])\n render_json :messages => \"Cannot add yourself to your friend.\", :status => :bad_request and return\n end\n\n if ! ensure_same_as_logged_person(params['user_id'])\n render_json :status => :forbidden and return\n end\n\n @person = Person.find_by_guid(params['user_id'])\n if ! @person\n render_json :status => :not_found and return\n end\n @friend = Person.find_by_guid(params['friend_id'])\n if ! @friend\n render_json :status => :not_found and return\n end\n\n if @person.association? or @friend.association?\n render_json :messages => \"Association users cannot have friends.\", :status => :bad_request and return\n end\n\n if @person.pending_contacts.include?(@friend) #accept if pending\n Connection.accept(@person, @friend)\n else\n unless @person.requested_contacts.include?(@friend) || @person.contacts.include?(@friend)\n Connection.request(@person, @friend) #request if didn't exist\n end\n end\n\n render_json :status => :ok\n end", "def requests_sent\n friends = Friend.where(user_id: params[:user_id], accepted: false)\n\t render json:friends\n end", "def send_friend_request(other_user)\n friend_requests << other_user\n end", "def is_friend\n friend_request_exists? && @friend.accepted?\n end", "def index\n # byebug\n if params[:requester] == \"sent\"\n @friend_requests = current_user.friend_requests_as_requester\n else\n @friend_requests = current_user.friend_requests_as_requested\n end\n filter_friend_requests\n end", "def friend_requests\n # Requires authorization\n raise PutioError::AuthorizationRequired if authentication_required!\n\n make_get_call('/friends/waiting-requests').friends\n end", "def send_friend_request user_id\n response = post(\"/users/#{user_id}/request\")[\"response\"]\n @user = Foursquared::Response::User.new(self,response[\"user\"])\n end", "def friend_requests\n get(\"users/requests\").requests\n end", "def accepted\n FriendRequestNotifier.accepted\n end", "def friends(options={})\n get('/friends', options)\n end", "def invite_facebook_friends\n end", "def friend_request_accepted?\n friend_request_exists? 
&& friend.accepted?\n end", "def add_friend\n # byebug\n #we get user_id from jwt!\n user = User.find(decode_jwt(cookies.signed[:jwt])[\"user_id\"])\n #we get friend_id from frontend\n if !Block.where(blocker_id: user.id, blockee_id:follow_params[:user2]).empty?\n return render json: {error: \"There was a problem! (Ya been blocked!)\"}\n end\n\n followee = User.find(follow_params[:user2])\n #insert the one way relation in db!\n friend_request = Follow.new(follower_id: user.id, followee_id: followee.id)\n if friend_request.save\n render json: {friend_request: followee} \n else\n render json: {error: \"There was a problem!\"}\n end\n end", "def accept_request\n @user = User.friendly.find(params[:user_id])\n @follow = Follow.find_by followable_id: current_user.id, follower_id: @user.id\n @follow.status = 1\n if @follow.save\n @user.add_notifications(\" has accepted your <strong>friend request</strong>.\", \n \" a accepté ta <strong>demande d'ami</strong>.\",\n current_user , nil, nil, Notification.type_notifications[:friend_request], nil)\n render :json => {:success => true}\n else \n render :json => {:success => false}\n end \n \n end", "def create_request\n Friendship.request(@user, @friend)\n flash[:notice] = \"Friend request sent.\"\n redirect_to :back\n end", "def send_friend_request(id)\n get(\"users/#{id}\").user\n end", "def can_request\n return true if is_requested\n\n return true if both_user_verified? && !friend_request_exists?\n\n false\n end", "def block_friend\n end", "def request_friend(friend)\n self.friendships.create!(friend_id: friend.id, status: 'requested')\n friend.friendships.create!(friend_id: self.id, status: 'pending')\n end", "def friend_params\n params.require(:friend).permit(:requester_id, :destiny_id, :status)\n end", "def accept\n\t\tuser.friends << friend\n\t\tdestroy\n\tend", "def create\n\t\tFriendship::request(@user, @friend)\n\t\tUserMailer::friend_request(\n\t\t\t:user => @user,\n\t\t\t:friend => @friend,\n\t\t\t:user_url => profile_for(@user),\n\t\t\t:accept_url => url_for(:action => \"accept\", :id => @user.screen_name),\n\t\t\t:decline_url => url_for(:action => \"decline\", :id => @user.screen_name)\n\t\t).deliver_now\n\t\tflash[:notice] = \"Friend request sent.\"\n\t\tredirect_to profile_for(@friend)\n\tend", "def add_friend\n # check if this friend isnt't to our list friend\n if params[:user_id].to_i == params[:friend_id].to_i \n render json: {\n message: \"Not yourself\"\n }, status: 400\n else\n @current_user = User.find(params[:user_id])\n if @current_user.friend.find_by(friend: params[:friend_id])\n render json: {\n message: \"#{User.find(params[:user_id]).name} can't be added, You are friend with this user\",\n errors: {\n error_code: \"\",\n error_description: \"\"\n }\n }, status: 401\n else\n # add friend\n puts \"Starting adding friend ...\"\n @new = @current_user.friend.new(friend: params[:friend_id], status: :pending)\n if @new.save\n render json: {\n message: \"#{User.find(params[:user_id]).name} added as friend\"\n }, status: 201\n else\n render json: {\n message: @new.errors.messages\n }, status: 401\n end\n end\n end\n\n end", "def friend_wrapper opts = {}\n logged_in?\n params = {uh: @modhash, api_type: 'json'}\n params.merge! 
opts\n post('/api/friend', body: params)\n end", "def friend_request(id)\n deprecated('friend_request', 'send_friend_request')\n send_friend_request(id)\n end", "def friend_params\n params.require(:friend).permit(:user_id, :to_id, :establish, :apply)\n end", "def friend_request(type, options)\n options = options.clone\n\n if options[:subreddit]\n options[:r] = options[:subreddit]\n options.delete :subreddit\n end\n\n post(\"api/#{type}\", options)\n end", "def subscribe\n if Friend.subscribe_friend(params[:requestor], params[:target])\n render json: {\n success: true\n }\n else\n render json: {\n success: false\n }\n end\n end", "def reject_friend_request\n if remove_any_connection_between(params['user_id'], params['friend_id'])\n render_json :entry => {}\n end\n end", "def friends(id)\n friends_data(request(\"users/friends/#{id}.xml\", :auth => true))\n end", "def friend_requests\n friends = current_user.friends.where accepted: false\n profiles = friends.map{ |friend| Profile.find(friend.profile_id)}\n render json: profiles\n end", "def set_friendship\n end", "def friendships_incoming(options = {})\n @req.get(\"/1.1/friendships/incoming.json\", options)\n end", "def friends\n #get friends page\n #get json from friends page\n #parse\n []\n end", "def add_friend_request(user_id)\n friend_suggestions.where(user_id: user_id).delete_all\n req = pending_friends.where(id: user_id).first\n if req.present?\n req.accept!\n else\n req = UserFriendRelationship.between(user_id, id).first || user_friend_relationships.create(user_to_id: user_id)\n PubSub::Publisher.new.publish_for([req.user_to], 'friend_request', {source: self.as_basic_json}, {title: full_name(false), body: 'wants to be your friend'})\n end\n # reset_cache('suggested_friends')\n end", "def sent_friend_requests\n Friendship.where(:sender_uid => self.uid, :accepted => false)\nend", "def received_friend_requests\n Friendship.where(:reciever_uid => self.uid, :accepted => false)\nend", "def friends\n\t\t@user = User.includes(:owned_friendships => :friend, :sent_requests => :requestee,\n\t\t\t\t\t\t\t :received_requests => :requestor).find(params[:id])\n\tend", "def friend_params\n params[:friend]\n end", "def accept_request\n friendship = Friend.find(params[:request_id])\n\t if friendship\n\t\t friendship.accepted = true\n\t\t friendship.save\n\t\t render text: friendship.id\n\t else\n\t\t render text: '-1'\n end\n end", "def accept_friend\n\t\tbinding.pry\n\n\t\t@friendship = Friendship.where({:user_id => session[:user_id], :friend_id => params[:friend_id]}).first\n\t\t\n\t\[email protected] = true\n\n\n\t\tif @friendship.save\n\n\t\t\tredirect_to root_path\n\n\t\telse\n\n\t\t\tredirect_to profile_path_path\n\t\tend\n\tend", "def friend_request(other_user)\n\t\trequested_friends.push(other_user)\n\tend", "def friendable_params\n params.require(:friendable).permit(:sender_id, :receiver_id, :user_id, :accepted)\n end", "def sent_friend_requests\n self.followed_users.where(\"friend_status = 'PENDING'\")\n end", "def add_friend_verification_params\n if User.exists?(id: params[:user_id]) && User.exists?(id: params[:friend_id])\n # good\n else\n\n render json: {\n message: \"Unknow user or friend\"\n }, status: 401\n\n end\n end", "def create\n if current_user.friends.exists?(params[:friend_request][:receiver_id])\n redirect_to users_show_path(current_user), flash: {notice: \"You are already friend\"}\n else\n @friend_request = FriendRequest.new(friend_request_params.merge(sender_id: current_user.id))\n respond_to do |format|\n if 
@friend_request.save\n format.html { redirect_to @friend_request, notice: 'Friend request was successfully created.' }\n format.json { render :show, status: :created, location: @friend_request }\n else\n format.html { render :new }\n format.json { render json: @friend_request.errors, status: :unprocessable_entity }\n end\n end\n\n end\n\n\n end", "def is_requested\n request_status_pending? && @friend.requester == current_user\n end", "def accept\n friend = User.find(@friendship.friend_id)\n \n # a notification message will be delivered to the the requestor anyway;\n # it may contain a personal note, if any was supplied\n from_id = friend.id\n to_id = @friendship.user_id\n subject = friend.name + \" is now your friend!\" \n body = \"<strong><i>Personal message from #{friend.name}:</i></strong><hr/>\"\n \n if params[:accept_msg] && !params[:accept_msg].blank?\n body += ae_some_html(params[:accept_msg])\n else\n body += \"NONE\"\n end\n body += \"<hr/>\"\n\n # the message will appear as 'deleted-by-sender', because the owner of the account effectively didn't send it,\n # so there is no reason for showing this message in their 'sent messages' folder\n message = Message.new( :from => from_id, :to => to_id, :subject => subject, :body => body, :reply_id => nil, :read_at => nil, :deleted_by_sender => true )\n message.save\n \n respond_to do |format|\n if @friendship.accept!\n Activity.create(:subject => User.find(from_id), :action => 'create', :objekt => @friendship)\n flash[:notice] = 'Friendship was successfully accepted.'\n else\n flash[:error] = \"Friendship already accepted.\"\n end\n\n format.html { redirect_to user_friendships_path(current_user.id) }\n end\n end", "def new\n @friend = current_user_or_guest.friends.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render xml: @friend }\n format.json { render json: @friend }\n end\n end", "def endpoint; end", "def endpoint; end", "def endpoint; end", "def endpoint; end", "def add_buddies_to_friends_list\n if user_id.blank?\n render :status=>401,\n :json=>{:Message=>\"The user id cannot be blank for this api.\",\n :Response => \"Fail\",\n :Data => nil} \n end\n friend = User.find_by_id(user_id)\n friend.follow(requested_user)\n render :status=>200,\n :json=>{:Message=>\"Added #{friend.name} to buddy list!\",\n :Response => \"Success\",\n :Data => nil} \n end", "def accept_friend(friend)\n begin\n @roster.accept_subscription(friend)\n rescue\n @log.error \"Error accepting friend #{friend}: #{$!}\"\n end\n end", "def match_friends\n me = User.find params[:id]\n respond_to do |format|\n format.json { \n friends = me.match_friends(params[:q], params[:channel]).collect { |friend|\n name = friend.handle\n name << \" (#{friend.email})\" unless params[:channel]\n { id: friend.id.to_s, name: name }\n }\n if friends.empty? 
\n if params[:q].match(Devise::email_regexp)\n # A \"valid\" address goes back paired with itself\n friends = [ { id: params[:q], name: params[:q] } ]\n end\n end\n render :json => friends\n }\n end\n end", "def request_friendship_with(friend)\n\t\t\t\t\tunless is_friends_or_pending_with?(friend) || self == friend\n \t \t::Friendship.create!(:friendshipped_by_me => self, :friendshipped_for_me => friend)\n\t\t\t\t\tend\n \t end", "def create\n user = User.find(params[:user_id])\n friend_request = user.incoming_requests.build(sender_id: current_user.id)\n if friend_request.save\n if friend_request.sender == current_user\n respond_to do |format|\n format.html { redirect_back(fallback_location: root_path, notice: \"Friend request sent to #{user.name}.\") }\n format.json do\n render json: {\n request: {\n id: friend_request.id,\n name: friend_request.sender.name\n },\n message: \"Friend request sent to #{user.name}.\"\n }\n end\n end\n else\n redirect_back(fallback_location: root_path, notice: \"Friend request sent to #{user.name}.\")\n end\n else\n redirect_back(fallback_location: root_path, alert: 'Friend request could not be sent.')\n end\n end", "def has_request\n request_status_pending? && @friend.requester != current_user\n end", "def make_friend_with(username)\n # Requires authorization\n raise PutioError::AuthorizationRequired if authentication_required!\n\n make_post_call('/friends/%s/request' % [username]).status == \"OK\"\n end", "def send_request; end", "def find_friend\n\n \t\tif params[:email].blank?\n \t\t\tlogger.info(\"[FriendUserController]find_friend == email is blank !!!\")\n \t\t\treturn render :json => {:success => false}\n \t\tend\n\t\n \t\temail = params[:email]\n\n \t\t#get a list of friends\n \t\tfriend_emails = get_friend_email(email)\n\n \t\t#get a list of block friends by email\n \t\tblock_mails = BlockUser.where(:email=>email).map(&:block_email)\n\n \t\tfriend_emails = friend_emails - block_mails\n \t\t\n \t\tlogger.info(\"[FriendUserController]find_friend == friends list =======>#{friend_emails}\")\n\n \t\treturn render :json => {:success => true,:friends=>friend_emails,:count=>friend_emails.size}.to_json\n \t\t\n \tend", "def friend_params\n params.require(:friend).permit(:user_id, :friend_id, :status)\n end", "def accept_request(other_user)\n\t\treceived_relationships.find_by(friend_active_id: other_user.id).update_attribute(:accepted, true)\n\tend", "def create\n @friend = User.find(params[:friend_id])\n if @friend.super_user?\n redirect_to(root_url)\n return\n end\n current_user.require_friend(@friend)\n msg = { resource: current_user.name+\" has just sent you a request friend\" ,\n friend_id: params[:friend_id],\n user_id: current_user.id\n } \n $redis.publish 'rt-change', msg.to_json\n redirect_to request.referrer\n \n end", "def receive(request); end", "def requestee_user\n unless @friendship.requested == current_user\n flash[:warning] = \"You are not authorized.\"\n redirect_to root_path\n end\n end", "def create\n user = User.find(params[:friend_id])\n if current_user == user\n redirect_to root_path, notice: \"You can't send request to yourself\"\n return\n elsif Friendship.where(friend_id: user.id, user_id: current_user, confirm: false).exists?\n redirect_to root_path, notice: \"Friend request already sent\"\n return\n elsif Friendship.where(friend_id: current_user, user_id: user.id, confirm: false).exists?\n redirect_to root_path, notice: \"This user already sent friend request to you. 
Respond to it!\"\n return\n end\n @friendship = current_user.friendships.build(friend_id: user.id)\n\n respond_to do |format|\n if @friendship.save\n format.html { redirect_to root_path, notice: \"Friends request sent\" }\n format.json { render :show, status: :created, location: @friendship }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @friendship.errors, status: :unprocessable_entity }\n end\n end\n end", "def remove_friend\n return if !remove_any_connection_between(params['user_id'], params['friend_id'])\n render_json :status => :ok and return\n end", "def new\n #Find the current user and the requested friend\n @user=current_user\n friend=User.find_by_email(params[:email])\n #make sure the friend exists\n if(friend)\n #Check to see if the friendship already exists\n friendCheck=Friendship.find_by_user_id_and_friend_id(@user.id, friend.id)\n if(!friendCheck)\n #If there is no friendship between the two users, continue as normal\n @friendship = @user.friendships.build(:friend_id => friend.id)\n\n respond_to do |format|\n #Do it again for the reverse relationship (a friends with b and b friends with a are two separate relationships)\n if @friendship.save\n @friendship=friend.friendships.build(:friend_id => @user.id)\n if @friendship.save\n #Send an email to the friend so they can confirm that they want to be friends\n UserMailer.confirmation_email(@user,friend).deliver\n format.html { redirect_to @friendship, notice: 'Friendship was successfully created.' }\n format.json { render json: {:created => 'true', :exists => 'true', :friends => 'false'}}\n else\n format.html { render action: \"new\" }\n format.json { render json: {:created => 'false', :friends => 'false', :exists => 'true'}}\n end\n else\n render json: {:created => 'false', :friends => 'false', :exists => 'true'}\n end\n end\n else\n #If the friendship exist, return this fact to the app. It will notify the user.\n render json: {:friends => 'true', :exists => 'true', :created => 'false'}\n end\n else\n #If the user does not exist, let the app know.\n render json: {:friends => 'false', :exists => 'false', :created => 'false'}\n end\n end", "def invite_people\n end", "def create_friend\n\n \t\tif params[:friends].blank? 
|| params[:friends].size != 2\n \t\t\tlogger.info(\"[FriendUserController]create_friend == params is error ,params ==#{params[:friends]}\")\n \t\t\treturn render :json => {:success => false}\n \t\tend\n \t\temail = params[:friends][0]\n \t\tfriend_email = params[:friends][1]\n\n \t\t# block updates from an email address,if they are not connected as friends, then no new friends connection can be added\n \t\tif BlockUser.exists?(:email=>email,:block_email=>friend_email) || BlockUser.exists?(:email=>friend_email,:block_email=>email)\n \t\t return render :json => {:success => false}\n \t\tend\n \t\t#if they are connected as friends,retrun success is true\n \t\tif FriendUser.exists?(:email=>email,:friend_email=>friend_email) || FriendUser.exists?(:email=>friend_email,:friend_email=>email)\n \t\t\treturn render :json => {:success => true}\n \t\tend\n\n \t\t#create a friend connection between two email addresses.\n \t\tfriend_user = FriendUser.create(:email=>email,:friend_email=>friend_email)\n\n \t\tif friend_user.save\n \t\t\tlogger.info(\"[FriendUserController]create_friend == create a friend connection between two email addresses ,params ==#{params[:friends]}\")\n \t\t\treturn render :json => {:success => true}\n \t\telse\n \t\t\tlogger.info(\"[FriendUserController]create_friend == can not create a friend connection between two email addresses,error=#{friend_user.errors.full_messages} ,params ==#{params[:friends]}\")\n \t\t\treturn render :json => {:success => false}\n \t\tend\n \t\t\n \tend", "def friend_request_exists?\n both_user_verified?\n users = Friend.user_sequence(object.id, current_user.id)\n @friend = Friend.find_by(user1_id: users[0], user2_id: users[1])\n @friend.present?\n end", "def friend_params\n params.permit(:user_id, :friend_id)\n end", "def friend_params\n params.permit(:user_id, :friend_id)\n end", "def friend_params\n params.require(:friend).permit(:user_id, :friend_id, :facebook_id, :friend_confirm)\n end", "def accept\n @from = params[:from_id]\n @to = current_user.id\n @friend_request = FriendRequest.where(to_id: @to, from_id: @from)\n Notification.where(user_id: @to, from_id: @from).destroy_all\n @f = Friendship.new(user_id: @to, friend_id: @from)\n if @f.save\n User.find(@from).friends.push(User.find(@to))\n @friend_request.destroy_all\n flash[:notice] = \"Friendship accepted\"\n redirect_to friends_find_path\n else\n flash[:alert] = \"Couldnt accept friendship\"\n redirect_to friends_find_path\n end\n end", "def get_friend_id \n uri = \"https://api.twitter.com/1/friends/ids.json\" \n \n #http request for friend search start\n req = Typhoeus::Request.new(uri,\n :method =>\"get\",\n :params =>{:cursor=>\"-1\", :screen_name=>crt_user.handler}) \n #sign_request(req,uri)\n hydra = Typhoeus::Hydra.new\n hydra.queue(req)\n hydra.run\n JSON.parse(req.response.body)[\"ids\"]\n end", "def update\n if params[:status] == \"accepted\"\n @friendship = current_user.friendships.build(:friend_id => params[:friend_id], :status => 'accepted')\n friend = User.find(params[:friend_id])\n @friendship2 = friend.friendships.build(:friend_id => params[:user_id], :status => 'accepted')\n\n respond_to do |format|\n if @friendship.save\n format.html { redirect_to @friendship, notice: 'Your Connection Request has been sent' }\n format.json { render action: 'show', status: :created, location: @friendship }\n else\n format.html { render action: 'new' }\n format.json { render json: @friendship.errors, status: :unprocessable_entity }\n end\n if @friendship2.save\n format.html { redirect_to 
@friendship2, notice: 'You received a friendship invitation' }\n format.json { render action: 'show', status: :created, location: @friendship2 }\n else\n format.html { render action: 'new' }\n format.json { render json: @friendship2.errors, status: :unprocessable_entity }\n end\n end\n end\n\n end", "def pending_friend_requests\n if ! ensure_same_as_logged_person(params['user_id'])\n render_json :status => :forbidden and return\n end\n render_json :entry => @user.pending_contacts\n end", "def pending_friend_requests_to\n \tself.friendships.where(state: \"pending\")\n end", "def list_pending_friend_request\n @current_user = User.find(params[:user_id])\n render json: {\n pending_friend: @current_user.friend.where(status: :pending).map do |friend|\n {\n id: friend.friend,\n name: User.find(friend.friend).name,\n avatar: \"#{request.base_url}#{Rails.application.routes.url_helpers.rails_blob_path(User.find(friend.friend).avatar, only_path: true)}\",\n status: friend.status\n }\n end\n }, status: :ok\n end", "def all_friends\n requested_friends + accepted_friends\n end", "def request; end", "def request; end", "def request; end", "def request; end", "def request; end", "def request; end", "def request; end", "def request; end", "def request; end", "def request; end", "def request; end", "def friend_request_params\n params.require(:friend_request).permit(:sender_id, :receiver_id, :status_id)\n end", "def friends(g)\n \n ip = g.ll\n @friends = @access_token.get(\"/v1/checkins?geolat=#{ip[:lat]}&geolong=#{ip[:long]}\", {'User-Agent' => \"fourrific:#{Fourrific::VERSION}\"}).body \n \n begin\n @friends = Crack::XML.parse(@friends)\n if @friends['unauthorized']\n @error = \"#{@friends['unauthorized']}. Clear your cookies & cache and try again.\"\n elsif @friends['ratelimited']\n @error = \"#{@friends['ratelimited']}. Please try again later.\"\n elsif @friends['error']\n @error = \"#{@friends['error']}\"\n else\n @friends['checkins']['checkin'].each do |checkin|\n checkin['created'] = checkin['created'].to_time.iso8601\n checkin['distance'] = (checkin['distance'].to_i / 1609.344).to_i\n end\n end \n end\n \n \n @friends\n \n end" ]
[ "0.80551887", "0.6816098", "0.6752391", "0.6730919", "0.66701365", "0.66182554", "0.65832263", "0.65568876", "0.6523307", "0.65205026", "0.648987", "0.64888775", "0.6475923", "0.63806033", "0.6377061", "0.6357955", "0.63446057", "0.62786806", "0.627442", "0.6236807", "0.62128156", "0.62062025", "0.6200655", "0.6173869", "0.61733013", "0.6147473", "0.6127315", "0.611836", "0.6109406", "0.61032104", "0.61011237", "0.6095644", "0.6094156", "0.6090665", "0.6083242", "0.6072321", "0.60620093", "0.60598475", "0.60573167", "0.60561067", "0.6054192", "0.605365", "0.6048759", "0.604368", "0.6026685", "0.60167193", "0.6007239", "0.6001319", "0.59974355", "0.59911495", "0.59738904", "0.59692574", "0.5965826", "0.59570473", "0.59473926", "0.59473926", "0.59473926", "0.59473926", "0.59431577", "0.5942768", "0.5942096", "0.5940278", "0.5939425", "0.59350973", "0.5932297", "0.59313", "0.5931194", "0.590959", "0.5904915", "0.58993626", "0.58849216", "0.58782554", "0.58690286", "0.5869006", "0.5866917", "0.5864338", "0.5864174", "0.58565694", "0.585582", "0.585582", "0.58373976", "0.58311546", "0.58288693", "0.5822822", "0.5822727", "0.5822035", "0.5821322", "0.582012", "0.5818928", "0.5818928", "0.5818928", "0.5818928", "0.5818928", "0.5818928", "0.5818928", "0.5818928", "0.5818928", "0.5818928", "0.5818928", "0.5817585", "0.58133835" ]
0.0
-1
endpoint to deny friend request
def reply_neg puts 'AWW BOO' # byebug friend_deny=Follow.find_by(follower_id: follow_params[:user2], followee_id: decode_jwt(cookies.signed[:jwt])["user_id"]) if friend_deny.destroy render json: {friend_deny: 'success'} else render json: {friend_deny: 'failure'} end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def deny_friend_request\n request_response = post(\"/users/#{id}/deny\")[\"response\"]\n @user = Foursquared::Response::User.new(client, request_response[\"user\"])\n end", "def deny_friend_request(username)\n # Requires authorization\n raise PutioError::AuthorizationRequired if authentication_required!\n\n make_post_call('/friends/%s/deny' % [username]).status == \"OK\"\n end", "def reject_friend_request\n if remove_any_connection_between(params['user_id'], params['friend_id'])\n render_json :entry => {}\n end\n end", "def deny_friend_request user_id\n response = post(\"/users/#{user_id}/deny\")[\"response\"]\n @user = Foursquared::Response::User.new(self,response[\"user\"])\n end", "def forbidden\n respond_with 403\n end", "def forbidden\n respond_with 403\n end", "def friend_request\n\nend", "def not_friends\n @requested = User.find(params[:friend_id])\n if current_user.friend?(@requested)\n flash[:success] = \"You're already friends!\"\n redirect_to @requested\n end\n end", "def forbidden!\n render_api_error!('403 Forbidden', 403)\n end", "def block_friend\n end", "def ignored?\n self.friendship_status == IGNORED\n end", "def can_request\n return true if is_requested\n\n return true if both_user_verified? && !friend_request_exists?\n\n false\n end", "def deny\n self.granted = -1\n restaurant = Restaurant.find(self.restaurant_id)\n restaurant.mark_collaborative\n end", "def bots_not_allowed\n render_403('page') if visitor_is_bot?\n end", "def not_pending\n if user.incoming_friend_requests.pluck(:user_id).include?(friend_id)\n errors.add(:friend, 'already requested friendship')\n end\n end", "def access_denied\n\n end", "def access_denied\n end", "def denied\n end", "def deny_access\n respond_to do |format|\n format.json {\n # Some schools of thought advocate the use of 404 (:not_found). See\n # http://www.bennadel.com/blog/2400-handling-forbidden-restful-requests-401-vs-403-vs-404.htm\n render json: {}, status: :unauthorized\n }\n end\n end", "def deny\n @ride = current_user.fares.find_by_id(params[:ride])\n @ride.deny(User.find_by_id(params[:passenger]))\n redirect_to user_path(current_user)\n end", "def forbidden\n head :forbidden\n end", "def decline_friend\n \tif @user.pending_friends.include?(@friend)\n Friendship.breakup(@user, @friend)\n message = \"Friendship with #{@friend.nick_name} declined\"\n format.json{render :json => {:message => message, :status => \"200\"}}\n else\n error = \"No friendship request from #{@friend.nick_name}.\"\n format.json{render :json => {:error => error, :status => \"400\"}}\n end\n #redirect_to :back\n end", "def skip_authorization; end", "def filter_self\n if @user && !current_user.can_edit?(@user)\n respond_to do |format|\n format.html {\n render :nothing => true, :status => 403\n }\n format.json {\n render :json => {:status => 'failure'}, :status => 403\n }\n end\n end\n end", "def permission_denied\n # render :status => 403, :json => {:message => \"You don't have permission to do that.\"}\n end", "def remove_friend\n end", "def pending_friend_requests\n if ! 
ensure_same_as_logged_person(params['user_id'])\n render_json :status => :forbidden and return\n end\n render_json :entry => @user.pending_contacts\n end", "def remove_friend\n return if !remove_any_connection_between(params['user_id'], params['friend_id'])\n render_json :status => :ok and return\n end", "def requestee_user\n unless @friendship.requested == current_user\n flash[:warning] = \"You are not authorized.\"\n redirect_to root_path\n end\n end", "def status_forbidden\n respond_to do |format|\n format.any { head :forbidden }\n end\n end", "def cancel_friend\n respond_to do |format|\n if @user.requested_friends.include?(@friend)\n Friendship.breakup(@user, @friend)\n message = \"Friendship request canceled.\"\n format.json{render :json => {:message => message, :status => \"200\"}}\n else\n error = \"No request for friendship with #{@friend.nick_name}\"\n format.json {render :json => {:error => error, :status => \"400\"}}\n end\n end\n #redirect_to :back\n end", "def is_friend\n friend_request_exists? && @friend.accepted?\n end", "def has_request\n request_status_pending? && @friend.requester != current_user\n end", "def permission_denied\n respond_to do |format|\n format.html { render :template => \"shared/forbidden\", :status => :forbidden }\n format.any { head :forbidden }\n end\n end", "def ignore_request(_team, _user)\n # stub\n end", "def requests_received\n friends = Friend.where(friend_id: params[:user_id], accepted: false)\n\t render json:friends\n end", "def deny!\n body = %(This is an unauthorised request. Your IP address has been logged and will be reported.)\n response_headers = { 'Content-Type' => 'text/plain' }\n response = Rack::Response.new(body, 400, response_headers)\n custom! response.finish\n end", "def require_authorization( message=\"You are not authorized to access this resource.\" )\n\t\tfinish_with( HTTP::FORBIDDEN, message )\n\tend", "def unauthorized_request user:current_user,filter:nil\n\t\t\tputs \"\\n\\nNo filter matches the given http method, controller, or controller method, rerouting...\" if filter.blank?\n\t\t\tputs \"\\n\\nUser #{user.email} is not authorized to #{filter.http_method.upcase}::>#{filter.controller || 'all'}.#{filter.action_name || 'all'}, rerouting...\\n\\n\" unless filter.blank?\n\t\t\trender json: \"Unauthorized Request\"\n\t\tend", "def unauthorized\n end", "def can_be_blacklisted_by?(user)\n user && user.id != self.user.id # && !user.all_friend_ids.include?(self.user.id)\n end", "def unauthenticated\n end", "def deny\n @reservation.is_denied = true\n @reservation.status = :denied\n @reservation.admin_response_time = Time.now\n respond_to do |format|\n if @reservation.update(reservation_params)\n notify_denied\n format.html { redirect_to @reservation, flash: { error: 'Reservation has been denied' } }\n format.json { render :show, status: :ok, location: @reservation }\n else\n respond_to_errors :show, format\n end\n end\n end", "def restrict_access\n head :unauthorized and return false unless current_user\n end", "def deny_user(id)\n post(\"users/#{id}/deny\").user\n end", "def forbidden\n\n render_error( :forbidden )\n\n end", "def me_or_friend?\n unless current_user.is_friends_with? 
@user or @user == current_user\n flash[:notice] = \"You aren't allowed to view that page.\"\n end\n end", "def whitelist\n if cannot_access_api?\n render json: [], status: :unauthorized\n end\n end", "def reject_friend(user)\n friendship = inverse_friendships.find { |friendship| friendship.user == user }\n friendship.destroy\n end", "def blacklisted?\n self.friendship_status == BLACKLISTED\n end", "def is_requested\n request_status_pending? && @friend.requester == current_user\n end", "def unblock(me: nil)\n my_id = 't2_' + (me.is_a?(User) ? user.id : @client.get('/api/v1/me').body[:id])\n # Talk about an unintuitive endpoint\n @client.post('/api/unfriend', container: my_id, name: get_attribute(:name), type: 'enemy')\n end", "def unauthorized_request(e)\n render json: { message: e.message }, status: :unauthorized\n end", "def unauthorized_request(e)\n render json: { message: e.message }, status: :unauthorized\n end", "def unblock(me: nil)\n my_id = 't2_' + (me.is_a?(User) ? user.id : client.get('/api/v1/me').body[:id])\n # Talk about an unintuitive endpoint\n client.post('/api/unfriend', container: my_id, name: read_attribute(:name), type: 'enemy')\n end", "def user_not_authorized\n respond_to do |format|\n format.json do\n render json: ErrorReport.generate( { id: [t(\"errors.messages.not_authorized\")] }, 403 ), status: :forbidden\n end\n end\n end", "def reject\n other_id=params[:other_id].to_i()\n my_id=session[:user_id]\n if my_id<other_id then\n small_id=my_id\n big_id=other_id\n elsif my_id>other_id then\n small_id=other_id\n big_id=my_id\n else\n flash[:notice] = \"There was an removing the friend.\"\n redirect_to request.referrer\n return false\n end\n #NOTE: Don't copy and reuse the delete_all method for tables whose rows are dependencies.\n if Friend.destroy_all([\"(small_id=? AND big_id=?)\",\n small_id, big_id]) then\n flash[:notice] = \"Removed friend successfully.\"\n #redirect_to request.referrer\n return true\n else\n flash[:notice] = \"There was an error removing the friend.\"\n redirect_to request.referrer\n return false\n end\n session[:number_of_friend_invites]-=1\n end", "def email_deny(params)\n http_helper.send_post_request(\"#{@url_prefix}/#{get_user_id!(params)}/email/deny\", params)\n end", "def allow_anon\n end", "def sent_friend_requests\n Friendship.where(:sender_uid => self.uid, :accepted => false)\nend", "def requests_sent\n friends = Friend.where(user_id: params[:user_id], accepted: false)\n\t render json:friends\n end", "def negative?\n self.friendship_status < 0\n end", "def status_forbidden\n @status = 403\n end", "def received_friend_requests\n Friendship.where(:reciever_uid => self.uid, :accepted => false)\nend", "def unauthorized_request(e)\n json_response({ message: e.message }, :unauthorized)\n end", "def unauthorized_request(e)\n json_response({ message: e.message }, :unauthorized)\n end", "def unauthenticated_request(params)\n do_request(params)\n end", "def unauthenticated_request(params)\n do_request(params)\n end", "def forbidden(message)\n render json: {\n errors: message,\n status: :forbidden\n }, status: 403\n end", "def add_friend_verification_params\n if User.exists?(id: params[:user_id]) && User.exists?(id: params[:friend_id])\n # good\n else\n\n render json: {\n message: \"Unknow user or friend\"\n }, status: 401\n\n end\n end", "def not_allowed! 
redirect = root_url\n raise Exceptional::Unauthorized.new(\"Sorry, I was could not perform the action you requested!\")\n end", "def index\n # byebug\n if params[:requester] == \"sent\"\n @friend_requests = current_user.friend_requests_as_requester\n else\n @friend_requests = current_user.friend_requests_as_requested\n end\n filter_friend_requests\n end", "def pending_friend_requests_to\n \tself.friendships.where(state: \"pending\")\n end", "def unauthorized_request(e)\n json_response({ message: e.message }, :unauthorized)\nend", "def unauthorized_request(e)\n\t\tjson_response({ message: e.message }, :unauthorized)\n\tend", "def unauthorized_request(e)\n json_response({ message: e.message }, :unauthorized)\n end", "def unblock\n user = User.find(decode_jwt(cookies.signed[:jwt])[\"user_id\"])\n #we get friend_id from frontend\n if Block.where(blocker_id: user.id, blockee_id:follow_params[:user2]).empty?\n return render json: {error: \"No block found!\"}\n end\n\n Block.where(blocker_id: user.id, blockee_id:follow_params[:user2]).destroy_all\n # byebug\n return render json: {response: \"Friend unblocked\"}\n end", "def involved_user\n unless @friendship.requester == current_user || @friendship.requested == current_user\n flash[:warning] = \"You are not authorized.\"\n redirect_to root_path\n end\n end", "def deny_access\n respond_to do |format|\n format.html {\n flash[:alert] = 'You are not authorized to perform this action'\n\n redirect_to root_path\n }\n\n format.json {\n # Some schools of thought advocate the use of 404 (:not_found). See\n # http://www.bennadel.com/blog/2400-handling-forbidden-restful-requests-401-vs-403-vs-404.htm\n render json: {}, status: :unauthorized\n }\n end\n end", "def restrict_access\n api_key = ApiKey.find_by_access_token(request.headers[\"token\"])\n head :unauthorized unless api_key \n end", "def friend_requests\n inverse_friendships.map { |friendship| friendship.user unless friendship.confirmed }.compact\n end", "def friend_requests\n inverse_friendships.map { |friendship| friendship.user unless friendship.confirmed }.compact\n end", "def pending_friend_requests_from\n \tself.inverse_friendships.where(state: \"pending\")\n end", "def accepted\n FriendRequestNotifier.accepted\n end", "def redirect_for_inactive\n #if @friendship.friend.deactivated?\n # flash[:error] = \"Invalid connection request: person deactivated\"\n # redirect_to people_url\n #end\n end", "def unauthorized\n render_json error: 'Access Not Authorized', status: :forbidden\n end", "def send_friend_request(other_user)\n friend_requests << other_user\n end", "def destroy\n @friendship.destroy\n respond_to do |format|\n format.html { redirect_to friendships_url, notice: \"Friens request denied\" }\n format.json { head :no_content }\n end\n end", "def allow\n @privacy_friend = PrivacyFriend.find(params[:id])\n @privacy_friend.allowed = 1\n \n respond_to do |format|\n if @privacy_friend.update_attributes(params[:privacy_friend])\n flash[:notice] = 'PrivacyFriend was successfully updated.'\n format.html { redirect_to('/privacy_friends/' + @privacy_friend.user2_id.to_s() + '/new') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @privacy_friend.errors, :status => :unprocessable_entity }\n end\n end\n end", "def restrict_access\n\t\tauthenticate_or_request_with_http_token do |token, options|\n\t\t\tapi_token = ApiToken.find_by_access_token(token)\n\t\t\tparam_account = self.get_account\n\t\t\t# Does the api token exist and if so,\n\t\t\t# 
does it have the same id as the requester supplied?\n\t\t\tapi_token && api_token.account_id == param_account.id\n\t\tend\n\tend", "def authorize_user\n @friendship = Friendship.find(params[:id], :include => [:user])\n logger.error(\"Friendship is #{@friendship.inspect}\")\n unless current_user == @friendship.friend\n flash[:error] = \"Invalid connection.\"\n redirect_to people_path\n end\n rescue ActiveRecord::RecordNotFound\n flash[:error] = \"Invalid or expired connection request\"\n redirect_to people_path\n end", "def unauthorized_request(e)\n render json: jsonData(e), status: :unauthorized\n end", "def get_non_friends\n\n userFriends = self.all_friends\n userNotFriends = User.all_except(userFriends)\n\n #user is not friend of himself, next line break do this\n userNotFriends = userNotFriends.all_except(self)\n\n return userNotFriends\n end", "def forbid_anonymous_user_param\n if is_anonymous? && params.has_key?('user')\n raise Forbidden, 'Not allowed to list other users environments, because '\\\n 'you are seen as an anonymous one'\n end\n end", "def friends_only\n friends = @photo.author.friends\n\n unless @photo.author == current_user ||\n friends.include?(current_user)\n flash[:danger] = \"Friends Only!\"\n redirect_to root_path\n end\n end", "def unfriend(id)\n post(\"users/#{id}/unfriend\").user\n end", "def accept\n\t\tuser.friends << friend\n\t\tdestroy\n\tend", "def show\n skip_authorization\n end", "def friend_request_accepted?\n friend_request_exists? && friend.accepted?\n end", "def blacklisted?(request)\n false\n end" ]
[ "0.76746804", "0.75277454", "0.7499744", "0.73901665", "0.7110458", "0.70696723", "0.70221907", "0.6910222", "0.6761475", "0.6614006", "0.6559375", "0.6533936", "0.65114635", "0.64689326", "0.64272654", "0.64236087", "0.6419965", "0.64182174", "0.64138484", "0.63842076", "0.6381744", "0.6352813", "0.63497734", "0.6308732", "0.6291101", "0.6276099", "0.6268226", "0.6267659", "0.62609845", "0.624274", "0.62422836", "0.62054694", "0.6205031", "0.6204271", "0.6183132", "0.61816233", "0.6166119", "0.6158109", "0.61342865", "0.6129117", "0.61246324", "0.6121744", "0.61181307", "0.6113848", "0.6111445", "0.61037815", "0.6101975", "0.6075966", "0.60758185", "0.60723424", "0.6068345", "0.6066984", "0.6055717", "0.6055717", "0.60454875", "0.60432065", "0.6042033", "0.60401016", "0.60389197", "0.60371304", "0.6035897", "0.6029518", "0.60099024", "0.6002579", "0.59920657", "0.59920657", "0.59796846", "0.59796846", "0.59769756", "0.5968486", "0.5960119", "0.59593946", "0.59591687", "0.59568536", "0.59552336", "0.5951629", "0.59514695", "0.59379035", "0.5935249", "0.59340787", "0.5932697", "0.5932697", "0.5928573", "0.5919411", "0.59189874", "0.59150785", "0.590437", "0.59038466", "0.59028494", "0.58939254", "0.589381", "0.58866066", "0.5885897", "0.58852786", "0.58761096", "0.5859982", "0.58536446", "0.58528453", "0.5846821", "0.58457303" ]
0.700451
7
endpoint to unblock a friend
def unblock user = User.find(decode_jwt(cookies.signed[:jwt])["user_id"]) #we get friend_id from frontend if Block.where(blocker_id: user.id, blockee_id:follow_params[:user2]).empty? return render json: {error: "No block found!"} end Block.where(blocker_id: user.id, blockee_id:follow_params[:user2]).destroy_all # byebug return render json: {response: "Friend unblocked"} end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unblock(me: nil)\n my_id = 't2_' + (me.is_a?(User) ? user.id : @client.get('/api/v1/me').body[:id])\n # Talk about an unintuitive endpoint\n @client.post('/api/unfriend', container: my_id, name: get_attribute(:name), type: 'enemy')\n end", "def unblock(me: nil)\n my_id = 't2_' + (me.is_a?(User) ? user.id : client.get('/api/v1/me').body[:id])\n # Talk about an unintuitive endpoint\n client.post('/api/unfriend', container: my_id, name: read_attribute(:name), type: 'enemy')\n end", "def remove_friend\n end", "def unfriend!(resource)\n redis.multi do\n redis.hdel(friend_list_key, resource.id)\n redis.hdel(resource.friend_list_key, self.id)\n end\n end", "def unblock\n current_user.unblock_user(@user.id)\n render json: {ok: 1}\n end", "def unblock\n @user = current_user\n @friend = User.find(params[:friend_id])\n @user.unblock @friend\n\n l = Log.new\n l.user_id_1 = @user.id\n l.user_id_2 = @friend.id\n name_1 = if @user.name.nil? then @user.email.split('@')[0] else @user.name end\n name_2 = if @friend.name.nil? then @friend.email.split('@')[0] else @friend.name end\n l.message = \"#{name_1.humanize} blocked #{name_2.humanize}\"\n l.loggingtype = 0\n l.save\n flash[:blocked] = \"#{name_2.humanize} was unblocked successfully $green\" \n \n @friend.rank = @friend.rank + 5\n @friend.save\n\n redirect_to action: 'pending'\n end", "def unfriend(id)\n post(\"users/#{id}/unfriend\").user\n end", "def unfriend(actor)\n unfollow(actor)\n actor.unfollow(self) if ActsAsActivityStream.sns_type == :custom\n end", "def unfriend\n request_response = post(\"/users/#{id}/unfriend\")[\"response\"]\n @user = Foursquared::Response::User.new(client, request_response[\"user\"])\n end", "def remove_friend\n return if !remove_any_connection_between(params['user_id'], params['friend_id'])\n render_json :status => :ok and return\n end", "def unfriend\n if relation = @user.friendships.find_by(friend_id: @friend.id)\n relation.destroy\n render json: { success: true }\n else\n render json: {message: relation.errors&.full_messages&.first}, status: 202\n end\n end", "def unfriend_wrapper opts = {}\n logged_in?\n params = { uh: @modhash, api_type: 'json'}\n params.merge! opts\n post('/api/unfriend', body: params)\n end", "def unfollow(actor)\n contact = contact_to!(actor)\n contact.update_column(:blocked, true)\n contact\n end", "def decline_friend(friend)\n begin\n @roster.decline_subscription(friend)\n rescue\n @log.error \"Error declining friend #{friend}: #{$!}\"\n end\n end", "def block_friend\n end", "def unblock!\n self.blocked = nil\n self.save\n end", "def unsubscribe\n if Friend.unsubscribe_friend(params[:requestor], params[:target])\n render json: {\n success: true\n }\n else\n render json: {\n success: false\n }\n end\n end", "def unblock_user\n SecurityService.unblock_user(params[:id])\n redirect_to '/users/', notice: \"Käyttäjän blokkaus poistettu. 
Hän voi taas luoda kursseja.\"\n end", "def unblock_user(id)\n delete(\"UserBlock/#{id}\")\n end", "def unfriend friend\n friendships.where( friend: friend ).first.destroy\n end", "def decline_friend\n \tif @user.pending_friends.include?(@friend)\n Friendship.breakup(@user, @friend)\n message = \"Friendship with #{@friend.nick_name} declined\"\n format.json{render :json => {:message => message, :status => \"200\"}}\n else\n error = \"No friendship request from #{@friend.nick_name}.\"\n format.json{render :json => {:error => error, :status => \"400\"}}\n end\n #redirect_to :back\n end", "def reply_neg\n puts 'AWW BOO'\n # byebug\n friend_deny=Follow.find_by(follower_id: follow_params[:user2], followee_id: decode_jwt(cookies.signed[:jwt])[\"user_id\"])\n if friend_deny.destroy\n render json: {friend_deny: 'success'}\n else\n render json: {friend_deny: 'failure'}\n end\n end", "def unfriend(other_user)\n @friendship = user.friendships.find_by(friend: other_user)\n @friendship.destroy\n end", "def destroy\n begin\n \t@block = User.find(params[:blocked][:blocked_id])\n\t\n\t\[email protected](@block)\n\t\trender json: @user, status: :accepted, location: @user\n\trescue => error\n\t\trender :json => error.message, status: :unprocessable_entity\n\tend\n end", "def unfriend(user)\n friend_name = extract_string(user, :username)\n friend_request 'unfriend', :container => current_user.full_name, :name => friend_name, :type => :friend\n end", "def unfriend user_id\n response = post(\"/users/#{user_id}/unfriend\")[\"response\"]\n @user = Foursquared::Response::User.new(self,response[\"user\"])\n end", "def reject_friend_request\n if remove_any_connection_between(params['user_id'], params['friend_id'])\n render_json :entry => {}\n end\n end", "def unmember(jid,reason=nil,&blk)\n set_affiliation 'none', jid, reason, &blk\n end", "def unblock( id, login=@login, password=@password, type='xml' )\n \n http = Net::HTTP.new('twitter.com', 80)\n data = http.start do |http_inst|\n path = \"/blocks/destroy/#{id}.#{type}\"\n req = Net::HTTP::Post.new(path)\n \n # we make an HTTP basic auth by passing the\n # username and password\n req.basic_auth login, password\n \n resp, data = http_inst.request(req)\n data\n end\n end", "def unfollow!(unfriend)\n\t\tresponse = access_token.post(\"/friendships/destroy/#{unfriend}.json\")\n\t\tcase response\n\t\twhen Net::HTTPSuccess\n\t\t\tfriend=JSON.parse(response.body)\n\t\t\traise TwitterOauth::UnexpectedResponse unless friend.is_a? 
Hash\n\t\t\tfriend\n\t\telse\n\t\t\traise TwitterOauth::APIError\n\t\tend\n\trescue => err\n\t\tputs \"Exception in unfollow!: #{err}\"\n\t\traise err\n\tend", "def deny_friend_request(username)\n # Requires authorization\n raise PutioError::AuthorizationRequired if authentication_required!\n\n make_post_call('/friends/%s/deny' % [username]).status == \"OK\"\n end", "def deny_friend_request\n request_response = post(\"/users/#{id}/deny\")[\"response\"]\n @user = Foursquared::Response::User.new(client, request_response[\"user\"])\n end", "def unblock_user\n current_user.unblock_user!(params[:user_id])\n redirect_to :back, notice: \"#{User.find(params[:user_id]).full_name(false)} has been successfully unblocked\"\n end", "def unban!\n MTurkUtils.unban_user self\n end", "def delete_blocked\n end", "def uninvite_no_progress(*params)\n\t\tself.each{|x| safe{x.uninvite(*params)} if x.respond_to?('uninvite')}\n\tend", "def reject_friend(user)\n friendship = inverse_friendships.find { |friendship| friendship.user == user }\n friendship.destroy\n end", "def unauthorize_member\n @user = User.find(params[:user_id])\n @group = Group.find(params[:group_id])\n auth_or_unauth_member(\"unauth\", @user, @group)\n end", "def unblock_user(user_id)\n post(\"/users/#{user_id}/unblock\")\n end", "def uninvite\n @meal = Meal.find(params[:meal_id])\n authorize @meal, :update?\n temp_user = @meal.invited_users.find(params[:user_id])\n @meal.invited_users.delete(temp_user)\n redirect_to @meal\n end", "def decline_request\n friendship = Friend.find(params[:request_id])\n if friendship\n\t\t friend_id = friendship.friend_id\n\t friendship.destroy\n\t\t render text: friend_id\n\t else\n\t render text: '-1' \n\t end \n end", "def uninvite\n if @@invitedFriends.include? params[:uId].to_i\n @user = User.find(params[:uId].to_i)\n if @@invitedFriends.delete(params[:uId].to_i)\n render json: {error: false, message: @user.name+\" un inveited from order\" }\n else\n render json: {error: true, message: \"can't un invite \"[email protected]+\" to order\" }\n end\n else\n render json: {error: true, message: @user.name+\"is not invited to order\" }\n end\n end", "def conference_unmute(params)\n path = @version + '/Conference/Member/Unmute/'\n method = 'POST'\n return request(path, method, params)\n end", "def unblock_user(user, blocked_user)\n username = Digest::MD5.hexdigest(user.id.to_s)\n blocked_username = Digest::MD5.hexdigest(blocked_user.id.to_s)\n begin\n response = RestClient.delete(\"#{DOMAIN}/#{ORG}/#{APP}/users/#{username}/blocks/users/#{blocked_username}\",\n \"Authorization\" => \"Bearer #{access_token}\",\n :content_type => :json,\n :accept => :json\n )\n if response.code == 200\n p response\n\n body = JSON.parse(response.body)\n p body\n end\n rescue => e\n puts e.response\n end\n end", "def deny_friend_request user_id\n response = post(\"/users/#{user_id}/deny\")[\"response\"]\n @user = Foursquared::Response::User.new(self,response[\"user\"])\n end", "def destroy\n request = FriendshipRequest.find(params[:id])\n request.destroy\n if request.sender == current_user \n respond_to do |format|\n format.html { redirect_back(fallback_location: root_path, notice: \"Friend request to #{request.recipient.name} cancelled.\") }\n format.json do\n render json: {\n unrequest: {\n id: request.recipient.id,\n name: request.sender.name\n },\n message: \"Friend request to #{request.recipient.name} cancelled.\"\n }\n end\n end\n else\n redirect_back(fallback_location: root_path, notice: \"Friend request from #{request.sender.name} 
rejected.\")\n end\n end", "def unadmin(jid,reason=nil,&blk)\n set_affiliation 'member', jid, reason, &blk\n end", "def unblock!\n BlockedObject.unblock_page!(self)\n end", "def unblock(from_id, to_id, scope = Amico.default_scope_key)\n return if from_id == to_id\n\n Amico.redis.multi do\n Amico.redis.zrem(\"#{Amico.namespace}:#{Amico.blocked_key}:#{scope}:#{from_id}\", to_id)\n Amico.redis.zrem(\"#{Amico.namespace}:#{Amico.blocked_by_key}:#{scope}:#{to_id}\", from_id)\n end\n end", "def unblock_user!(_user_id)\n blocked_users_relationships.where(user_id: _user_id).destroy_all\n end", "def remove_reverse_friendship\n Friendship.delete_all :user_id => self.friend_id, :friend_id => self.user_id\n end", "def unfriendship\n if params[:id]\n current_user.follow_ids.delete(params[:id])\n @user = User.find(params[:id]).del_follower(current_user)\n @user.save\n current_user.save\n end\n\n respond_to do |format|\n format.html { redirect_to :back }\n format.js\n end\n end", "def destroy\n\t\t@ignore = Friend.where(:user_id => params[:id])\n\t\[email protected]\n\t\tredirect_to @user\n\tend", "def remove_user_ban(data); end", "def remove_user_ban(data); end", "def destroy\n @pending_friend_request.destroy\n respond_to do |format|\n format.html { redirect_to '/profile?id=%s' % [session[:user_id]] }\n format.json { head :no_content }\n end\n end", "def unban(jid,reason=nil,&blk)\n set_affiliation 'none', jid, reason, &blk\n end", "def disconnect\n request_user.offline! if request_valid?\n render :nothing => true\n end", "def cancel_friend\n respond_to do |format|\n if @user.requested_friends.include?(@friend)\n Friendship.breakup(@user, @friend)\n message = \"Friendship request canceled.\"\n format.json{render :json => {:message => message, :status => \"200\"}}\n else\n error = \"No request for friendship with #{@friend.nick_name}\"\n format.json {render :json => {:error => error, :status => \"400\"}}\n end\n end\n #redirect_to :back\n end", "def unban!\n self.update_attribute(:banned, false)\n end", "def destroy(&block)\n delete(member_url) do |response, json|\n if block\n block.call response, json\n end\n end\n end", "def unblocked\n where blocked: false\n end", "def accept\n\t\tuser.friends << friend\n\t\tdestroy\n\tend", "def destroy\n @friendrefuse = Friend.find(params[:id])\n @friendrefuse.destroy\n redirect_to hub_path\n end", "def unblock(blocker_blog, blockee_blog=nil, **options)\n validate_options([:blocked_tumblelog, :anonymous_only], options)\n options[:blocked_tumblelog] ||= blockee_blog\n delete(blog_path(blocker_blog, 'blocks'), options)\n end", "def can_unblock?(friendable)\n blocked? && self.blocker_id == friendable.id && friendable.class.to_s == Amistad.friend_model\n end", "def destroy_block(user)\n delete \"blocks/destroy/#{user}\"\n end", "def destroy\n @friendship = current_user.friendships.find_by_id(params[:id])\n @friendship = current_user.inverse_friendships.find_by_id(params[:id]) unless @friendship.present? \n @friendship.destroy\n respond_to do |format|\n format.html { redirect_to user_friend_url(current_user), notice: 'Friendship was successfully Removed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy_arbitrary_friendship\n user_1 = current_admin.users.find(params[:user_1])\n user_2 = current_admin.users.find(params[:user_2])\n Friendship.destroy_arbitrary_friendship(user_2, user_1 )\n puts \"=============================================================\"\n redirect_to :back, :notice => \"#{user_1.username} totally hates #{user_2.username} and vice-versa, DE-FRIENDSHIP COMPLETE\"\n end", "def destroy\n user = User.find(params[:id])\n begin\n current_user.friends.delete(user)\n user.friends.delete(current_user)\n rescue\n end\n\n respond_to do |format|\n format.html { redirect_to '/manage/friends' }\n end\n end", "def removeFriend(user)\n method = 'removeFriend'\n type = Boolean\n args = {:user=>user}\n return_object type,method,args,true\n end", "def unfollow\n fs = Friendship.where(user_id: current_user, friend_id: params[:id]).first\n if fs\n fs.destroy\n redirect_to user_url(params[:id]), notice: \"This user was removed from your list of friends.\"\n else\n flash.alert = \"Something went wrong. You weren't following that user.\"\n redirect_to users_url\n end\n end", "def stop_being_friends(user, target)\n transaction do\n begin\n find_friendship(target, user, ACCEPTED).update_attribute(:status, PENDING)\n find_friendship(user, target, ACCEPTED).destroy\n rescue Exception\n return false\n end\n end\n true\n end", "def friend_request\n\nend", "def drop_friend(user)\n if MuckFriends.configuration.enable_following\n Friend.revert_to_follower(self, user)\n else\n Friend.stop_being_friends(self, user)\n end\n end", "def destroy\n destroy_friendship_sent_request()\n respond_to do |format|\n format.html { redirect_to(:back, :notice => \"Friendship request from #{params[:id]} was rejected\") }\n format.xml { head :ok }\n end\n end", "def remove_from_friends_list(email)\n send_contact_command email, 'RML', '1'\n end", "def destroy\n @friendship = Friendship.find(params[:id])\n @friend = @friendship.friendshipped_for_me == @user ? @friendship.friendshipped_by_me : @friendship.friendshipped_for_me\n\n flash[:notice] =\n if @user.is_friends_with?(@friend)\n \"You are no longer a friend of #{@friend.login}\"\n else\n \"You are no longer an admirer of #{@friend.login}\"\n end\n\n @friendship.destroy_or_unaccept(@user)\n redirect_to user_path(@friend)\n\n rescue ActiveRecord::RecordNotFound\n flash[:notice] = nil\n redirect_to user_path(@user)\n end", "def destroy\n @friendship = Friendship.find(params[:id])\n @friendship_mutual = Friendship.where(:user_id => @friendship.friend_id).first\n @friendship.destroy \n @friendship_mutual.destroy\n respond_to do |format|\n format.html { redirect_to root_url }\n format.json { head :no_content }\n end\n end", "def destroy\n\t @friendable = Friendable.where(from_id: [current_user, params[:id]]).where(to_id: [current_user, params[:id]]).last\n\t @friendable.destroy\n\t flash[:notice] = \"Removed friendship.\"\n\t redirect_to :back\n\t end", "def destroy\n current_user.end_friendship!(@relationship.friend)\n respond_to do |format|\n format.html { redirect_to relationships_url }\n end\n end", "def stop_being_friends(user, target)\n return false if user.blank? 
|| target.blank?\n transaction do\n friend = self.find(:first, :conditions => {:inviter_id => target.id, :invited_id => user.id})\n friend.destroy if friend\n friend = self.find(:first, :conditions => {:inviter_id => user.id, :invited_id => target.id})\n friend.destroy if friend\n end\n true\n end", "def unsubscribe(&blk)\n pres = connection.presence_stanza('to'=>jid.bare, 'type' => 'unsubscribe')\n connection.send_stanza pres, &blk\n end", "def destroy\n @friend.destroy\n render json: {}\n end", "def destroy\n another_user = User.find params[:user_id]\n current_user.friends.delete(another_user)\n redirect_to users_path\n end", "def delete_friendship_with(friend)\n \t friendship(friend).destroy if self.is_friends_or_pending_with?(friend)\n \t end", "def destroy\n @friendship.destroy\n respond_to do |format|\n format.html { redirect_to friendships_url, notice: \"Friens request denied\" }\n format.json { head :no_content }\n end\n end", "def disinvite_member\n assignment = Assignment.find(params[:id])\n membership = StudentMembership.find(params[:membership])\n disinvited_student = membership.user\n membership.delete\n membership.save\n m_logger = MarkusLogger.instance\n m_logger.log(\"Student '#{current_user.user_name}' cancelled invitation for \" +\n \"'#{disinvited_student.user_name}'.\")\n flash_message(:success, I18n.t('student.member_disinvited'))\n redirect_to action: :student_interface, id: assignment.id\n end", "def destroy\n @friend = Friend.find(params[:id])\n @friend.destroy\n respond_with(@friend)\n end", "def revoke\n raise \"Implement me!\"\n end", "def unban_user(token, server_id, user_id)\n request(\n __method__,\n :delete,\n \"#{api_base}/guilds/#{server_id}/bans/#{user_id}\",\n Authorization: token\n )\n end", "def destroy\n REDIS.srem REDIS_SET, @user.id\n self.class.broadcast\n end", "def unblock\n card_id = params[:card_id]\n subscription = UserSubscription.get_my_paused_subscription(current_user, params[:id])\n\n if subscription.present?\n unblocked_subscription = subscription.resume_subscription(card_id)\n subscription.unblock_subscription\n end\n\n if unblocked_subscription.present?\n result = { key: \"success\", message: \"#{unblocked_subscription} has been unblocked Successfully. Please wait...\"}\n else\n result = { key: 'error', message: \"Sorry, you are not authorized for this subscription.\" }\n end\n render json: result.to_json\n end", "def reject\n other_id=params[:other_id].to_i()\n my_id=session[:user_id]\n if my_id<other_id then\n small_id=my_id\n big_id=other_id\n elsif my_id>other_id then\n small_id=other_id\n big_id=my_id\n else\n flash[:notice] = \"There was an removing the friend.\"\n redirect_to request.referrer\n return false\n end\n #NOTE: Don't copy and reuse the delete_all method for tables whose rows are dependencies.\n if Friend.destroy_all([\"(small_id=? 
AND big_id=?)\",\n small_id, big_id]) then\n flash[:notice] = \"Removed friend successfully.\"\n #redirect_to request.referrer\n return true\n else\n flash[:notice] = \"There was an error removing the friend.\"\n redirect_to request.referrer\n return false\n end\n session[:number_of_friend_invites]-=1\n end", "def delete_friend(other_user)\t\t\n\t\tif requested_friends.include?(other_user) \n\t\t\t\trequested_friends.delete(other_user)\n\t\telsif received_friends.include?(other_user) && received_relationships.find_by(friend_active_id: other_user.id).accepted?\n\t\t\t\treceived_friends.delete(other_user)\n\t\tend\n\tend", "def destroy\n\n @plant = Plant.find(params[:id])\n @friendship = @plant.friendships.find_by_friend_id(params[:friend_id])\n\n if @friendship.destroy\n flash[:notice] = \"Removed friendship.\"\n redirect_to request.referrer\n else\n flash[:error] = \"Unable to remove friend\"\n redirect_to request.referrer\n end\n end", "def blacklist(ip)\n send_req({a: :ban, key: ip})\n end", "def untrust!\n MTurkUtils.untrust_user self\n end", "def destroy\n @invite_friend = InviteFriend.find(params[:id])\n @invite_friend.destroy\n\n respond_to do |format|\n format.html { redirect_to invite_friends_url }\n format.xml { head :ok }\n end\n end", "def unprotected(&block)\n begin\n @unprotected = true\n block.call\n ensure\n @unprotected = false\n end\n end" ]
[ "0.7945566", "0.7932808", "0.7219855", "0.7083792", "0.69793594", "0.6964526", "0.6870644", "0.68424195", "0.67375034", "0.6691993", "0.66788363", "0.66154087", "0.6588053", "0.6587975", "0.657924", "0.65479064", "0.654257", "0.6537734", "0.6511017", "0.65061164", "0.6487089", "0.6472717", "0.646293", "0.6460342", "0.643699", "0.6420227", "0.6413964", "0.64085525", "0.6406165", "0.63535386", "0.6333594", "0.6332807", "0.63314486", "0.6320795", "0.63060576", "0.6289572", "0.6272151", "0.6272051", "0.62604547", "0.62583244", "0.6182955", "0.61768275", "0.6175121", "0.6133091", "0.61120284", "0.60975087", "0.6073503", "0.6024121", "0.60078037", "0.60031176", "0.60013276", "0.59905905", "0.598084", "0.59458363", "0.59458363", "0.59344196", "0.5919833", "0.59063596", "0.59037435", "0.5895829", "0.5879132", "0.58649313", "0.58607376", "0.58557564", "0.5844847", "0.58241314", "0.5803345", "0.5797371", "0.5788445", "0.57807875", "0.57639", "0.5754665", "0.5747166", "0.57457155", "0.5741836", "0.57319725", "0.5728957", "0.5725855", "0.5725613", "0.5720233", "0.5712071", "0.5707554", "0.57053626", "0.5694806", "0.5691716", "0.56876385", "0.5670158", "0.5662811", "0.5662051", "0.5660631", "0.5659227", "0.5656092", "0.5654833", "0.56534094", "0.5652855", "0.56353575", "0.56352824", "0.56345713", "0.563293", "0.56269145" ]
0.77708054
2
endpoint to return count of friends
def number_for_profile # byebug # @user=User.where(username: count_params[:username]) @user = User.find(decode_jwt(cookies.signed[:jwt])["user_id"]) numfollowers=Follow.where(followee_id: @user.id).count numblocked=Block.where(blockee_id: @user.id).count render json: {followers: numfollowers, blocked: numblocked} end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_friend_count\n response = twitter_user.request(:get, configatron.api_call_url + \"account/totals.json\")\n \n if response.is_a?(Net::HTTPSuccess)\n body = JSON.parse(response.body)\n body[\"friends\"]\n end\n end", "def get_counts\n #1 - @friend_count -> gets the number of current friends you have.\n @friend_count = current_user.active_friends.size\n \n #2 - @pending_count -> gets the number of pending friend requests you have.\n @pending_count = current_user.pending_friend_requests_to.map(&:friend).size\n end", "def friends_count\n @_raw_friend_ids.try(:count) || @_raw_friend_hashes.try(:count) || redis.hlen(friend_list_key)\n end", "def total_friends\n (friend_ids + inverse_friend_ids).count\n end", "def total_friends\n self.invited(false).count + self.invited_by(false).count\n end", "def call\n res = client.get('/api/rest/v1/users.json?action=count')\n res['data']['count']\n end", "def update_friends_count\n # registered user or not registered user\n if self.email\n self.update!({ :friends_count => self.friends.count })\n else\n self.update!({ :friends_count => UserUserLink.where(:friend_id => self.f_id).count })\n end\n end", "def list\n @emails = Friend.friends(params[:email])\n\n unless @emails.empty?\n render json:{\n success: true,\n friends: @emails,\n count: @emails.count\n }\n else\n render json: {\n success: false,\n friends: [],\n count: 0\n }\n end\n end", "def number_of_followers\n return_list_of_followers.size\n end", "def count_people(query={})\n self.simple_client.get(\"/api/v1/people/count?#{query.to_query}\")[\"count\"]\n end", "def number_of_followees\n return_list_of_followed_users.size\n end", "def following_users_count\n following_users.count\n end", "def count_user\n count = 0\n @f_net.each do |followees|\n count += 1 unless !followees or followees.empty?\n end\n count\n end", "def my_count\n count = my_followers.count\n end", "def followees_count\n follow_count_for_a(:followee)\n end", "def friends\n # Requires authorization\n raise PutioError::AuthorizationRequired if authentication_required!\n\n make_get_call('/friends/list').friends\n end", "def user_count\n respond_with json_response('user_count', User.active.count)\n end", "def show\n #@friendship = Friendship.where(fitpartner_user: params[:id])\n @friendship = Friendship.where(:fitpartner_id => params[:id], :user_id => current_user.id).count\n end", "def followers_count\n follow_count_for_a(:follower)\n end", "def total_users\n users.count\n end", "def friends_list\n @user = User.find_by(email: params[:email])\n if @user.present?\n @friends = @user.friends.page(params[:page]).per(params[:per_page]).distinct\n render json: @friends, each_serializer: Api::V1::UsersSerializer, meta: pagination_dict(@friends)\n else\n render json: {message: 'User Not Found'}, status: 404\n end\n end", "def user_count\n users.count\n end", "def fanscnt\n \tobject.user_location_favs.size\n\n end", "def followers_count\n\t\tself.followers.count\n\tend", "def all_friends\n requested_friends + accepted_friends\n end", "def count_edge\n count = 0\n @f_net.each_with_index do |followees, follower|\n count += followees_of(follower).count\n end\n count\n end", "def number_of_people\n\t\tusers.length\n\tend", "def friends\n @friends ||= neighbours.count{|sq| sq.alive?}\n end", "def friends(options={})\n perform_get(\"statuses/friends.#{Twitter.format}\", options)\n end", "def friends(options={})\n get('/friends', options)\n end", "def users\n \tobject.users.count\n end", "def Listfollowers\n follower= 
Following.where(\"followingid =:followingId\",{followingId:getUserId[0]['userId']}).count\n render json: {follower:follower}, status: :ok\n end", "def get_no_of_users\n\t\tusers.select(:id).count\n\tend", "def list\n user = User.find_by_email params[:email]\n if user\n friendship = Friendship.where user: user\n render json: {success: true, friends: friendship.map {|f| f.friend.email}, count: friendship.count}\n else\n render json: {message: \"email not found\"}\n end\n end", "def count_likes\n self.likers(Profile).length\n end", "def count\n call_client(:count)\n end", "def unread_count( params={} )\n unread_count = get_connections(\"unread_count\", params)\n return map_connections unread_count, :to => Facebook::Graph::Generic\n end", "def get_counter\n conversations = (Conversation.where(buyer_id: @current_user.id).all + Conversation.where(seller_id: @current_user.id).all)\n total = 0\n conversations.each do |conversation|\n count = conversation.messages.count - conversation.buyer_marker if conversation.buyer_id == @current_user.id\n count = conversation.messages.count - conversation.seller_marker if conversation.seller_id == @current_user.id\n total = total + count\n end\n render status: 200, json: {unread_count: total}\n end", "def count_follower\n\t\t#relationships.count - count_followed\n\n\tend", "def count_meetings\n # TODO respond with xml, only if valid session, otherwise return nil\n team = Team.find_by_id( params[:id] )\n if team\n render json: team.meetings.collect{|row| row.id}.uniq.size\n else\n render json: 0\n end\n end", "def pref_friends\n render :nothing => true and return unless request.post?\n\n @friends = []\n\n current_user.all_friends.each do |user|\n user_js = { :url => user.url, :fs => user.friendship_status, :href => user_url(user), :count => nil }\n\n lve = user.last_viewed_entries_count.to_i\n uec = user.entries_count_for(current_user)\n user_js[:count] = (uec - lve).abs if lve != uec\n @friends << user_js\n end\n\n render :json => @friends.to_json\n end", "def likes\n likers(User).count\n end", "def getCount()\n\t\t#begin\n\t\t\tcounter = 0\n\t\t\[email protected] do |username|\n\t\t\t\tcounter = counter + TwitterClient.user(username).followers_count\n\t\t\tend\n\t\t\tcounter\n\t\t#rescue\n\t\t#\t-1\n\t\t#end\n\tend", "def following_count\n sql = \"SELECT count(guid) FROM buddy WHERE relationship=#{Model::Buddy::FOLLOWING} or relationship=#{Model::Buddy::BOTH};\";\n result = @db.execute(sql)\n result[0][0].to_i\n end", "def Listfollowing\n following= Following.where(\"userid =:userId\",{userId:getUserId[0]['userId']}).count\n render json: {following:following}, status: :ok\n end", "def common\n @emails = Friend.common_friends(params[:friends])\n\n unless @emails.empty?\n render json: {\n success: true,\n friends: @emails,\n count: @emails.count\n }\n else\n render json: {\n success: false,\n friends: [],\n count: 0\n }\n end\n end", "def index\n @user = current_user\n render json: @user.friends\n end", "def refresh_friends_count\n update_attributes(\n followers_count: follower_tracker.target_count,\n following_count: following_tracker.target_count\n )\n end", "def followers_count\n @target.followers_count.to_i\n end", "def interested_users_count\n self.users.size\n end", "def count_item\n count = 0\n @g_net.each do |followers|\n count += 1 unless !followers or followers.empty?\n end\n count\n end", "def quick_get_attendance_count\n @count = TimetabledSession.find(params[:session_id]).attendees.count\n respond_to do |format|\n format.html\n format.json {render 
json: @count}\n end\n end", "def follower_count\n sql = \"SELECT count(guid) FROM buddy WHERE relationship=#{Model::Buddy::FOLLOWER} or relationship=#{Model::Buddy::BOTH};\";\n result = @db.execute(sql)\n result[0][0].to_i\n end", "def count_meetings\n team = Team.find_by_id( params[:id] )\n if team\n render( json: team.meetings.collect{|row| row.id}.uniq.size )\n else\n render( json: 0 )\n end\n end", "def index\n @friends = current_user.friends\n end", "def friends\n # @@neo = Neography::Rest.new\n friends = self.trackers & self.tracking\n end", "def index\n @relationships = current_user.friends\n end", "def likes_count\n self.likes.size\n end", "def number_of_members()\n return members().count\n end", "def friends(options={})\n limit = options[:limit] || 5000\n user.friends(:limit => limit)\n end", "def user_count; end", "def friend_requests\n friends = current_user.friends.where accepted: false\n profiles = friends.map{ |friend| Profile.find(friend.profile_id)}\n render json: profiles\n end", "def following_profiles_count\n stream_ids = favorites.streams.pluck(:favable_id).uniq\n return Stream.where(:id => stream_ids).pluck(:profile_id).uniq.count\n end", "def fetch_facebook_friends\n\t\treturn fetch_facebook_friends_or_subscribers_or_subscribedto(\"friends\")\n\tend", "def fb_friends\n graph = facebook()\n facebook().get_connection('me','friends') if graph\n end", "def friends(options={})\n options = {\n :count => 5000,\n :offset => 0,\n :fields => \"domain\"\n }.merge(options)\n\n fetch_all_method_items(:fetch_friends, options)\n end", "def friends\n active_friends + passive_friends\n end", "def message_count( params={} )\n message_count = get_connections(\"message_count\", params)\n return map_connections message_count, :to => Facebook::Graph::Generic\n end", "def count(params = {})\n @api.get(\"api/count.json\", params)\n end", "def mutual_friends(options={})\n response = user.friends.getMutual(options)\n\n return {} unless response.is_a?(Array)\n \n response.inject({}) {|h, a| h.merge!(a[\"id\"].to_s => a[\"common_count\"]) }\n end", "def member_count\n attach_info\n @member_count\n end", "def getMatchCount\n\t\trender json: Match.count(:user1 => params[:twitter_id])\n\tend", "def followers\n json[\"entry_data\"][\"ProfilePage\"].first[\"graphql\"][\"user\"][\"edge_followed_by\"][\"count\"]\n end", "def index\n @friends = @member.friends\n end", "def get_friend_id \n uri = \"https://api.twitter.com/1/friends/ids.json\" \n \n #http request for friend search start\n req = Typhoeus::Request.new(uri,\n :method =>\"get\",\n :params =>{:cursor=>\"-1\", :screen_name=>crt_user.handler}) \n #sign_request(req,uri)\n hydra = Typhoeus::Hydra.new\n hydra.queue(req)\n hydra.run\n JSON.parse(req.response.body)[\"ids\"]\n end", "def num_followers\n follower_ids = Following.where(:followed_id => self.id).pluck(:follower_id)\n follower_ids.size - 1 #don't count yourself\n end", "def count\n @battles_count = Battle.all\n respond_to do |format|\n format.html\n format.json\n end\n end", "def index\n \n @total_users = User.all.count\n\n end", "def index\n @friends = @user.friends.paginate(:page => params[:page],\n :per_page => RASTER_PER_PAGE)\n end", "def friend_requests\n # Requires authorization\n raise PutioError::AuthorizationRequired if authentication_required!\n\n make_get_call('/friends/waiting-requests').friends\n end", "def subscribers_count\n response = connection_wrapper {\n @faraday.get '/subscribers_count/', token_hash\n }\n response.body['result']\n end", "def 
follow_count\n\t\t\t\tFollow.active.for_follower( self ).count\n \t\t\tend", "def index\n @friends = current_user.friend\n end", "def friends\n #get friends page\n #get json from friends page\n #parse\n []\n end", "def member_count\n if @members.nil?\n url = open(\"#{base_url}/memberslistxml\", {:proxy => true})\n REXML::Document.new(url.read).root.elements['memberCount'].text.to_i\n else\n @members.size\n end\n end", "def users_count\n @attributes[:users_count]\n end", "def index\n if logged_in?\n @friend_id = params[:friend_id]\n\n @user = facebook_data_about '/me'\n @likes = facebook_data_about '/me/likes', :as => \"data\"\n @friends = facebook_data_about '/me/friends', :as => \"data\"\n\n if @friend_id\n @friend_id = params[:friend_id]\n @friend = facebook_data_about \"/#{@friend_id}\"\n @friends_likes = facebook_data_about \"/#{@friend_id}/likes\", :as => \"data\"\n end\n\n if @friends_likes\n @intersection = intersection_of(@likes, @friends_likes)\n end\n end\n end", "def friends\n return friendships_confirmed + inverse_friendships_confirmed\n end", "def friends\n begin\n if @@friends.size == 25 then @@friends.clear end # clear cache at 25 users\n unless @@friends[uid]\n @@friends[uid] = Set.new\n url = GRAPH_API[0] + uid + GRAPH_API[1] + oauth_token\n data = JSON.parse(open(url).read)\n data[\"friends\"][\"data\"].each { |h| @@friends[uid].add h[\"id\"] }\n @@friends[uid]\n else\n @@friends[uid]\n end\n rescue => e\n p e\n end\n end", "def fb_friends\n fb_user = FbGraph::User.new('me', :access_token => self.access_token)\n fb_user.friends\n end", "def count_of_associated_requests\n self.requests.functional.count\n end", "def count_results\n # TODO respond with xml, only if valid session, otherwise return nil\n team = Team.find_by_id( params[:id] )\n if team\n render json: ( team.meeting_individual_results.count + team.meeting_relay_results.count )\n else\n render json: 0\n end\n end", "def count_followed\n\t\t#relationships.where(follower_id: self.id).count\n\tend", "def index\n render_index @user.friends\n end", "def all_friends\n friends friendships\n end", "def followers_count(id, scope = Amico.default_scope_key)\n Amico.redis.zcard(\"#{Amico.namespace}:#{Amico.followers_key}:#{scope}:#{id}\")\n end", "def find_friend\n\n \t\tif params[:email].blank?\n \t\t\tlogger.info(\"[FriendUserController]find_friend == email is blank !!!\")\n \t\t\treturn render :json => {:success => false}\n \t\tend\n\t\n \t\temail = params[:email]\n\n \t\t#get a list of friends\n \t\tfriend_emails = get_friend_email(email)\n\n \t\t#get a list of block friends by email\n \t\tblock_mails = BlockUser.where(:email=>email).map(&:block_email)\n\n \t\tfriend_emails = friend_emails - block_mails\n \t\t\n \t\tlogger.info(\"[FriendUserController]find_friend == friends list =======>#{friend_emails}\")\n\n \t\treturn render :json => {:success => true,:friends=>friend_emails,:count=>friend_emails.size}.to_json\n \t\t\n \tend", "def get_friends\n @person = Person.find_by_guid(params['user_id'])\n if ! @person\n render_json :status => :not_found and return\n end\n\n if params['sortBy'] == \"status_changed\"\n params['sortOrder'] ||= \"ascending\"\n @friends = @person.contacts.all\n @friends.sort!{|a,b| sort_by_status_message_changed(a, b, params['sortOrder']) }\n else\n @friends = @person.contacts\n end\n @friends.filter_paginate!(params[:per_page], params[:page]){true}\n @friends.collect! 
{ |p| p.to_hash(@user, @client)}\n render_json :entry => @friends, :size => @friends.count_available and return\n end", "def display_count_for(model)\n if model.kind_of?(Collection)\n contains_count_for_friendlier_id(model.friendlier_id)\n else\n member_count_for_friendlier_id(model.friendlier_id)\n end\n end", "def cult_population\n my_followers.count\n end" ]
[ "0.8007224", "0.7925994", "0.76246786", "0.75055397", "0.7488557", "0.7056454", "0.6957432", "0.69227356", "0.6920694", "0.68696636", "0.68534654", "0.6825913", "0.68071926", "0.67635906", "0.669968", "0.667672", "0.6559546", "0.6542103", "0.6507705", "0.64816487", "0.64571214", "0.6447875", "0.6446815", "0.6411517", "0.6395101", "0.63782126", "0.6360399", "0.6351923", "0.63510793", "0.6338654", "0.6338014", "0.63358176", "0.63256526", "0.631274", "0.63124335", "0.63102144", "0.6308517", "0.63069487", "0.630151", "0.6285344", "0.6258726", "0.6256954", "0.6254918", "0.62388486", "0.62383044", "0.62374794", "0.6229902", "0.6219022", "0.6213821", "0.6210522", "0.6205755", "0.6203248", "0.6194523", "0.6185942", "0.6166076", "0.6150618", "0.6145795", "0.61433566", "0.6140963", "0.6140178", "0.61364275", "0.61310834", "0.6125523", "0.6125258", "0.6118765", "0.61176836", "0.6112884", "0.61103964", "0.61057484", "0.6102613", "0.6097078", "0.6092163", "0.60798246", "0.60709906", "0.60673535", "0.60660887", "0.6064317", "0.60596883", "0.60592735", "0.6053119", "0.6046984", "0.6041172", "0.6038999", "0.60291743", "0.6027372", "0.60265666", "0.60239416", "0.60034096", "0.60002244", "0.5995159", "0.59905", "0.59898204", "0.5984987", "0.59832686", "0.5982122", "0.59817725", "0.59811956", "0.59784436", "0.5977716", "0.59769934" ]
0.6785197
13
GET /events GET /events.json
def under_construction end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def events\n response = self.class.get('/v1/events.json')\n response.code == 200 ? JSON.parse(response.body) : nil\n end", "def get_events\n Resources::Event.parse(request(:get, \"Events\"))\n end", "def get_events()\n @client.make_request(:get, @client.concat_user_path(\"#{CALL_PATH}/#{id}/events\"))[0]\n end", "def index\n #returns all events from eventbrite API, need to change to pull from her endpoint\n @eventList = Event.retrieve_all_events params\n render json: @eventList, status: 200\n end", "def get_events\n response = request(:get, \"/devmgr/v2/events\")\n #status(response, 200, 'Failed to get current events from server')\n #JSON.parse(response.body)\n response\n end", "def index\n @events = Event.all\n render json: @events, status: 200\n end", "def index\n @events = Event.all\n render json: @events\n end", "def index\n @events = current_user.events\n\n render json: @events\n end", "def index\n @events = Event.find(:all)\n respond_to do |format|\n format.html\n format.json\n end\n end", "def index\n @events = Event.all\n\n render json: @events\n end", "def index\n @event = Event.all\n render json: @event\n end", "def index\n @events = Event.live\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.live\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n respond_to do |format|\n format.html\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n respond_to do |format|\n format.html \n format.json do\n render :json => {events: @events}\n end\n end\n end", "def index\n response = { events: Event.all }\n respond_to do |format|\n format.json { render json: response.to_json }\n format.html { render :index }\n end\n end", "def get_events\n if @user.uuid.present?\n @events = @user.events.active_events.page(params[:page])\n paginate json: @events, per_page: params[:per_page]\n elsif @user.uuid == \"guest\"\n @events = Com::Nbos::Events::Event.active_events.where(tenant_id: @user.tenant_id)\n render json: @events\n else\n render :json => {messageCode: \"bad.request\", message: \"Bad Request\"}, status: 400\n end\n end", "def index\n\t\t@events = Event.all.order('created_at desc')\n\n\t\trespond_to do |format|\n\t\t\tformat.html\n\t\t\tformat.json { render :json => @events }\n\t\tend\n\tend", "def index\n @events = Event.all\n respond_to do |format|\n format.html \n format.json \n end\n end", "def get_events(args)\n\tapi_url = \"#{@base_url}/#{args[:collection]}/#{args[:key]}/events/#{args[:event_type]}\"\n\tdo_the_get_call( url: api_url, user: @user )\nend", "def events(project_id, options = {})\n get \"projects/#{project_id}/events\", options\n end", "def index\n @events = current_user.events\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def show\n @event = Event.find(params[:id])\n render json: @event\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n 
end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n \n @events = current_user.events\n \n \n respond_to do |format|\n format.html {}\n format.json { render json: Event.events_to_json(@events) }\n end\n end", "def events\n data[\"events\"]\n end", "def events\n url = 'https://api.artic.edu/api/v1/exhibitions?limit=35'\n\n res = RestClient.get(url)\n JSON.parse(res)\nend", "def index\n @events = @calendar.events.all\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @events }\n end\n end", "def index\n\t\t@events = current_user.events\n\n\t\trespond_to do |format|\n\t\t\tformat.html # index.html.erb\n\t\t\tformat.json { render json: @events }\n\t\tend\n\tend", "def index\n @events = @category.events\n render json: @events \n end", "def show\n render json: @event\n end", "def show\n render json: @event\n end", "def show\n render json: @event\n end", "def show\n render json: @event\n end", "def show\n render json: @event\n end", "def index\n event = Event.find(params[:event_id])\n render json: event.route, status: :ok\n end", "def index\n render json: Event.all, status: :ok\n end", "def show\n render json: @event, status: :ok\n end", "def index\n @upcoming_events = Event.upcoming\n @past_events = Event.past\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n respond_with(@events)\n end", "def get_event(session, options={})\n json_request \"get\", {:session => session}, options\n end", "def index\n @events = getUpcomingEvents()\n \n @page_title = \"Events\"\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def get(event_id)\n @client.request \"events/#{event_id}\"\n end", "def index\n\t\t@events = Event.page(params[:page]).per(10)\n\n\t\trespond_to do |format|\n\t\t\tformat.html\n\t\t\tformat.json {\n\t\t\t\trender :json => @events.to_json\n\t\t\t}\n\t\tend\n\n\tend", "def events\n collection(\"events\")\n end", "def show\n event_id = params[:id]\n if event_id.present?\n @event = Com::Nbos::Events::Event.active_events.where(id: event_id, tenant_id: @user.tenant_id)\n if @event.present?\n render :json => @event\n else\n render :json => {messageCode: \"event.notfound\", message: \"Event Not Found\"}, status: 404\n end\n else\n render :json => {messageCode: \"bad.request\", message: \"Bad Request\"}, status: 400\n end\n 
end", "def event(event, options = {})\n get \"events/#{event}\", options\n end", "def past_events\n @events = Event.past\n render json: @events, include: :talks\n end", "def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def get_event_list ( year )\n get_api_resource \"#{@@api_base_url}events/#{year}\"\n end", "def index\n begin\n events = Event.all\n render :json => {events: ActiveModel::ArraySerializer.new(events, each_serializer: EventsSerializer), :code => 200}, status: :ok\n rescue Exception => e\n logger.error {\"Error while populating list of events. ErrorMessage: #{e.message}, Params: #{params.inspect}\"}\n render json: {error: e.message, code: 500}\n end\n end", "def show\n \trender json: @event\n end", "def show\n @events = fetch_events\n end", "def show\n render json: EventSerializer.new(@event).as_json, status: 200\n end", "def list\n @events = Event.coming_events\n respond_to do |format|\n format.html do\n render layout: 'events'\n end\n format.json do \n events = @events.map {|event| {event: event, users: event.users, applied: event.users.include?(current_user) }}\n render json: events \n end\n end\n end", "def index\n if params[:query].present?\n @events = GroupEvent.send(params[:query])\n else\n @events = GroupEvent.published\n end\n\n render json: @events\n end", "def fullcalendar_events_json\n events.map do |event|\n {\n id: event.id.to_s,\n title: event.name,\n start: event.starts_at.strftime('%Y-%m-%d %H:%M:%S'),\n end: event.ends_at.strftime('%Y-%m-%d %H:%M:%S'),\n allDay: event.all_day,\n url: event_path(event)\n }\n end\n end", "def show\n @event = Event.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event_event = Event::Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event_event }\n end\n end", "def index\n if params[:user]\n @events = Event.where(user: params[:user]).first\n else\n @events = Event.all.order('created_at asc')\n end\n\n render json: @events, :only => [:id, :date, :user, :event_type, :message, :otheruser]\n end", "def show\n @event = Event.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def details\n get(\"v1/event/#{@id}\")\n end", "def show\n @event = Event.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event } \n end\n end", "def index\n @events = Event.all\n @event = Event.new\n\n respond_to do |format|\n format.html\n format.json { render 'events/index', events: @events }\n end\n end", "def show\n render json: format_event(@event)\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = 
Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end" ]
[ "0.8337474", "0.8240041", "0.7943668", "0.79278964", "0.7768141", "0.7740975", "0.7670209", "0.7665706", "0.765914", "0.76425344", "0.7621046", "0.761625", "0.761625", "0.76141375", "0.75690866", "0.75230986", "0.7489482", "0.74811614", "0.7470508", "0.7442612", "0.74410146", "0.7440337", "0.74389035", "0.7410911", "0.74098724", "0.74098724", "0.74098724", "0.74098724", "0.74098724", "0.74098724", "0.74098724", "0.74098724", "0.74098724", "0.74098724", "0.7400629", "0.739856", "0.73748904", "0.73687345", "0.73670644", "0.7364443", "0.736428", "0.73617107", "0.73617107", "0.73617107", "0.73617107", "0.73617107", "0.73590285", "0.73522234", "0.73342603", "0.73254657", "0.732164", "0.729601", "0.72958374", "0.7287818", "0.7282796", "0.72739697", "0.72733605", "0.7248011", "0.72444135", "0.72289705", "0.72289705", "0.72235423", "0.72131395", "0.72122496", "0.71830565", "0.71819556", "0.7171345", "0.7134091", "0.7128304", "0.7124135", "0.71229357", "0.7115039", "0.7110467", "0.7110467", "0.7104101", "0.7102504", "0.71024567", "0.71014696", "0.7100226", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634", "0.70999634" ]
0.0
-1
GET /events/1 GET /events/1.json
def show
end
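The scaffold action above is empty because the record lookup and rendering are left implicit. A fully spelled-out variant (an assumption mirroring the retrieved negatives below, not the original source) might look like:

# Hypothetical expanded form of the empty show action above; assumes an
# Event model and that no set_event-style before_action has already run.
def show
  @event = Event.find(params[:id])

  respond_to do |format|
    format.html                           # renders show.html.erb
    format.json { render json: @event }
  end
end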
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def show\n @event = Event.find(params[:id])\n render json: @event\n end", "def get(event_id)\n @client.request \"events/#{event_id}\"\n end", "def show\n event_id = params[:id]\n if event_id.present?\n @event = Com::Nbos::Events::Event.active_events.where(id: event_id, tenant_id: @user.tenant_id)\n if @event.present?\n render :json => @event\n else\n render :json => {messageCode: \"event.notfound\", message: \"Event Not Found\"}, status: 404\n end\n else\n render :json => {messageCode: \"bad.request\", message: \"Bad Request\"}, status: 400\n end\n end", "def get_events()\n @client.make_request(:get, @client.concat_user_path(\"#{CALL_PATH}/#{id}/events\"))[0]\n end", "def events\n response = self.class.get('/v1/events.json')\n response.code == 200 ? JSON.parse(response.body) : nil\n end", "def index\n #returns all events from eventbrite API, need to change to pull from her endpoint\n @eventList = Event.retrieve_all_events params\n render json: @eventList, status: 200\n end", "def index\n @events = Event.all\n render json: @events, status: 200\n end", "def index\n @event = Event.all\n render json: @event\n end", "def index\n @events = Event.find(:all)\n respond_to do |format|\n format.html\n format.json\n end\n end", "def index\n respond_to do |format|\n format.html\n format.json { render json: @events }\n end\n end", "def get_events\n Resources::Event.parse(request(:get, \"Events\"))\n end", "def show\n @event = Event.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def index\n @events = Event.live\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.live\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def details\n get(\"v1/event/#{@id}\")\n end", "def index\n @events = Event.all\n render json: @events\n end", "def show\n\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @myevent = Myevent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @myevent }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = 
Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n @event_event = Event::Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event_event }\n end\n end", "def index\n event = Event.find(params[:event_id])\n render json: event.route, status: :ok\n end", "def index\n @events = Event.all\n\n render json: @events\n end", "def show\n render json: @event, status: :ok\n end", "def index\n @events = Event.all\n respond_to do |format|\n format.html \n format.json do\n render :json => {events: @events}\n end\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.json { render json: @events }\n end\n end", 
"def show\n @event = Event.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event } \n end\n end", "def show\n @event = Event.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @event }\n end\n end", "def show\n @event = Event.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @event }\n end\n end", "def show\n render json: @event\n end", "def show\n render json: @event\n end", "def show\n render json: @event\n end", "def show\n render json: @event\n end", "def show\n render json: @event\n end", "def index\n @events = Event.all\n respond_to do |format|\n format.html \n format.json \n end\n end", "def show\n render json: EventSerializer.new(@event).as_json, status: 200\n end", "def event(event, options = {})\n get \"events/#{event}\", options\n end", "def index\n response = { events: Event.all }\n respond_to do |format|\n format.json { render json: response.to_json }\n format.html { render :index }\n end\n end", "def index\n render json: Event.all, status: :ok\n end", "def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def index\n @events = current_user.events\n\n render json: @events\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def event(id, options = {})\n get \"events/#{id}\", options\n end", "def index\n @events = Event.all\n @event = Event.new\n\n respond_to do |format|\n format.html\n format.json { render 'events/index', events: @events }\n end\n end", "def show\n @event = Event.find(params[:id])\n @client = Client.find(@event.client_id)\n @event_type = EventType.find(@event.event_type_id)\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end", "def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @events }\n end\n end", "def get_event ( event_key )\n 
get_api_resource \"#{@@api_base_url}event/#{event_key}\"\n end", "def get_events(args)\n\tapi_url = \"#{@base_url}/#{args[:collection]}/#{args[:key]}/events/#{args[:event_type]}\"\n\tdo_the_get_call( url: api_url, user: @user )\nend", "def index\n @event = Event.find(params[:event_id])\n\n end", "def index\n @upcoming_events = Event.upcoming\n @past_events = Event.past\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def show\n \trender json: @event\n end", "def index\n @events = current_user.events\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n @events = getUpcomingEvents()\n \n @page_title = \"Events\"\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end", "def index\n respond_with(@events)\n end", "def show\n render json: format_event(@event)\n end", "def show\n @current_event = CurrentEvent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @current_event }\n end\n end", "def get_events\n if @user.uuid.present?\n @events = @user.events.active_events.page(params[:page])\n paginate json: @events, per_page: params[:per_page]\n elsif @user.uuid == \"guest\"\n @events = Com::Nbos::Events::Event.active_events.where(tenant_id: @user.tenant_id)\n render json: @events\n else\n render :json => {messageCode: \"bad.request\", message: \"Bad Request\"}, status: 400\n end\n end", "def show\n begin\n @event = Event.find(params[:id])\n rescue ActiveRecord::RecordNotFound\n logger.error \"Attempt to show invalid event #{params[:id]}\"\n redirect_to events_path, notice: 'Invalid event ID'\n else\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end\n end", "def index\n @events = @category.events\n render json: @events \n end", "def index\n\t\t@events = Event.all.order('created_at desc')\n\n\t\trespond_to do |format|\n\t\t\tformat.html\n\t\t\tformat.json { render :json => @events }\n\t\tend\n\tend", "def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.json { render json: @event, methods: [:talks] }\n end\n end", "def get_events\n response = request(:get, \"/devmgr/v2/events\")\n #status(response, 200, 'Failed to get current events from server')\n #JSON.parse(response.body)\n response\n end", "def index\n if params[:user]\n @events = Event.where(user: params[:user]).first\n else\n @events = Event.all.order('created_at asc')\n end\n\n render json: @events, :only => [:id, :date, :user, :event_type, :message, :otheruser]\n end", "def get_event(session, options={})\n json_request \"get\", {:session => session}, options\n end", "def index\n \n @events = current_user.events\n \n \n respond_to do |format|\n format.html {}\n format.json { render json: Event.events_to_json(@events) }\n end\n end", "def show\n @calendar = Calendar.find(params[:id])\n @events = Event.find(@calendar.event_ids)\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @calendar }\n end\n end" ]
[ "0.75029767", "0.74019474", "0.7361382", "0.7348975", "0.73475033", "0.7338018", "0.7317425", "0.72875094", "0.72813755", "0.7246173", "0.72317284", "0.7219172", "0.7219172", "0.7218839", "0.7218839", "0.721464", "0.7204848", "0.71989256", "0.7196662", "0.71925515", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7192214", "0.7190171", "0.7189989", "0.71858066", "0.71843475", "0.71817815", "0.7178166", "0.716525", "0.71637964", "0.7158998", "0.71580267", "0.7120116", "0.7120116", "0.7120116", "0.7120116", "0.7120116", "0.7104676", "0.7098543", "0.70866513", "0.7075021", "0.7071629", "0.70692325", "0.70692325", "0.7067004", "0.70606047", "0.70606047", "0.70606047", "0.70606047", "0.70606047", "0.70606047", "0.70606047", "0.70606047", "0.70606047", "0.70606047", "0.7050251", "0.7043129", "0.70385677", "0.70330113", "0.7027942", "0.7025206", "0.70196456", "0.6993209", "0.69843143", "0.69733816", "0.69682246", "0.69497913", "0.6949218", "0.6943893", "0.6929541", "0.69259447", "0.6922537", "0.69194067", "0.6912311", "0.6893206", "0.689077", "0.687633", "0.6853893", "0.6851784" ]
0.0
-1
POST /events POST /events.json
def create
  expire_action :action => [:shadyside, :south_side, :lawrenceville, :oakland, :bloomfield, :strip_district, :downtown]
  @event = current_user.events.build(event_params)
  #@event = Event.new(event_params)
  if @event.day == "Weekdays"
    @event.day = "Monday"
    @event_tue = current_user.events.build(event_params)
    @event_tue.day = "Tuesday"
    @event_wed = current_user.events.build(event_params)
    @event_wed.day = "Wednesday"
    @event_thu = current_user.events.build(event_params)
    @event_thu.day = "Thursday"
    @event_fri = current_user.events.build(event_params)
    @event_fri.day = "Friday"
    respond_to do |format|
      if @event.save && @event_tue.save && @event_wed.save && @event_thu.save && @event_fri.save
        #EventMailer.sample_email(current_user, @event).deliver
        Venue.where(id: @event.venue_id).first.update_attribute(:venue_verify, Time.now)
        format.html { redirect_to Venue.where(id: @event.venue_id).first, notice: 'Hour was successfully created.' }
        format.json { head :no_content }
        format.js { render :layout => false }
      else
        format.html { render :new }
        format.json { render json: @event.errors, status: :unprocessable_entity }
      end
    end
  elsif @event.day == "Everyday"
    @event.day = "Monday"
    @event_tue = current_user.events.build(event_params)
    @event_tue.day = "Tuesday"
    @event_wed = current_user.events.build(event_params)
    @event_wed.day = "Wednesday"
    @event_thu = current_user.events.build(event_params)
    @event_thu.day = "Thursday"
    @event_fri = current_user.events.build(event_params)
    @event_fri.day = "Friday"
    @event_sat = current_user.events.build(event_params)
    @event_sat.day = "Saturday"
    @event_sun = current_user.events.build(event_params)
    @event_sun.day = "Sunday"
    respond_to do |format|
      if @event.save && @event_tue.save && @event_wed.save && @event_thu.save && @event_fri.save && @event_sat.save && @event_sun.save
        #EventMailer.sample_email(current_user, @event).deliver
        Venue.where(id: @event.venue_id).first.update_attribute(:venue_verify, Time.now)
        format.html { redirect_to Venue.where(id: @event.venue_id).first, notice: 'Hour was successfully created.' }
        format.json { head :no_content }
        format.js { render :layout => false }
      else
        format.html { render :new }
        format.json { render json: @event.errors, status: :unprocessable_entity }
      end
    end
  elsif @event.day == "Weekend"
    @event_sat = current_user.events.build(event_params)
    @event_sat.day = "Saturday"
    @event_sun = current_user.events.build(event_params)
    @event_sun.day = "Sunday"
    respond_to do |format|
      if @event.save && @event_sun.save
        #EventMailer.sample_email(current_user, @event).deliver
        Venue.where(id: @event.venue_id).first.update_attribute(:venue_verify, Time.now)
        format.html { redirect_to Venue.where(id: @event.venue_id).first, notice: 'Hour was successfully created.' }
        format.json { head :no_content }
        format.js { render :layout => false }
      else
        format.html { render :new }
        format.json { render json: @event.errors, status: :unprocessable_entity }
      end
    end
  else
    respond_to do |format|
      if @event.save
        #EventMailer.sample_email(current_user, @event).deliver
        Venue.where(id: @event.venue_id).first.update_attribute(:venue_verify, Time.now)
        format.html { redirect_to Venue.where(id: @event.venue_id).first, notice: 'Hour was successfully created.' }
        format.json { head :no_content }
        format.js { render :layout => false }
      else
        format.html { render :new }
        format.json { render json: @event.errors, status: :unprocessable_entity }
      end
    end
  end
end
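The action above repeats the same build/save/respond_to block once per branch, and in the "Weekend" branch @event keeps the day "Weekend" while @event_sat is built but never saved, which looks unintended. A minimal refactor sketch, not part of the original controller and assuming event_params behaves like ordinary ActionController strong parameters with a :day key, could expand the selected day group up front:

# Hypothetical helper (an assumption, not in the original document):
# expands "Weekdays"/"Weekend"/"Everyday" into concrete day names and
# builds one Event per day for the current user.
DAY_GROUPS = {
  "Weekdays" => %w[Monday Tuesday Wednesday Thursday Friday],
  "Weekend"  => %w[Saturday Sunday],
  "Everyday" => %w[Monday Tuesday Wednesday Thursday Friday Saturday Sunday]
}.freeze

def build_events_for_selected_days
  days = DAY_GROUPS.fetch(event_params[:day], [event_params[:day]])
  days.map { |day| current_user.events.build(event_params.merge(day: day)) }
end

The create action would then call this helper once, check events.map(&:save).all? inside a single respond_to block, and keep the Venue.update_attribute and redirect logic in one place instead of four.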
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_event event, data={}\n data[:event] = event\n post '/event', data\n end", "def create\n event = Event.new(event_params)\n event.save!\n render json: event\n end", "def create\n Rails.logger.debug(\"Received event #{params[:event]}\")\n head :ok\n end", "def create\n @event = Event.new(params[:event])\n\n if @event.save\n render json: @event, status: :created, location: @event\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end", "def create\n megam_rest.post_event(to_hash)\n end", "def create\n @event = Event.new(event_params)\n\n if @event.save\n \tdata = { data: @event, status: :created, message: \"Event was successfully created.\" }\n render :json => data\n else\n \tdata = { data: @event.errors, status: :unprocessable_entity }\n render :json => data\n end\n end", "def create\n @event = Event.new(event_params)\n if @event.save\n head :created\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end", "def create\n puts params[:event]\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to events_path, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n \n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, :notice => 'Event was successfully created.' }\n format.json { render :json => @event, :status => :created, :location => @event }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.json { render :show, status: :created, location: @event }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.json { render :show, status: :created, location: @event }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @user = User.find_by_authentication_token(params[:auth_token])\n @event = Event.new.from_json(params[:event])\n @event.user_id = @user.id\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n @event.organizer = current_user\n\n if @event.save\n render json: @event, status: :created, location: @event\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end", "def save\n event = params\n # This assumes that all keys exists. 
Yay no error handling...\n toSave = Event.new(update_type: event[:event],\n start_time: event[:payload][:event][:start_time_pretty],\n end_time: event[:payload][:event][:end_time_pretty],\n location: event[:payload][:event][:location],\n invitee_name: event[:payload][:invitee][:name],\n duration: event[:payload][:event_type][:duration],\n event_kind: event[:payload][:event_type][:kind])\n toSave.save\n render json: {}, status: 200\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, :notice => 'Event was successfully created.' }\n format.json { render :json => @event, :status => :created, :location => @event }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: t(:event_created) }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n params['user_id'] = current_user.id if current_user\n @event = Event.new(event_params)\n\n if @event.save\n render json: { location: format_event(@event) }, status: :created\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to \"/#{@event.url}\" }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def push_events\n saved = []\n jsonHash = request.POST[:_json];\n jsonHash.each do |jsonEvent|\n event = Event.new\n event.race_id = jsonEvent[\"raceId\"]\n event.walker_id = jsonEvent[\"walkerId\"]\n event.eventId = jsonEvent[\"eventId\"]\n event.eventType = jsonEvent[\"type\"]\n event.eventData = jsonEvent[\"data\"]\n event.batteryLevel = jsonEvent[\"batL\"]\n event.batteryState = jsonEvent[\"batS\"]\n event.timestamp = Time.zone.parse(jsonEvent[\"time\"])\n if event.save # if new\n saved << jsonEvent[\"eventId\"]\n if event.race_id != 0 # if not unknown race_id\n after_create(event)\n end\n else # if exists\n saved << jsonEvent[\"eventId\"]\n puts \"Not Saved!\" # debug print\n puts jsonEvent # debug print \n end\n end\n render :json => {:savedEventIds => saved}\n end", "def create\n result = Event::CreateEvent.perform(event_context)\n\n respond_to do |format|\n if result.success?\n @event = result.event\n format.json { render action: 'show', status: :created }\n else\n format.json { render json: { :errors => result.errors.full_messages }, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n if @event.save\n render :show, status: :created, location: @event\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end", "def create\n @event = Events::Event.new(event_params)\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' 
}\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to events_path, notice: \"Event #{@event} was successfully created.\" }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to events_path, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: events_path(@event) }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, event: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n params[:event] = convert_datetimes( params[:event] )\n @event = @current_account.events.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' 
}\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' 
}\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n # render json: params[:event]\n temp_event = Event.create(\n name: params[:event][:name],\n location: params[:event][:location],\n date: params[:event][:date],\n time: params[:event][:time],\n budget: params[:event][:budget],\n user: current_user\n )\n redirect_to \"/items?event=#{temp_event.id}\"\n end", "def create\n \n @event = Event.new(event_params)\n \n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: \"Event was successfully created.\" }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event_event = Event::Event.new(params[:event_event])\n\n respond_to do |format|\n if @event_event.save\n format.html { redirect_to @event_event, notice: 'Event was successfully created.' }\n format.json { render json: @event_event, status: :created, location: @event_event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event_event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n # @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to events_path, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to dashboard_home_path }\n format.json { render 'event', status: :created, event: @event }\n else\n format.html { render dashboard_home_path }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' 
}\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n logger.debug @event.errors.inspect\n format.html { redirect_to @event, notice: 'データが新規作成されました。' }\n format.json { render :show, status: :created, location: @event }\n else\n logger.debug @event.errors.to_hash(true)\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to new_event_path, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' 
}\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' 
}\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' 
}\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, flash: {success: 'Event was successfully created.'} }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: \"Event was successfully created.\" }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: \"Event was successfully created.\" }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, success: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'El evento fue creado exitosamente.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' 
}\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n flash[:success] = \"Event was successfully created.\"\n format.html { redirect_to @event }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\r\n @event = Event.new(event_params)\r\n convert_timezone @event\r\n event_type_status @event\r\n if @event.save_without_exception\r\n update_theme @event\r\n add_event_categories @event\r\n add_event_location @event\r\n create_group_guest_list @event\r\n add_photos @event\r\n # Create Groups and contacts through CSV\r\n contacts_imports\r\n render json: SuccessResponse.new(\r\n code: 200, message: 'Event Created.', location: '/events/List?id=' + @event.id.to_s, eventID: @event.id\r\n ), adapter: :json, status: :ok\r\n else\r\n render json: ErrorResponse.new, adapter: :json, status: :unprocessable_entity\r\n end\r\n end", "def create\n @event = current_user.events.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: t(:event_success) }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_events\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Aula cadastrada com sucesso.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to action: :index, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to new_user_event_path(current_user), notice: 'event was successfully created.' }\n format.json\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n @event.url = BASE_URL + @event.name.gsub(' ', '_')\n \n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' 
}\n format.json { render action: 'show', status: :created, location: @event }\n else\n format.html { render action: 'new' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def post_event(url, event, payload_type, payload)\n body = {\n :event => event,\n :payload_type => payload_type }\n body[:payload] = payload if payload\n\n http_post(url) do |req|\n req.headers['Content-Type'] = 'application/json'\n req.body = body.to_json\n req.params['verification'] = 1 if event == 'verification'\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render action: 'show', status: :created, location: @event }\n else\n format.html { render action: 'new' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render action: 'show', status: :created, location: @event }\n else\n format.html { render action: 'new' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = @calendar.events.new(event_params)\n respond_to do |format|\n if @event.save\n format.json { render json: @event }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n flash[:success] = \"Wydarzenie zostało utworzone.\"\n format.html {redirect_to @event}\n format.json {render :show, status: :created, location: @event}\n else\n format.html {render :new}\n format.json {render json: @event.errors, status: :unprocessable_entity}\n end\n end\n end", "def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to event_registration_path(id: @event.id), notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.new(event_params)\n @event.creator = @current_user\n\n if @event.save\n @event.users.each do |user|\n p \"event user = #{user.name}\"\n user.send_event_push(PushTypes::NEW_EVENT, current_user.to_push, @event.title)\n end\n else\n render json: @event.errors, status: :unprocessable_entity\n return\n end\n end", "def create\n @event = Event.new(event_params)\n if @event.save\n render json: @event, status: 201\n @user_event = UserEvent.create(admin: true, event_id: @event.id, user_id: current_user.id)\n else\n render json: { message: \"Please make sure to fill all required fields.\" }, status: 401\n end\n end", "def create\n @event = Event.new(event_params)\n respond_to do |format|\n if @event.save\n track_activity @event\n format.html { redirect_to :back, notice: 'Event was successfully created.' 
}\n format.json { render :show, status: :created, location: @event }\n after_event_created_mail @event\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @myevent = Myevent.new(params[:myevent])\n\n respond_to do |format|\n if @myevent.save\n format.html { redirect_to @myevent, notice: 'Myevent was successfully created.' }\n format.json { render json: @myevent, status: :created, location: @myevent }\n else\n format.html { render action: \"new\" }\n format.json { render json: @myevent.errors, status: :unprocessable_entity }\n end\n end\n end" ]
[ "0.7714071", "0.7611226", "0.76028967", "0.7541319", "0.7444731", "0.73206913", "0.73138195", "0.728203", "0.7251226", "0.7235907", "0.7235907", "0.7215051", "0.71682763", "0.7150409", "0.7126664", "0.7118896", "0.7117831", "0.71162695", "0.70964044", "0.70907074", "0.7083036", "0.7081109", "0.7080767", "0.7071589", "0.7057984", "0.70422375", "0.7018503", "0.7018503", "0.7018503", "0.7018503", "0.7018503", "0.7018503", "0.7018503", "0.7018503", "0.7018503", "0.7018503", "0.7018503", "0.7018503", "0.7018503", "0.7016941", "0.70167124", "0.70091015", "0.70081246", "0.6989661", "0.6987218", "0.6970633", "0.6970633", "0.6966775", "0.6948742", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6948119", "0.6942416", "0.6936477", "0.69359535", "0.69359535", "0.69318086", "0.69268054", "0.6907236", "0.6905569", "0.69051725", "0.6904514", "0.6902843", "0.69011873", "0.6899826", "0.68961006", "0.68811166", "0.68746495", "0.68642014", "0.68642014", "0.6843213", "0.68419445", "0.6836244", "0.68352246", "0.6820027", "0.68000513", "0.6791519" ]
0.0
-1
PATCH/PUT /events/1 PATCH/PUT /events/1.json
def update
  expire_action :action => [:shadyside, :south_side, :lawrenceville, :oakland, :bloomfield, :strip_district, :downtown]
  respond_to do |format|
    if @event.update(event_params)
      Venue.where(id: @event.venue_id).first.update_attribute(:venue_verify, Time.now)
      format.html { redirect_to Venue.where(id: @event.venue_id).first, notice: 'Hour was successfully updated.' }
      format.json { render :show, status: :ok, location: @event }
    else
      format.html { render :edit }
      format.json { render json: @event.errors, status: :unprocessable_entity }
    end
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def patch_event\n user_id = params[\"user_id\"]\n group_id = params[\"group_id\"]\n event_id = params[\"event_id\"]\n\n #TODO Handle 404 if event not found\n event = Event.find(event_id)\n\n json_body = JSON.parse(request.body.read)\n\n @@event_service.patch_event(json_body, user_id, group_id, event_id)\n\n render status: :ok, text: \"\"\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.json { head :no_content }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n if @event.update(event_params(params))\n render json: @event, status: 200\n else\n render :json => @event.errors, :status => 422\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.json { render json: @event }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n if @event.update(event_params)\n render json: @event, status: 201\n else\n render json: { message: \"Error. Error. Please try again.\"}, status: 400\n end\n end", "def update\n\tif @event.update(event_params)\n head :no_content\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end", "def update\n return forbidden unless user_is_owner\n return bad_request unless @event.update_attributes(event_params)\n render json: @event, status: :ok\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.json { render :show, status: :ok, location: @event }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.json { render :show, status: :ok, location: @event }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n if @event.update(event_params)\n render json: { location: format_event(@event) }\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n @event.update(status: \"Pending\")\n else\n @reopen = true\n format.json { render json: @event.errors, status: :unprocessable_entity }\n format.html { render :show }\n end\n end\n end", "def update\n \n \n @event = Event.find(params[:id])\n\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: t(:event_updated) }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n \n end\n end", "def update\n @event = Event.find(params[:id])\n\n if @event.update(params[:event])\n head :no_content\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end", "def pupdate\n @event = Event.find(params[:id])\n respond_to do |format|\n if @event.update_attributes(JSON.parse(params[:event]))\n format.html { redirect_to @event, notice: 'Event was successfully updated.' 
}\n format.json { head :no_content}\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n # @event = Event.find(params[:id])\n\n if @event.update(event_params)\n head :no_content\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end", "def update\n @event = Event.find(params[:id])\n\n if @event.update(event_params)\n head :no_content\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end", "def update\n @event = Event.find(params[:id])\n\n if @event.update(event_params)\n head :no_content\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end", "def update\n respond_to do |format|\n if @base_event.update(base_event_params)\n format.json { head :no_content }\n else\n format.json { render json: @base_event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html {redirect_to @event, notice: 'Event was successfully updated.'}\n format.json {head :no_content}\n else\n format.html {render action: 'edit'}\n format.json {render json: @event.errors, status: :unprocessable_entity}\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n @event.save!\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n # @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n #require \"pry\"; binding.pry\n\n update_users\n\n respond_to do |format|\n if @event.update(event_params)\n sync_update @event\n format.html { redirect_to @event, notice: t(\"successfully_updated\", :model => t(\"models.event\")) }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.using(:shard_one).find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, :notice => 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n\n\n\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to events_path, notice: 'Event was successfully updated.' 
}\n format.json { render :show, status: :ok, location: events_path(@event) }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, :notice => 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, :notice => 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, :notice => 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n\n\n\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to '/', notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\n respond_to do |format|\n if @event.update(event_params)\n\n format.html { redirect_to @event }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n #@event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n event_id = params[:id]\n if event_id.present? && params[:event].present? && @user.uuid.present? && @user.uuid != \"guest\"\n event_params = params[:event]\n @event = Com::Nbos::Events::Event.where(id: params[:id], user_id: @user.id ).first\n if @event.present?\n @event.update(event_params.permit!)\n if @event.save\n render :json => @event\n else\n data = add_error_messages(@event)\n render :json => data\n end\n else\n render :json => {\"messageCode\": \"module.user.unauthorized\", \"message\": \"Unauthorized to update others Event\"}, status: 404\n end\n else\n render :json => {messageCode: \"bad.request\", message: \"Bad Request\"}, status: 400\n end\n end", "def update\r\n respond_to do |format|\r\n if @event.update(event_params)\r\n format.html { redirect_to @event, notice: 'Event was successfully updated.' 
}\r\n format.json { head :no_content }\r\n else\r\n format.html { render action: 'edit' }\r\n format.json { render json: @event.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html do\n gflash :notice\n redirect_to @event\n end\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event_event = Event::Event.find(params[:id])\n\n respond_to do |format|\n if @event_event.update_attributes(params[:event_event])\n format.html { redirect_to @event_event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event_event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' 
}\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to events_event_path(@event), notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n if @event.update(event_params)\n \tdata = { data: @event, status: :ok, message: \"Event was successfully updated.\" }\n render :json => data\n else\n \tdata = { data: @event.errors, status: :unprocessable_entity }\n render :json => data\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to edit_event_path(@event), notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end" ]
[ "0.75299805", "0.7372486", "0.71766764", "0.7172103", "0.7170955", "0.71424884", "0.70959055", "0.7082709", "0.7082709", "0.7057662", "0.70209146", "0.6989379", "0.69825095", "0.69775003", "0.69601995", "0.6954494", "0.6954494", "0.6951992", "0.692109", "0.692109", "0.692109", "0.692109", "0.692109", "0.692109", "0.692109", "0.692109", "0.692109", "0.69159144", "0.6893937", "0.68830234", "0.68787163", "0.68631184", "0.68579584", "0.6854905", "0.68530035", "0.68461853", "0.68461853", "0.68461853", "0.6839517", "0.6839378", "0.6835066", "0.68241036", "0.681631", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.68139076", "0.681159", "0.68073547", "0.679286", "0.679286", "0.679286", "0.679286", "0.679286", "0.679286", "0.679286", "0.679286", "0.6789041", "0.67812467", "0.67756975", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907", "0.67662907" ]
0.0
-1
DELETE /events/1 DELETE /events/1.json
def destroy
  expire_action :action => [:shadyside, :south_side, :lawrenceville, :oakland, :bloomfield, :strip_district, :downtown]
  @event.destroy
  respond_to do |format|
    Venue.where(id: @event.venue_id).first.update_attribute(:venue_verify, Time.now)
    format.html { redirect_to Venue.where(id: @event.venue_id).first, notice: 'Event was successfully destroyed.' }
    format.json { head :no_content }
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def destroy\n @event = Event.using(:shard_one).find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end", "def delete_event\n if params[:id]\n @e = Evento.find(params[:id]).destroy\n end\n render :json => msj = { :status => true, :message => 'ok'}\n end", "def destroy\n @event = Event.find(params[:id])\n \n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n #@event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n # @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @events = Event.where(event_id: params[:id])\n @events.each.destroy\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end", "def destroy\n 
@event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n\n end", "def destroy\n @event_event = Event::Event.find(params[:id])\n @event_event.destroy\n\n respond_to do |format|\n format.html { redirect_to event_events_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = 
Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to 
events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url}\n format.json { head :no_content }\n end\n end", "def destroy\n @myevent = Myevent.find(params[:id])\n @myevent.destroy\n\n respond_to do |format|\n format.html { redirect_to myevents_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n\n sync_destroy @event\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html {redirect_to events_url}\n format.json {head :no_content}\n end\n end", "def destroy\n @calevent = Calevent.find(params[:id])\n @calevent.destroy\n\n respond_to do |format|\n format.html { redirect_to calevents_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n #@event.update_attribute(:deleted, true)\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n render :nothing => true, :status => 200, :content_type => 'text/plain'\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url, notice: t(:event_deleted) }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_events_path }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n head :no_content\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to root_url, notice: 'Event was successfully removed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url, notice: 'データが削除されました。' }\n format.json { head :no_content }\n end\n end", "def delete_event\r\n event = Event.find_by(id: params[:eventid].to_i)\r\n if event.present?\r\n event.update(status: 3)\r\n lt_update_event_status event, 'archived'\r\n render json: SuccessResponse.new(\r\n code: 200,\r\n message: 'Event Deleted.'\r\n ), adapter: :json, status: :ok\r\n else\r\n render json: ErrorResponse.new(\r\n code: 404,\r\n message: 'Event not found!'\r\n ), adapter: :json, status: :not_found\r\n end\r\n\r\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url, notice: 'Event was successfully removed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url, notice: 'Event was successfully deleted.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url, notice: 'Event was successfully deleted.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @client = Client.find(@event.client_id)\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_to_client_path(@client) }\n format.json { head :no_content }\n end\n end", "def destroy\n @event_request = EventRequest.find(params[:id])\n @event_request.destroy\n\n respond_to do |format|\n format.html { redirect_to event_requests_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url, notice: 'Мероприятие успешно удалено.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = current_user.events.find_by_url(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @post_event.destroy\n respond_to do |format|\n format.html { redirect_to post_events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @event = @current_account.events.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @evento = Evento.find(params[:id])\n @evento.destroy\n\n respond_to do |format|\n format.html { redirect_to eventos_url }\n format.json { head :ok }\n end\n end", "def destroy\n @event = Event.find(params[:id])\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url, notice: 'Event was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @eventtype.events.each do |e|\n e.destroy\n end\n @eventtype.destroy\n respond_to do |format|\n format.html { redirect_to eventtypes_url }\n format.json { head :no_content }\n end\n end", "def destroy \n @event.destroy \n respond_to do |format|\n format.html { redirect_to events_url, success: 'Event was successfully removed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.mobile { redirect_to events_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @create_event = CreateEvent.find(params[:id])\n @create_event.destroy\n\n respond_to do |format|\n format.html { redirect_to create_events_url }\n format.json { head :no_content }\n end\n end" ]
[ "0.769268", "0.76872975", "0.76872975", "0.76872975", "0.7680665", "0.7585337", "0.75682765", "0.7560537", "0.75407815", "0.7540473", "0.7540473", "0.7540473", "0.7540473", "0.7540473", "0.7540473", "0.7540473", "0.7540473", "0.7540473", "0.7540473", "0.7540473", "0.7539941", "0.75382024", "0.7537234", "0.7526631", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75194746", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.75145763", "0.74953985", "0.74650526", "0.7459677", "0.7453618", "0.7446788", "0.74399537", "0.7439926", "0.7434689", "0.7432393", "0.7401567", "0.73877174", "0.73772806", "0.7371874", "0.7365335", "0.7337954", "0.733497", "0.733497", "0.7322736", "0.73000205", "0.7296434", "0.72906893", "0.7289899", "0.7289422", "0.728712", "0.7284745", "0.7283742", "0.72820485", "0.72724354", "0.72697437" ]
0.0
-1
Use callbacks to share common setup or constraints between actions.
def set_event @event = Event.find(params[:id]) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_required_actions\n # TODO: check what fields change to asign required fields\n end", "def action_hook; end", "def run_actions; end", "def define_action_hook; end", "def actions; end", "def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_eval do\n define_method(:valid?) do |*args|\n self.class.state_machines.fire_event_attributes(self, :save, false) { super(*args) }\n end\n end\n end\n end", "def add_actions; end", "def callbacks; end", "def callbacks; end", "def setup *actions, &proc\n (@setup_procs ||= []) << [proc, actions.size > 0 ? actions : [:*]]\n end", "def define_action_helpers; end", "def post_setup\n end", "def action_methods; end", "def action_methods; end", "def action_methods; end", "def before_setup; end", "def action_run\n end", "def execute(setup)\n @action.call(setup)\n end", "def define_action_helpers?; end", "def set_actions\n actions :all\n end", "def action_done(action)\n dispatch = { :migrate => :done_migrating, :map => :done_mapping, :reduce =>\n :done_reducing, :finalize => :done_finalizing } \n self.send dispatch[action[:action]], action\n end", "def dependencies action, &block\n @actions.each do |other|\n if action[:requires].include? other[:provide]\n block.call other\n end\n end\n end", "def setup!\n return unless @setup_procs\n http_actions = actions\n @setup_procs.each do |setup_proc|\n proc, actions = setup_proc\n @setup__actions = actions.map do |action|\n\n action.is_a?(Regexp) ?\n http_actions.select { |a| a.to_s =~ action } :\n action.is_a?(String) && action =~ /\\A\\./ ?\n http_actions.map { |a| a.to_s << action if format?(a).include?(action) }.compact :\n action\n\n end.flatten\n self.class_exec &proc\n @setup__actions = nil\n end\n @setup_procs = nil\n end", "def before_actions(*logic)\n self.before_actions = logic\n end", "def setup_handler\n end", "def set_action(opts)\n opts = check_params(opts,[:actions])\n super(opts)\n end", "def setup(action)\n @targets.clear\n unless action.item.target_filters.empty?\n @targets = SES::TargetManager.make_targets(action)\n else\n item = action.item\n if item.for_opponent?\n @targets = $game_troop.alive_members\n elsif item.for_dead_friend?\n @targets = $game_party.battle_members.select { |actor| actor.dead? }\n else\n $game_party.battle_members.select { |actor| actor.alive? 
}\n end\n end\n @item_max = @targets.size\n create_contents\n refresh\n show\n activate\n end", "def action; end", "def action; end", "def action; end", "def action; end", "def action; end", "def workflow\n end", "def revisable_shared_setup(args, block)\n class << self\n attr_accessor :revisable_options\n end\n options = args.extract_options!\n self.revisable_options = Options.new(options, &block)\n \n self.send(:include, Common)\n self.send(:extend, Validations) unless self.revisable_options.no_validation_scoping?\n self.send(:include, WithoutScope::QuotedColumnConditions)\n end", "def setup\n @action = SampleActionAndroid.new(os_name: 'android',\n app_name: APP_PATH)\n end", "def before(action)\n invoke_callbacks *self.class.send(action).before\n end", "def process_action(...)\n send_action(...)\n end", "def before_dispatch(env); end", "def after_actions(*logic)\n self.after_actions = logic\n end", "def setup\n # override and do something appropriate\n end", "def setup(client)\n return unless @setup\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n actions.each do |action|\n action.execute(client)\n end\n self\n end", "def setup(_context)\n end", "def setup(resources) ; end", "def validate_actions\n errors.add(:base, :should_give_at_least_one_action) if !manage? && !forecasting? && !read? && !api?\n end", "def setup\n @resource_config = {\n :callbacks => {\n :before_create => nil,\n :after_create => nil,\n :before_update => nil,\n :after_update => nil,\n :before_destroy => nil,\n :after_destroy => nil,\n },\n :child_assoc => nil,\n :model => nil,\n :parent => nil,\n :path => nil,\n :permission => {},\n :properties => {},\n :relation => {\n :create => nil,\n :delete => nil,\n },\n :roles => nil,\n }\n end", "def determine_valid_action\n\n end", "def process_shared\n handle_taxes\n handle_shippings\n create_adjustments_from_params\n handle_status\n handle_inventory_refunds\n handle_payment_transactions\n order.updater.update\n end", "def startcompany(action)\n @done = true\n action.setup\n end", "def init_actions\n am = action_manager()\n am.add_action(Action.new(\"&Disable selection\") { @selection_mode = :none; unbind_key(32); bind_key(32, :scroll_forward); } )\n am.add_action(Action.new(\"&Edit Toggle\") { @edit_toggle = !@edit_toggle; $status_message.value = \"Edit toggle is #{@edit_toggle}\" })\n end", "def event_callbacks(event, metadata={})\n case event\n when :reset, :review\n if confirmed\n update_attributes(confirmed: false)\n end\n when :confirm\n confirm\n # trigger :order for all applicable items\n # NOTE: :order event is common to both physical and digital items\n items.each do |i|\n if i.event_permitted(:order)\n user_id = last_transition.user_id\n i.trigger!(:order, { order_id: id, user_id: user_id })\n end\n end\n when :complete_work\n request = metadata[:request]\n work_complete_notification(request)\n when :close\n close\n end\n if event != :close && !open\n reopen\n end\n end", "def setup_action\n return unless PONY::ERRNO::check_sequence(current_act)\n new_sequence = @action_sequence[@sequence_index+1...@action_sequence.size]\n @sequence_index = 0\n new_sequence = DND::SkillSequence::ACTS[@acts[1]] + new_sequence\n execute_sequence\n end", "def define_tasks\n define_weave_task\n connect_common_tasks\n end", "def setup(&block)\n define_method(:setup, &block)\n end", "def setup\n transition_to(:setup)\n end", "def setup\n transition_to(:setup)\n end", "def action\n end", "def setup( *args 
)\n\t\t\tself.class.setupBlocks.each {|sblock|\n\t\t\t\tdebugMsg \"Calling setup block method #{sblock}\"\n\t\t\t\tself.send( sblock )\n\t\t\t}\n\t\t\tsuper( *args )\n\t\tend", "def config(action, *args); end", "def setup\n @setup_proc.call(self) if @setup_proc\n end", "def before_action \n end", "def setup_callbacks\n defined_callbacks.each do |meth|\n unless respond_to?(\"call_#{meth}_callbacks\".to_sym)\n self.class.module_eval <<-EOE\n def call_#{meth}_callbacks(*args)\n plugin_store.each {|a| a.call_#{meth}_callbacks(*args) } if respond_to?(:plugin_store) && plugin_store\n self.send :#{meth}, *args if respond_to?(:#{meth})\n end\n EOE\n end\n end\n end", "def action\n end", "def matt_custom_action_begin(label); end", "def setup\n # override this if needed\n end", "def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend", "def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend", "def action(options,&callback)\n new_action = Action===options ? options : Action.new(options,&callback)\n # replace any with (shared name/alias or both default) + same arity\n @actions.delete_if do |existing_action|\n ((existing_action.names & new_action.names).size > 0 ||\n existing_action.default? && new_action.default?) &&\n existing_action.required.size == new_action.required.size &&\n existing_action.optional.size <= new_action.optional.size\n end\n @actions = (@actions + [new_action]).sort\n new_action\n end", "def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action\n end", "def after(action)\n invoke_callbacks *options_for(action).after\n end", "def pre_task\n end", "def setup(server)\n server.on('beforeMethod', method(:before_method), 10)\n end", "def add_actions\n attribute = machine.attribute\n name = self.name\n \n owner_class.class_eval do\n define_method(name) {self.class.state_machines[attribute].events[name].fire(self)}\n define_method(\"#{name}!\") {self.class.state_machines[attribute].events[name].fire!(self)}\n define_method(\"can_#{name}?\") {self.class.state_machines[attribute].events[name].can_fire?(self)}\n end\n end", "def init_actions\n @select_action = SelectAction.new\n @endpoint_mouse_action = EndpointMouseAction.new\n @move_action = MoveAction.new\n end", "def setup_signals; end", "def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend", "def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend", "def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action.respond_to?('weak!') ? action.weak! 
: action\n end", "def initialize(*args)\n super\n @action = :set\nend", "def after_set_callback; end", "def setup\n #implement in subclass;\n end", "def lookup_action; end", "def setup &block\n if block_given?\n @setup = block\n else\n @setup.call\n end\n end", "def setup_action\n return TSBS.error(@acts[0], 1, @used_sequence) if @acts.size < 2\n actions = TSBS::AnimLoop[@acts[1]]\n if actions.nil?\n show_action_error(@acts[1])\n end\n @sequence_stack.push(@acts[1])\n @used_sequence = @acts[1]\n actions.each do |acts|\n @acts = acts\n execute_sequence\n break if @break_action\n end\n @sequence_stack.pop\n @used_sequence = @sequence_stack[-1]\n end", "def release_actions; end", "def around_hooks; end", "def save_action; end", "def setup(easy)\n super\n easy.customrequest = @verb\n end", "def action_target()\n \n end", "def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end", "def setup\n return unless @setup\n\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n run_actions_and_retry(actions)\n self\n end", "def before_setup\n # do nothing by default\n end", "def my_actions(options)\n @setup = false\n get_template_part(\"custom_used\",\"action_users\",true)\n end", "def default_action; end", "def setup(&blk)\n @setup_block = blk\n end", "def callback_phase\n super\n end", "def advice\n end", "def _handle_action_missing(*args); end", "def duas1(action)\n action.call\n action.call\nend", "def shared_action(name, &block)\n @controller.shared_actions[name] = block\n end", "def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end", "def setup_initial_state\n\n state_a = State.new(\"a\", 0)\n state_b = State.new(\"b\", 0)\n state_c = State.new(\"c\", 10)\n\n move_to_b = Action.new(\"move_to_b\", 1, state_b)\n\n move_to_c = Action.new(\"move_to_c\", 1, state_c)\n\n state_a.actions = [move_to_b, move_to_c]\n\n return state_a\n \nend" ]
[ "0.6163163", "0.6045976", "0.5946146", "0.591683", "0.5890051", "0.58349305", "0.5776858", "0.5703237", "0.5703237", "0.5652805", "0.5621621", "0.54210985", "0.5411113", "0.5411113", "0.5411113", "0.5391541", "0.53794575", "0.5357573", "0.53402257", "0.53394014", "0.53321576", "0.53124547", "0.529654", "0.5296262", "0.52952296", "0.52600986", "0.52442724", "0.52385926", "0.52385926", "0.52385926", "0.52385926", "0.52385926", "0.5232394", "0.523231", "0.5227454", "0.52226824", "0.52201617", "0.5212327", "0.52079266", "0.52050185", "0.51754695", "0.51726824", "0.51710224", "0.5166172", "0.5159343", "0.51578903", "0.51522785", "0.5152022", "0.51518047", "0.51456624", "0.51398855", "0.5133759", "0.5112076", "0.5111866", "0.5111866", "0.5110294", "0.5106169", "0.509231", "0.50873137", "0.5081088", "0.508059", "0.50677156", "0.50562143", "0.5050554", "0.50474834", "0.50474834", "0.5036181", "0.5026331", "0.5022976", "0.5015441", "0.50121695", "0.5000944", "0.5000019", "0.4996878", "0.4989888", "0.4989888", "0.49864885", "0.49797225", "0.49785787", "0.4976161", "0.49683493", "0.4965126", "0.4958034", "0.49559742", "0.4954353", "0.49535993", "0.4952725", "0.49467874", "0.49423352", "0.49325448", "0.49282882", "0.49269363", "0.49269104", "0.49252945", "0.4923091", "0.49194667", "0.49174926", "0.49173003", "0.49171105", "0.4915879", "0.49155936" ]
0.0
-1
Never trust parameters from the scary internet, only allow the white list through.
def event_params params.require(:event).permit(:special,:detail, :day, :venue_id, :start, :end, :tag_list, :event_verify, :varified_user, :event_date) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def strong_params\n params.require(:user).permit(param_whitelist)\n end", "def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end", "def allow_params_authentication!; end", "def allowed_params\n ALLOWED_PARAMS\n end", "def default_param_whitelist\n [\"mode\"]\n end", "def param_whitelist\n [:role, :title]\n end", "def expected_permitted_parameter_names; end", "def safe_params\n params.except(:host, :port, :protocol).permit!\n end", "def strong_params\n params.require(:team_member).permit(param_whitelist)\n end", "def permitir_parametros\n \t\tparams.permit!\n \tend", "def strong_params\n params.require(:community).permit(param_whitelist)\n end", "def permitted_strong_parameters\n :all #or an array of parameters, example: [:name, :email]\n end", "def strong_params\n params.require(:education).permit(param_whitelist)\n end", "def restricted_params\n #params.require(self.controller_name.classify.underscore.to_sym).permit([])\n raise(\"No strong params set, override restricted_params method in your controller. E.g. params.require(:model).permit(:attribute1, :attribute2)\")\n end", "def allowed_params\n params.require(:user).permit(:username, :email, :password, :password_confirmation)\n end", "def param_whitelist\n [:rating, :review]\n end", "def param_whitelist\n whitelist = [\n :username, :name,\n :parent_id,\n :headline, :description, :video,\n :policy, :signup_mode, :category,\n :website, :facebook, :twitter, :linkedin,\n :founded_at,\n privacy: [\n :events,\n :resources\n ],\n permission: [\n :profile,\n :members,\n :children,\n :statistics,\n :posts,\n :listings,\n :resources,\n :events\n ],\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:parent_id)\n unless current_user.role_in(@community) === 'owner'\n whitelist.delete(:privacy)\n whitelist.delete(:permission)\n end\n end\n \n whitelist\n end", "def param_whitelist\n if @user.present? && current_user != @user\n return [:followed]\n end\n \n whitelist = [\n :username, :email, :password,\n :first_name, :last_name,\n :birthday, :gender,\n :headline, :biography, :ask_about, :focus,\n :website, :facebook, :linkedin, :twitter, :github,\n roles: [],\n skills: [],\n interests: [],\n privacy: { contact: [] },\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:email)\n whitelist.delete(:password)\n end\n \n whitelist\n end", "def user_params \n \tparams.require(:user).permit(:name, :email, :password, :password_confirmation)# preventing CSTR\n end", "def user_params\n params.permit(:name, :phoneNumber, :address, :postalCode, :local, :link, :counter, :latitude, :longitude) \n end", "def valid_params_request?; end", "def strong_params\n params.require(:experience).permit(param_whitelist)\n end", "def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? 
key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end", "def whitelist_url_params\n params.require(:whitelist_url).permit(:domain)\n end", "def allowed_params\n params.require(:allowed).permit(:email)\n end", "def permitted_params\n []\n end", "def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end", "def safe_params\n params.permit(:id, :name, :origin, :emails => []); #emails is an array\n end", "def query_param\n\t\tparams.permit(:first_name, :last_name, :phone)\n\tend", "def strong_params\n params.require(:success_metric).permit(param_whitelist)\n end", "def devise_filter\r\n logger.debug(\"In devise_filter =>PARAMS: #{params.inspect}\")\r\n\r\n # White list for sign_up\r\n devise_parameter_sanitizer.for(:sign_up) { |u| u.permit(user_whitelist) }\r\n\r\n # White list for account update\r\n devise_parameter_sanitizer.for(:account_update) { |u| u.permit(user_whitelist, :current_password) }\r\n\r\n # White list for Invitation creation\r\n devise_parameter_sanitizer.for(:invite) { |u| u.permit(:account_type, :email, :invitation_token)}\r\n\r\n # White list for accept invitation\r\n devise_parameter_sanitizer.for(:accept_invitation) { |u| u.permit(user_whitelist, :invitation_token)}\r\n\r\n end", "def whitelisted_user_params\n params.require(:user).\n permit( :first_name, :last_name, :email,:password,:password_confirmation,:birthday,:gender)\n end", "def user_params\n ActionController::Parameters.permit_all_parameters = true\n params.require(:user) #.permit(:name, :surname, :phone, :password, :email, :time_zone)\n end", "def strong_params\n params.require(:metric_change).permit(param_whitelist)\n end", "def safe_params\n params.require(:user).permit(:name)\n end", "def get_params\n\t\treturn ActionController::Parameters.new(self.attributes).permit(\"account_id\", \"title\", \"category\", \"introduction\", \"tags\", \"segment_type\", \"visible\", \"status\", \"main_image\")\n\tend", "def grant_params\n @whitelisted = params.require(:grant).permit(:name, :description, :agency_id, :acronym)\n end", "def check_params; true; end", "def param_whitelist\n whitelist = [\n :description,\n :progress,\n :kpi_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:kpi_id)\n end\n \n whitelist\n end", "def quote_params\n params.permit!\n end", "def valid_params?; end", "def paramunold_params\n params.require(:paramunold).permit!\n end", "def user_params\n\t\tparams.permit(:nickname, :avatar, :description, :password, :gender, :birthday, :email, :phone, :qq_id, :wechat_id)\n\tend", "def filtered_parameters; end", "def user_params\n params.permit(\n \t:id,\n \t:email, \n \t:first_name, \n \t:last_name, \n \t:password, \n \t:confirm_token, \n \t:phone_number,\n \t:facebook_link,\n \t:car_model,\n \t:license_plate)\n end", "def filtering_params\n params.permit(:email, :name)\n end", "def check_params\n true\n end", "def wx_public_params\n params.require(:wx_public).permit(:nickname, :manager, :alias)\n end", "def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end", "def allowed_params\n 
params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end", "def listing_params\n\t\tparams.permit(:address, :transit_info, :rules, :other_info, :lat, :lng)\n\tend", "def social_account_params\n\t\t\tparams.require(:social_account).permit!\n\t\tend", "def safe_params\n resurce_name = self.class.resource_name\n params_method_name = \"#{resurce_name}_params\".to_sym\n if params[resurce_name]\n if respond_to?(params_method_name) || private_methods.include?(params_method_name)\n send(params_method_name)\n else\n raise ActiveModel::ForbiddenAttributesError, \"Please, define the '#{params_method_name}' method in #{self.class.name}\"\n end\n end\n end", "def url_params\n params.require(:url).permit(:short_url, :original_url, :clicks, :ip_addresses)\n end", "def user_params\n params.require(:user).permit(:uri, :username, :password, :realname, :email, :publicvisible)\n end", "def model_params\n\t\tparams.require(:manager).permit(\n\t :user_name,\n :password,\n :email,\n \t\t\t)\n\tend", "def article_params_whitelist\n params.require(:article).permit(:title, :description, category_ids: [])\n end", "def college_whitelist_params\n params.require(:college_whitelist).permit(:status)\n end", "def active_code_params\n params[:active_code].permit\n end", "def filtering_params\n params.permit(:email)\n end", "def valid_params(params)\n params.permit(:user_id, :photo_id, :originX, :originY, :width, :height)\n end", "def ip_address_params\n\t\t\tparams.require(:ip_address).permit!\n end", "def pull_request_params\n whitelist = [\n :url,\n :id,\n :html_url,\n :diff_url,\n :patch_url,\n :issue_url,\n :number,\n :state,\n :locked,\n :title\n ]\n params.require(:pull_request).permit(whitelist)\n end", "def reserved_params\n params.require(:reserved).permit(:name, :email, :pax, :address, :KTP, :title)\n end", "def post_params\n if current_user.admin? 
\n params.permit(:title, :body, :city, :country, :gps_location, :privacy, :visible, :latitude, :longitude, images: [], files: [])\n else \n params.permit(:title, :body, :city, :country, :gps_location, :privacy,:latitude, :longitude, images: [], files: [])\n end \n end", "def list_params\n params.permit(:name)\n end", "def filter_parameters; end", "def filter_parameters; end", "def vineyard_params\n params.permit(:vineyard_name, :email, :website_url, :phone, :address, :city, :region, :postcode, :country, :specialty, :description, :pet_friendly, :holiday, :tours, :events, :family_friendly, :cover_image, :image_one, :image_two, :image_three, :image_four, :user_id, :base64)\n end", "def available_activity_params\n # params.require(:available_activity).permit(:type,:geometry,:properties)\n whitelisted = ActionController::Parameters.new({\n type: params.require(:available_activity)[:type],\n geometry: params.require(:available_activity)[:geometry].try(:permit!).to_h,\n properties: params.require(:available_activity)[:properties].try(:permit!).to_h\n }).try(:permit!)\n end", "def user_params\n params.permit(:name, :username, :email, :password, :img_url, :bg_url, :coinbank)\n end", "def user_params_pub\n\t \tparams[:user].permit(:hruid)\n\t end", "def user_params\n params.permit(:id, :email, :password, :nickname, :status, :avatar, :flat_picture, :flatsharing_id, :member,\n :user, :color, :solde)\n end", "def validate_search_inputs\n @whitelisted = params.fetch(:user, nil)\n if @whitelisted.blank?\n render_error(400, \"#{I18n.t('general_error.params_missing_key')}\": [I18n.t('general_error.params_missing_value', model: \"review\")])\n return\n else\n @whitelisted = @whitelisted.permit(:name, :uen, :description)\n end\n end", "def param_whitelist\n [\n :title,\n :description,\n :organization,\n :team_id,\n :started_at,\n :finished_at,\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n end", "def url_whitelist; end", "def admin_social_network_params\n params.require(:social_network).permit!\n end", "def filter_params\n params.require(:filters).permit(:letters)\n end", "def origin_params\n params.permit(:country, :state, :city, :postal_code, :address, :description)\n end", "def valid_params(params)\n params.permit(:login, :first_name, :last_name, \n :password, :password_confirmation)\n end", "def sensitive_params=(params)\n @sensitive_params = params\n end", "def permit_request_params\n params.permit(:address)\n end", "def user_params\n # Ensure a user can't give themselves admin priveleges\n params.delete(:admin) if current_user.admin?\n params.require(:user).permit(:name, :email, :admin, :image)\n end", "def secure_params\n params.require(:location).permit(:name)\n end", "def strong_params\n params.require( :setting ).\n permit( :global_scan_limit, :per_user_scan_limit,\n :target_whitelist_patterns, :target_blacklist_patterns )\n end", "def question_params\n params.require(:survey_question).permit(question_whitelist)\n end", "def case_insensitive_params\n params.require(:case_insensitive).permit(:name)\n end", "def empire_master_no_match_params\n params.require(:empire_master_no_match).permit(:uid, :last_name, :list, :search_date, :double, :source)\n end", "def maintenance_request_params\n params[:maintenance_request].permit! 
#allow all parameters for now\n end", "def unwanted_params\n params.require(:unwanted).permit(:title, :description, :image)\n end", "def url_params\n params[:url].permit(:full)\n end", "def backend_user_params\n params.permit!\n end", "def filter_params\n\t\treturn params[:candidate].permit(:name_for_filter)\n\tend", "def speed_measurement_params\n\n #fuckit, to lazy to deal with permit crap right now\n ActionController::Parameters.permit_all_parameters = true\n\n params[:speed_measurement]\n end", "def user_params\n params.permit(:name, :age, :username, :display_photo, :password)\n end", "def get_params\r\n #params.require(:article).permit(:title, :permalink, :content, :source_site, :introtext, :type_id, :order_by, :searchable, :created_by, :edited_by, :published_by, :published_on, :user_id)\r\n params.require(:article).permit!\r\n\r\n end", "def pub_params\n params.require(:pub).permit(:name, :description, :phone, :email, :hidden, :city_id, :address)\n end", "def pass_params\n params[:pass].permit(:name, :price, :description, :colour, :events)\n end", "def droptraining_params\n params.permit(:training_id,:user_id, :utf8, :authenticity_token, :commit)\n end", "def person_params\n # params whitelist does *not* include admin, sub, remember_token\n # TBD: share this whitelist with the list used by configuration_permitted_parameters\n # TBD: should current_password be on this list? -- for now, leaving off, since it seems to work without\n # NOTE: do not include 'admin' in this list!\n params.require(:person).permit(\n :name, \n :email, \n :description,\n :password, \n :password_confirmation\n )\n end", "def parameter_params\n params.require(:parameter).permit(:name, :description, :param_code, :param_value, :active_from, :active_to)\n end" ]
[ "0.69792545", "0.6781151", "0.67419964", "0.674013", "0.6734356", "0.6591046", "0.6502396", "0.6496313", "0.6480641", "0.6477825", "0.64565", "0.6438387", "0.63791263", "0.63740575", "0.6364131", "0.63192815", "0.62991166", "0.62978333", "0.6292148", "0.6290449", "0.6290076", "0.62894756", "0.6283177", "0.6242471", "0.62382483", "0.6217549", "0.6214457", "0.6209053", "0.6193042", "0.6177802", "0.6174604", "0.61714715", "0.6161512", "0.6151757", "0.6150663", "0.61461", "0.61213595", "0.611406", "0.6106206", "0.6105114", "0.6089039", "0.6081015", "0.6071004", "0.60620916", "0.6019971", "0.601788", "0.6011056", "0.6010898", "0.6005122", "0.6005122", "0.6001556", "0.6001049", "0.59943926", "0.5992201", "0.59909594", "0.5990628", "0.5980841", "0.59669393", "0.59589154", "0.5958826", "0.5957911", "0.5957385", "0.5953072", "0.59526145", "0.5943361", "0.59386164", "0.59375334", "0.59375334", "0.5933856", "0.59292704", "0.59254247", "0.5924164", "0.59167904", "0.59088355", "0.5907542", "0.59064597", "0.5906243", "0.5898226", "0.589687", "0.5896091", "0.5894501", "0.5894289", "0.5891739", "0.58860534", "0.5882406", "0.587974", "0.58738774", "0.5869024", "0.58679986", "0.5867561", "0.5865932", "0.5864461", "0.58639693", "0.58617616", "0.5861436", "0.5860451", "0.58602303", "0.5854586", "0.58537364", "0.5850427", "0.5850199" ]
0.0
-1
Sends the notice unless it is one of the default ignored exceptions.
def notify_or_ignore(exception, context = {}) notify(exception, context) unless ignored?(exception) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def notify_or_ignore(exception, opts = {})\n notice = build_notice_for(exception, opts)\n send_notice(notice) unless notice.ignore?\n end", "def notify_or_ignore(exception, opts = {})\n notice = build_notice_for(exception, opts)\n send_notice(notice) unless notice.ignore?\n end", "def notice!\n self.severity = :NOTICE\n end", "def notice; end", "def rescue_action_in_public_with_errornot(exception)\n unless errornot_ignore_user_agent?\n ErrornotNotifier.notify_or_ignore(exception, errornot_request_data)\n end\n rescue_action_in_public_without_errornot(exception)\n end", "def send_exception_to_honeybadger_unless_filtered(exception_info)\n if exception_info.send_to_honeybadger?\n send_exception_to_honeybadger(exception_info)\n else\n log_info(\"Filtered exception using '#{exception_info.exception_description.filter_name}'; not sending notification to Honeybadger\")\n :skipped\n end\n end", "def notice?\n severity == :NOTICE\n end", "def notice\n #\n end", "def notify(exception, options = {})\n send_notice(build_notice_for(exception, options))\n end", "def without_notices\n # execute the block with NOTICE messages disabled\n begin\n execute('SET client_min_messages = warning')\n yield\n ensure\n execute('RESET client_min_messages')\n end\n end", "def notify(exception, opts = {})\n send_notice(build_notice_for(exception, opts))\n end", "def notify_or_raise(ex)\n if should_raise?\n fail ex\n else\n notify_or_ignore_with_options(ex)\n end\n end", "def _notice(msg, type = :notice)\n if type == :error\n add_error(msg)\n else\n add_msg(\"* #{msg}\", type)\n end\nend", "def no_rescue(*exceptions)\n @options[:rescue] =\n if !exceptions.nil? and !exceptions.empty?\n ->(e) { !e.class.in?(exceptions) }\n else\n NONE\n end\n end", "def ignore &block\n begin; block.call; rescue; end\n end", "def notice=(message); end", "def notice=(message); end", "def notice(target, message)\n send_data(\"NOTICE #{target} :#{message}\")\n end", "def ignore!\n\t\t\t\tSignal.trap(@name, \"IGNORE\")\n\t\t\tend", "def notice_signal\n @selfpipe[:writer].write_nonblock( '.' 
)\n rescue Errno::EAGAIN\n # Ignore writes that would block\n rescue Errno::EINTR\n # Retry if another signal arrived while writing\n retry\n end", "def send_notice(message, *recipients)\n normalize_message(message) { |message|\n recipients.each { |recipient|\n send_raw(NOTICE, recipient, message)\n }\n }\n end", "def notice(receiver, message)\n raw Helpers.splitted(\"NOTICE #{receiver}\", message.to_s)\n end", "def ignore!\n\t\t\t\tSignal.trap(@name, :IGNORE)\n\t\t\tend", "def test_notice_error_returns_nil\n begin\n raise 'WTF'\n rescue => e\n assert_nil ::NewRelic::Agent.notice_error(e)\n end\n end", "def catch_simple\n begin\n yield\n rescue => e\n Rails.logger.info e.message\n end\n end", "def notice(t, m)\n @socket << \"NOTICE #{t} :#{m}\"\n end", "def send_notice(text)\n client.api.send_notice(id, text)\n end", "def notice(to_nick, text)\n split_send(\"NOTICE #{to_nick} :\", text)\n end", "def test_does_not_consider_unknown_errors\n ig = ScoutApm::ErrorService::IgnoredExceptions.new(context, [\"ThisDoesNotExist\", \"IgnoredExceptionsTest::FakeError\"])\n assert ig.ignored?(FakeError.new(\"ignore this one\"))\n end", "def wont_throw(sym, msg=nil)\n ThrowAssay.refute!(sym, :message=>msg, :backtrace=>caller, &self)\n end", "def skip_this_when(enabled:, expected_exception:)\n yield\n rescue expected_exception => e\n e.tap do\n skip e.message if enabled && e.is_a?(expected_exception)\n end\n end", "def notice(msg) log(5, msg); end", "def send_overdue_notice(notice)\n p \"Reminder #{get_name} #{notice}\"\n end", "def send_overdue_notice(notice)\n p \"Reminder #{get_name} #{notice}\"\n end", "def notify_airbrake(exception)\n # tested with airbrake 4.3.5 and 5.0.5\n if defined?(Airbrake)\n if(Airbrake.respond_to?(:notify_or_ignore))\n env['airbrake.error_id'] = Airbrake.notify_or_ignore(exception, airbrake_request_data) # V4\n else\n # V5\n notice = Airbrake::Rack::NoticeBuilder.new(env).build_notice(exception)\n env['airbrake.error_id'] = Airbrake.notify(notice)\n end\n end\n end", "def notice_error(e, options={})\n state = TingYun::Agent::TransactionState.tl_get\n txn = state.current_transaction\n if txn\n txn.exceptions.notice_error(e, options)\n state.transaction_sample_builder.trace.add_errors_to_current_node(state,e) rescue nil\n elsif TingYun::Agent.instance\n TingYun::Agent.instance.error_collector.notice_error(e, options)\n end\n end", "def notice(msg)\n @notices << msg\n blah @notices.last\n end", "def prohibit_all(except: [])\n set_all :prohibited, except: { exceptions: except, status: :allowed }\n end", "def send_to_honeybadger(notice)\n @notices << notice\n end", "def rescue_action_in_public_with_notifier(exception) #:doc:\n response_code = response_code_for_rescue(exception)\n status = interpret_status(response_code)[0,3]\n respond_to do |format|\n # Personalize rescue rules for backend\n if controller_path =~ /^backend\\//\n # Usually when we made a post we submit the form\n # to a target iframe, so we need to respond to the parent.\n if request.post?\n responds_to_parent do\n render :update do |page|\n page.unmask\n page.ext_alert I18n.t(\"lipsiadmin.exceptions.#{status}.title\"), I18n.t(\"lipsiadmin.exceptions.#{status}.description\")\n end\n end\n else\n # We can't use status, because Backend.app.load don't permit load 500, 404 pages\n format.html { render :template => \"/exceptions/#{status}\" }\n format.js do\n render :update do |page|\n page.unmask\n page.ext_alert I18n.t(\"lipsiadmin.exceptions.#{status}.title\"), 
I18n.t(\"lipsiadmin.exceptions.#{status}.description\")\n end\n end\n format.all { render :nothing => true, :status => status }\n end\n else\n format.html { render :template => \"/exceptions/#{status}\", :status => status }\n format.all { render :nothing => true, :status => status }\n end\n end\n rescue Exception => e\n logger.error e.message\n erase_results\n rescue_action_in_public_without_notifier(exception)\n ensure\n if response_code != :not_found && Lipsiadmin::Mailer::ExceptionNotifier.send_mail\n Lipsiadmin::Mailer::ExceptionNotifier.deliver_exception(exception, self, request)\n end\n end", "def ignored?(exception)\n configuration.ignored?(exception)\n end", "def remote_notice(sender, body)\n if sender == 'server'\n sender = ''\n else\n sender = \"#{sender} \"\n end\n _notice \"#{sender}#{body}\", :notice\nend", "def unknown(message)\n asl_log(@aslclient, @aslmsg, ASL_LEVEL_EMERG, message)\n end", "def gocdkit_warn(*message)\n unless ENV['GOCDKIT_SILENT']\n warn message\n end\n end", "def notify_exception(e)\n # ignore exception because the exception caused tuple server is down...\n Util.ignore_exception do\n write(Tuple[:exception].new(uuid, agent_type, e))\n end\n end", "def to_notice(_notification_class, *_args)\n raise \"to be implemented by including class!\"\n end", "def octokit_warn(*message)\n unless ENV['OCTOKIT_SILENT']\n warn message\n end\n end", "def send_overdue_notice(notice)\n \"#{@name} #{notice}\"\n end", "def notice\n self[:notice]\n end", "def ignore!\n @should_ignore = true\n end", "def channel_notice(chan, notice)\n @connection.send(\"NOTICE #{chan} :#{notice}\")\n end", "def catch_exceptions; end", "def ignore_raise\n yield\nrescue StandardError\n :raised\nend", "def rescue_action_in_public(exception)\n # If the error class is NOT listed in the rails_error_class hash then we get a generic 500 error:\n # OTW if the error class is listed, but has a blank code or the code is == '200' then we get a custom error layout rendered\n # OTW the error class is listed!\n verbose = self.class.exception_notifiable_verbose && respond_to?(:logger) && !logger.nil?\n logger.info(\"[RESCUE STYLE] rescue_action_in_public\") if verbose\n status_code = status_code_for_exception(exception)\n if status_code == '200'\n notify_and_render_error_template(status_code, request, exception, ExceptionNotification::Notifier.get_view_path_for_class(exception, verbose), verbose)\n else\n notify_and_render_error_template(status_code, request, exception, ExceptionNotification::Notifier.get_view_path_for_status_code(status_code, verbose), verbose)\n end\n pass_it_on(exception, ENV, request, params, session, verbose)\n end", "def bypass_rescue\n if ::Rails::VERSION::STRING >= '2.2'\n def controller.rescue_action(exception)\n raise exception\n end\n else\n def controller.rescue_action_with_handler(exception)\n raise exception\n end\n end\n end", "def regardless(&block)\n yield\nrescue\nend", "def should_not_throw\n if match.output.index(/\\n(\\S*)Exception(.*?)\\n\\S/m)\n raise Bcpm::Tests::AssertionError, \"Player should not have thrown exceptions! \" +\n \"It threw #{$1}Exception#{$2}\"\n end\n if match.chatter.index(/\\n(\\S*)Exception(.*?)\\n\\S/m)\n raise Bcpm::Tests::AssertionError, \"Player should not have thrown exceptions! 
\" +\n \"It threw #{$1}Exception#{$2}\"\n end\n end", "def emergency!\n self.severity = :EMERGENCY\n end", "def bypass_rescue\n @controller.extend(BypassRescue)\n end", "def handle_ignored\n if @note.ignore == true\n raise ActiveRecord::RecordNotFound\n end\n end", "def user_not_authorized(exception)\n wiki_policy = exception.policy.class.to_s.underscore\n\n flash[:alert] = \"You are not authorized to do that!\"\n redirect_to(request.referrer || root_path)\n end", "def process_action(*args)\n super\n rescue Exception => exception\n raise if RocketPants.pass_through_errors?\n # Otherwise, use the default built in handler.\n logger.error \"Exception occured: #{exception.class.name} - #{exception.message}\"\n logger.error \"Exception backtrace:\"\n exception.backtrace[0, 10].each do |backtrace_line|\n logger.error \"=> #{backtrace_line}\"\n end\n exception_notifier_callback.call(self, exception, request)\n render_error exception\n end", "def render_not_found(exception)\n # Render detailed diagnostics for unhandled exceptions rescued from\n # a controller action.\n rescue_action_locally(exception)\n end", "def expects_exception!\n @expects_exception = true\n end", "def ignore_if(&block)\n @@ignores << block\n end", "def supporting_method\n logger.warn 'This does nothing'\n end", "def rescue_action(e); raise e; end", "def on_uncaught_exception(&block)\n @channel.on_uncaught_exception(&block)\n end", "def ignore(ignore_msg = nil)\n capture_result(Assert::Result::Ignore, ignore_msg)\n end", "def rescue_with_handler(exception)\n to_return = super\n if to_return\n verbose = self.class.exception_notifiable_verbose && respond_to?(:logger) && !logger.nil?\n logger.info(\"[RESCUE STYLE] rescue_with_handler\") if verbose\n data = get_exception_data\n status_code = status_code_for_exception(exception)\n #We only send email if it has been configured in environment\n send_email = should_email_on_exception?(exception, status_code, verbose)\n #We only send web hooks if they've been configured in environment\n send_web_hooks = should_web_hook_on_exception?(exception, status_code, verbose)\n the_blamed = ExceptionNotification::Notifier.config[:git_repo_path].nil? ? 
nil : lay_blame(exception)\n rejected_sections = %w(request session)\n # Debugging output\n verbose_output(exception, status_code, \"rescued by handler\", send_email, send_web_hooks, nil, the_blamed, rejected_sections) if verbose\n # Send the exception notification email\n perform_exception_notify_mailing(exception, data, nil, the_blamed, verbose, rejected_sections) if send_email\n # Send Web Hook requests\n ExceptionNotification::HooksNotifier.deliver_exception_to_web_hooks(ExceptionNotification::Notifier.config, exception, self, request, data, the_blamed) if send_web_hooks\n pass_it_on(exception, ENV, verbose)\n end\n to_return\n end", "def skipping_setup\n @fast_skipped = false\n bt = nil\n begin\n if self[:fast_pending]\n bt, message = self[:fast_pending]\n pend(message)\n elsif self[:fast_omit]\n bt, message, cond_block = self[:fast_omit]\n omit_if(cond_block.call(self), message)\n end\n rescue PendedError, OmittedError => e\n @fast_skipped = true\n # We reset the backtrace to point to the line where the pend/omit call was\n # originally made.\n e.set_backtrace(bt) if bt\n raise e\n end\n end", "def notify(notice)\n if @config.use_dogapi?\n notify_dogapi_event(notice) if @config.send_event\n notify_dogapi_metric(notice) if @config.send_metric\n end\n if @config.use_statsd?\n notify_statsd_event(notice) if @config.send_event\n notify_statsd_metric(notice) if @config.send_metric\n end\n end", "def exceptions\n end", "def warned; end", "def do_NOTICE(u, msg)\n ircsend(\":#{@server.sid}AAAAAA NOTICE #{u.is_a?(User) ? u.uid : u} :#{msg}\", @conn)\n end", "def wont_send(send_array, msg=nil)\n ExecutionAssay.refute!(:message=>msg, :backtrace=>caller) do\n self.__send__(*send_array)\n end\n end", "def send_message\n send unless SuppressionList.include?(receiver)\n end", "def refute_exception\n yield\n rescue StandardError => e\n flunk e.message\n end", "def user_not_authorized(exception)\n policy_name = exception.policy.class.to_s.underscore\n\n flash[:error] = t \"#{policy_name}.#{exception.query}\", scope: \"pundit\", default: :default\n redirect_to(request.referrer || root_path)\n end", "def on_notice(&block)\n @notice_handler = block\n end", "def rescue_action(e) raise e end", "def rescue_action(e) raise e end", "def rescue_action(e) raise e end", "def rescue_action(e) raise e end", "def refute_nothing_raised(msg=nil, &block)\n RescueAssay.assert!(Exception, :message=>msg, :backtrace=>caller, &block)\n end", "def user_not_authorized(exception)\n respond_to do |format|\n if @current_user\n flash.now[:warning] = exception_message(exception)\n format.any(:json, :js) { render json: { error: t('pundit.default'), status: :not_authorized } }\n format.html { render 'users/sessions/access_denied', layout: @layout ? 
false : current_layout }\n else\n format.any(:html, :json, :js) do\n flash[:warning] = exception.to_s\n redirect_to(root_path)\n end\n end\n end\n end", "def no_500_error!\n return true if browser.all(:css, 'head title', :text => 'Internal Server Error').empty?\n sleep 30 if ENV['GIMME_CRAP']\n raise Unexpected500, browser.body\n end", "def bite\n raise @_error if @_error\n before\n response(handle_call)\n rescue Exception => e\n code, error = 500, e\n if Lunetas::Error::BaseError === e\n code = e.code\n elsif development?\n error = \"Error: #{e.message}\\nBacktrace: #{e.backtrace.join(\"\\n\")}\"\n end\n response(error, code)\n end", "def do_ignore\n puts \"IGNORING <#{@line}> FOR NOW\"\n end", "def miss_reason; end", "def propagate\n @propagate_exceptions = true\n end", "def assert_nothing_raised(msg=nil, &block)\n RescueAssay.refute!(Exception, :message=>msg, :backtrace=>caller, &block)\n end", "def unavailable! redirect = root_path\n raise Exceptional::NotAllowed.new(\"Sorry, I was unable to perform the action you requested!\")\n end", "def safe(quiet = false)\n begin\n yield\n rescue NoFreeConnectionError => e\n disable!\n custom_data['disabled_reason'] = :no_free_connections\n custom_data['disabled_description'] = 'Cannot find free connection or create new one'\n logger.error \"#{e}\\n#{e.backtrace}\" \n rescue => e\n unless quiet\n disable!\n custom_data['disabled_reason'] = :uncaught_error\n custom_data['disabled_description'] = \"Uncaught error #{e}. See logs for details\"\n end\n logger.error \"Unspecified error #{e}\\n#{e.backtrace}\" \n end\n end", "def user_not_authorized(exception)\n if exception.is_a? String\n flash[:error] = t \"#{exception}\", scope: \"pundit\", default: :default\n else\n policy_name = exception.policy.class.to_s.underscore\n flash[:error] = t \"#{policy_name}.#{exception.query}\", scope: \"pundit\", default: :default\n end\n\n redirect_to(request.referrer || root_path)\n end", "def fatal; end", "def user_not_authorized(exception)\n policy_name = exception.policy.class.to_s.underscore\n\n flash[:error] = t \"#{policy_name}.#{exception.query}\", scope: \"pundit\", default: :default\n redirect_to root_path\n end", "def rescue_action_in_public(exception) #:doc:\n render_optional_error_file response_code_for_rescue(exception)\n end", "def rescue_action_in_public(exception) #:doc:\n render_optional_error_file response_code_for_rescue(exception)\n end", "def rescue_action_in_public(exception)\n case exception\n when ActiveRecord::RecordNotFound, ActionController::UnknownAction\n render(:file => \"#{Rails.root}/public/404.html\", :status => \"404 Not Found\")\n else\n render(:file => \"#{Rails.root}/public/500.html\", :status => \"500 Error\")\n SystemMailer.deliver_exception_notification(self, request, exception)\n end\n end" ]
[ "0.73556817", "0.73556817", "0.6492782", "0.61551136", "0.6142922", "0.6117215", "0.61099255", "0.6016112", "0.59590274", "0.59373856", "0.58329886", "0.57911223", "0.574737", "0.5742679", "0.5699931", "0.5695802", "0.5695802", "0.5661375", "0.5633226", "0.5614445", "0.5594372", "0.5593547", "0.557108", "0.557103", "0.55402017", "0.55362976", "0.552661", "0.549727", "0.5474369", "0.5465442", "0.5424276", "0.5421926", "0.5421377", "0.5421377", "0.5408485", "0.540354", "0.5401031", "0.539611", "0.5395976", "0.5393941", "0.53860354", "0.53596884", "0.53513175", "0.53435785", "0.53263867", "0.5318683", "0.5316671", "0.52935827", "0.5262539", "0.5242354", "0.5238856", "0.5235595", "0.5225105", "0.5204906", "0.51987374", "0.51954126", "0.51742744", "0.5169808", "0.5141012", "0.513546", "0.5135068", "0.51171595", "0.5115649", "0.5107662", "0.51045096", "0.5101553", "0.5093802", "0.50837064", "0.5083663", "0.5082335", "0.5077683", "0.5075004", "0.50724524", "0.5067136", "0.50632024", "0.50495696", "0.5048057", "0.50396556", "0.5037667", "0.50370693", "0.50270873", "0.50270873", "0.50270873", "0.50270873", "0.50209326", "0.5013226", "0.50122714", "0.50119287", "0.500871", "0.5007068", "0.50067425", "0.50066566", "0.50014037", "0.49990976", "0.4997784", "0.49975973", "0.49944526", "0.49896443", "0.49896443", "0.49750876" ]
0.65031064
2
Print a message at the top of the applciation's logs to say we're ready.
def report_for_duty! application = reporter.announce if application info("Configured correctly and ready to handle exceptions for '#{application}'") else error("Failed to report for duty, your application failed to authenticate correctly with stdin.crashlog.io") end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def report_ready\n self.logger.info \"Opbeat #{VERSION} ready to catch errors\"\n end", "def report_ready\n write_verbose_log(\"Notifier #{VERSION} ready to catch errors\", :info)\n end", "def report_ready\n self.logger.info \"Raven #{VERSION} ready to catch errors\"\n end", "def wait_until_ready\n # this method may be left unimplemented if that is applicable log\n end", "def notify_ready\n if state == :smoking\n puts \"#{name} says: I'm good on smokes, thanks\"\n else\n transition :procuring\n end\n end", "def ready\n Souffle::Log.info \"#{@node.log_prefix} Is ready for provisioning...\"\n end", "def notify_ready\n notify(\"READY=1\")\n end", "def send_ready_notification\n\n end", "def log_start\n @notifier.log \"#{identifier}: Waiting for #{describe_query}\".strip, level: :collect\n end", "def conclusion\n separator\n puts \"#{@app_name} successfully deployed! URL: http://#{app_name}\"\n puts \"\\n\\n\"\n end", "def start_message\n message = @matches ? @matches.join(' ') : \"all scenario's\"\n UI.info(\"Alfred: Serving #{message}\", :empty_line_before => true)\n end", "def log_stuff\r\n log.info(\"TestLogger is here to log stuff.\")\r\n log.warn(\"TestLogger is finishged logging. Be careful.\")\r\n end", "def setup_complete\n puts green(\"Setup complete\")\n end", "def print_welcome_message\n puts \"Welcome to Movie Bookings\"\n puts \"------------------------\"\n end", "def on_app_initializing(_event)\n info \"Initializing Karafka framework #{::Process.pid}\"\n end", "def on_is_standalone\n STDOUT << \"on_is_standalone\\n\"\n STDOUT.flush\n end", "def log_startup\n log_environment\n log_dispatcher\n log_app_name\n end", "def welcome_message\n puts \"Welcome to Everything but the Kitchen Sink!\"\n end", "def logs message\n puts \"#{Time.now}: #{message}\"\n end", "def log!\n Logger.message \"#{ self.class } started notifying about the process.\"\n end", "def on_startup\n\t\t# Check for modules that failed to load\n\t\tif (framework.modules.failed.length > 0)\n\t\t\tprint_error(\"WARNING! The following modules could not be loaded!\")\n\t\t\tframework.modules.failed.each_pair do |file, err|\n\t\t\t\tprint_error(\"\\t#{file}: #{err}\")\n\t\t\tend\n\t\tend\n\t\tframework.events.on_ui_start(Msf::Framework::Revision)\n\n\t\t# Build the banner message\n\t\trun_single(\"banner\")\n\t\tself.on_command_proc = Proc.new { |command| framework.events.on_ui_command(command) }\n\tend", "def on_app_initializing(_event)\n info 'Initializing Karafka framework'\n end", "def print_init_status\n\t\tset_status(@ctx_init_id, \"by #{RsConfig::AUTHOR_FULLNAME} #{RsConfig::AUTHOR_PROMO} - (c) #{Time.now.year}\")\n\tend", "def start_messages\n Server.log.info(\"* Worker count: #{@configs.count}\")\n end", "def log!\n Logger.info \"#{ notifier_name } started notifying about the process.\"\n end", "def empty_log()\n error = TTY::Box.warn(\"Sorry log is empty. Return to main menu to add a book to this log.\")\n puts error\nend", "def print message=nil\n log(:info, message) unless Qcmd.silent?\n end", "def autoflush_log; end", "def autoflush_log; end", "def showInformation()\n print(\"Starting up the scraper for the RAND Terrorism Incident Database. The flashing numbers that will appear represent written incidents. It will take a few moments for the initial program to load... 
\\n\");\nend", "def welcome\n __log_activity\n __debug_route\n end", "def startup_log\n return if ENV['SPLITCLIENT_ENV'] == 'test'\n\n @logger.info(\"Loaded Ruby SDK v#{VERSION} in the #{@mode} mode\")\n @logger.info(\"Loaded cache class: #{@cache_adapter.class}\")\n end", "def success\n @log[Date.today.iso8601] = 0\n\n puts \"Well done, #{@username}! You haven't smoked for #{@log.count { |value| value = 0 }} days! \"\n end", "def print\n GoHiring::Config[\"messages\"][\"hello\"] % settings\n end", "def logme\n\t\tlogger.debug \"============Main ApplicationController get calls at #{Time.now}\"\n\tend", "def print_status(msg)\n puts \"\" if verbose_mode?\n puts \"#{color('----->', 32)} #{msg}\"\n end", "def on_ready\n end", "def status_notice\n today = Date.current\n status_notice = \"#{self.name}\"\n case\n when today > start_on && today < finished_on\n status_notice << \" is RUNNING NOW.\"\n when today == start_on\n status_notice << \" starts TODAY.\"\n when today < start_on\n status_notice << \" starts on #{start_on.strftime(\"%A, %d %B, %Y\")}.\"\n when today > finished_on\n status_notice << \" is completed.\"\n when start_on.nil?\n status_notice << \" has no start date.\"\n else\n status_notice << '.'\n end\n end", "def ready_status\n 'ready' if notification\n end", "def post_init\n # puts 'Syslogger initialized'\n end", "def on_hello_message( info, * )\n\t\tself.log.info \"Connected. Waiting for an assembly to build.\"\n\t\tsuper\n\tend", "def print_status(msg='')\n end", "def print_status(msg)\n\t\tself.module.print_status(msg)\n\tend", "def ready; end", "def ready; end", "def setup_log\r\n logfile = File.open(\"/home/englandk/rails_apps/reminders/log/auto_enquiry.log\", 'a')\r\n \r\n @log = AuditLogger.new(logfile)\r\n @log.level = Logger::INFO\r\n @log.debug \"Started auto response email run\"\r\n end", "def log(msg)\n puts(\"#{Time.now}> #{msg}\")\n end", "def welcome_message\n message = \"Welcome to Locavore Kitchen!\"\n end", "def start_message\n ActiveHook.log.info(\"* Worker #{@id} started, pid: #{@pid}\")\n end", "def log msg\n puts \"U:Service: #{msg}\"\n end", "def preInit\n # Pre-Startup things go here. 
THIS RUNS BEFORE ANY SCREEN DRAWING OR ANYTHING!!!\n # Determines Debug mode\n # Change CONST_VERBOSE in globalVars.rb to switch from verbose to nonverbose.\n DEBUG.new(CONST_VERBOSE)\n\n #Syntax for console out function is <message> <message severity from 0-4> <should it be displayed only in verbose mode?>\n DEBUG.cout(\"Debugging has been loaded, initial cout here.\", 0, false)\n\n\n #Signal end of Preinitialization code\n DEBUG.cout(\"PreInit Finished!\", 0, false)\nend", "def log(msg)\n puts msg unless ENV['TEST']\n end", "def info(msg) log(6, msg); end", "def log(msg)\n Serv::Log.info(Time.now.to_s+': '+msg+\"\\n\")\n end", "def printComeBackMessage\n puts \"Then come back and run the program again.\"\n end", "def putsNow(msg)\n\t\tprintState = @out.sync\n\t\[email protected] = true\n\t\[email protected](msg)\n\t\[email protected] = printState\n\tend", "def wait_for_ready\n sleep 0.1 until ready?\n end", "def log_action msg\n\t\t\t\tputs '=> ' + msg\n\t\t\tend", "def log(msg)\n puts msg\n $stdout.flush\n end", "def wait\n @log.clear\n end", "def print_status(msg = '')\n $stderr.puts \"[*] #{msg}\"\n end", "def alert(msg) log(1, msg); end", "def log(msg)\n if ENV['LAUNCHY_DEBUG'] == 'true' then\n $stderr.puts \"LAUNCHY_DEBUG: #{msg}\"\n end\n end", "def push_notify_ready\n droid_destinations = patient.devices.map do |device|\n device.token if device.platform == 'android' && device.enabled\n end\n data = {:type => \"READY\", :message =>\n \"Your appointment with #{clinic.name} at #{delayed_date_time_ampm} is ready\" }\n Thread.new do\n GCM.send_notification(droid_destinations, data) unless droid_destinations.empty?\n end\n end", "def notify_ready\n # Make sure to raise the event\n raise_event(ReadyEvent.new(self))\n LOGGER.good 'Ready'\n\n # Tell the run method that everything was successful\n @ws_success = true\n end", "def print_good(msg='')\n end", "def start_message\n Server.log.info(\"* Worker #{@id} started | pid: #{@pid} | namespace: #{@config.namespace}\")\n end", "def printUsageMessage()\n\n print \"Check the last NAMD log file in a directory to make sure the run either completed\\n\"\n print \"successfully or died after all necessary restart files were written. Then restart\\n\"\n print \"the run or print an error message.\\n\"\n print \" autoNamdRun.rb <configFile>\\n\"\n print \" <configFile> - file listing all NAMD jobs to check\\n\"\n\n return\nend", "def event_startup()\n @var[:start_time] = Time.now\n\n # Log the startup\n dispatch :log, \"Server startup (#{@port})\"\nend", "def display_welcome\n\t\tputs \"\\t**************************\"\n\t\tputs \"\\t*** Welcome To Hangman ***\"\n\t\tputs \"\\t**************************\"\n\tend", "def INFO(msg)\n if DEBUG\n puts \"INFO: \" + msg\n end\nend", "def footer\n puts\n puts \"Need an expert to analyze your application?\"\n puts \"Mail to #{link('[email protected]')} or visit us at #{link('http://railsdoctors.com')}.\"\n line(:green)\n puts \"Thanks for using #{colorize('request-log-analyzer', :white, :bold)}!\"\n end", "def generate_log(message, start = false)\n if start\n puts \"#{message}\"\n else\n puts \"\\t#{message}\"\n end\nend", "def log_complete\n @notifier.log \"#{identifier}: Done\", level: :collect\n end", "def log(msg)\n puts msg\n $stdout.flush\n end", "def intro\n puts \"\\n\\n#{\"Welcome to the Amiibo Selection!\".colorize(:cyan)}\\n\\n\"\n puts \"Gathering all Amiibo Results...one moment...\"\n puts \"How do you pronouce Amiibo? 
Hyphenate: A-me-bo\\n\\n\"\n puts \"--------------------------------------------------------------------------------\\n\\n\"\n sleep(1)\n end", "def status(msg = '')\n\t\toutput.print_status(msg)\n\tend", "def start\n \tputs \"\"\n \tputs \"Here are the current Maximum Fun Comedy podcasts:\"\n \tsleep(1)\n \tputs \"\"\n \tprint_podcast_list \n puts \"\"\n more_info\n end", "def ready?\n true\n end", "def log_message_for(action)\n case action\n when :up then \"Bringing up #{description}\"\n when :halt then \"Halting #{description}\"\n when :destroy then \"Destroying #{description}\"\n when :reload then \"Reloading #{description}\"\n when :suspend then \"Suspending #{description}\"\n when :resume then \"Resuming #{description}\"\n else nil\n end\n end", "def running_logs()\n if @container.nil?\n 'not yet'\n else\n wait_for_start_up\n @container.logs_container\n end\n end", "def log(message)\n puts \"[ dev/rails ] #{message}\"\nend", "def info_and_log(message)\n @ui.info(message)\n @logger.info(message)\n end", "def log(msg)\n puts(msg) if @info_values['enable_debug_logging'] == \"Yes\"\n end", "def on_app_running(_event)\n info \"Running in #{RUBY_DESCRIPTION}\"\n info \"Running Karafka #{Karafka::VERSION} server\"\n\n return if Karafka.pro?\n\n info 'See LICENSE and the LGPL-3.0 for licensing details.'\n end", "def logging\n @@logging ||= lambda { |msg| puts(\"#{Time.now} :minion: #{msg}\") }\n end", "def print(msg)\n info(msg)\n end", "def info_msg(msg)\n @log.puts(msg)\n puts msg\nend", "def up\n send_message('APP_UP')\n end", "def step_status(msg = '')\n\t\toutput.print_status(\"#{pos}: #{msg}\") if (msg and msg.length > 0)\n\tend", "def start_petrol_pump\n puts \"Starting petrol pump\"\n end", "def ready?\n # In test, we're always ready :-)\n return true if ENV['INSTANA_GEM_TEST']\n\n @state == :announced\n end", "def create_status_bar\n status_bar.show_message(tr(\"Ready\"))\n end", "def show_log!\n @runopts[:show_log] = true\n end", "def ready?\n\t\t$stderr.puts \"#{@name}: ready? not overridden!\"\n\tend", "def start\n msg = \"guard-self_test is running...\"\n UI.info msg\n Notifier.notify(msg)\n end", "def _ready?\n true\n end", "def welcome_message\n\t\tputs \"Welcome/Bienvenue \" + self.name\n\tend", "def log(message)\n puts \"Worker ##{@id}: #{message}\" unless ENV['RACK_ENV'] == 'test'\n end", "def welcome_message\n puts \"\\n\"\n puts \"Welcome to Rock Paper Scissors\"\n end", "def send_ready_signal()\n do_send(Erlang::Tuple.new([Erlang::Atom.new('ready')]))\n end" ]
[ "0.6731967", "0.6669439", "0.64619195", "0.63470685", "0.6316203", "0.6254288", "0.62529546", "0.6133716", "0.61285174", "0.6105979", "0.6071732", "0.6045285", "0.6038749", "0.6029463", "0.59906834", "0.5976198", "0.5903672", "0.5876011", "0.5875308", "0.5844501", "0.58440995", "0.58303404", "0.5829493", "0.5818939", "0.581046", "0.5739771", "0.5727519", "0.5719011", "0.5719011", "0.5708064", "0.5697238", "0.56729764", "0.56704336", "0.56525886", "0.56363136", "0.5635108", "0.5620706", "0.56144893", "0.557863", "0.5540718", "0.5533842", "0.5522925", "0.549386", "0.54768896", "0.54768896", "0.54758036", "0.5473209", "0.5463494", "0.5463093", "0.54622984", "0.54552954", "0.54516447", "0.5449338", "0.5435902", "0.54335624", "0.54237974", "0.54223466", "0.5421193", "0.5418446", "0.54089767", "0.54086673", "0.5404279", "0.5401249", "0.540059", "0.53954524", "0.53910714", "0.5380044", "0.53737557", "0.53706646", "0.53664833", "0.53662515", "0.53645", "0.53591055", "0.5357234", "0.53555286", "0.53545314", "0.5350495", "0.53422326", "0.53406984", "0.5339761", "0.53292346", "0.5318837", "0.53123665", "0.53123164", "0.5306453", "0.53029025", "0.5298952", "0.52978057", "0.5295466", "0.5293586", "0.52924895", "0.5289754", "0.5285776", "0.52806073", "0.5278224", "0.52719796", "0.527085", "0.52678275", "0.52521616", "0.52447766", "0.52442306" ]
0.0
-1
Configure the gem to send notifications; at the very least, an api_key is required.
def configure(announce = false, &block)
  if block_given?
    yield(configuration)
    if live?
      self.reporter = CrashLog::Reporter.new(configuration)
      if configuration.valid?
        if announce.eql?(true)
          report_for_duty!
        else
          debug("Configuration updated successfully")
        end
      elsif !configuration.invalid_keys.include?(:api_key)
        error("Not configured correctly. Missing the following keys: #{configuration.invalid_keys.join(', ')}")
      end
    end
  end
  configuration
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initialize(api_key: nil)\r\n Configuration.api_key = api_key\r\n end", "def initialize(x_api_token: nil, x_api_email: nil)\r\n Configuration.x_api_token = x_api_token\r\n Configuration.x_api_email = x_api_email\r\n end", "def initialize(api_key, params = {})\n @filters = []\n @api_key = api_key\n\n notify_host = URI.parse(params[:notify_host] || DEFAULT_NOTIFY_HOST)\n @transport = params.delete :transport\n if @transport and not params[:notify_host]\n notify_host.scheme = 'https' if @transport.use_ssl?\n notify_host.host = @transport.address\n notify_host.port = @transport.port\n end\n\n @error_url = URI.parse(params.delete(:error_url) || \"#{notify_host}/notifier_api/v2/notices\")\n @deploy_url = URI.parse(params.delete(:deploy_url) || \"#{notify_host}/deploys.txt\")\n\n validate!\n end", "def service_notifications_api_key\n raise 'define in subclass'\n end", "def notify(event, message, *args)\n api_key = args.first.is_a?(String) || args.first.is_a?(Array) ? args.shift : self.api_key\n\n raise ConfigurationError, \"You must provide an API key to send notifications\" if api_key.nil?\n raise ConfigurationError, \"You must provide an application name to send notifications\" if application.nil?\n\n if args.first.is_a?(Fixnum)\n options = { :priority => args.shift, :delayed => args.shift || Prowler.delayed }\n else\n options = args.last.is_a?(Hash) ? args.pop : {}\n options = { :priority => Prowler::Priority::NORMAL, :delayed => Prowler.delayed }.merge(options)\n end\n\n options.merge!(\n :application => application, :providerkey => provider_key,\n :apikey => api_key, :event => event, :description => message\n )\n\n if options.delete(:delayed)\n enqueue_delayed_job(options)\n else\n perform(:add, options, :post, Success)\n end\n end", "def initialize(*args)\n @send_notifications = true\n\n if args.empty?\n CONFIG_ATTRS.each{ |attr| send(\"#{attr}=\".to_sym, Prowler.send(attr)) }\n elsif args.first.is_a?(Hash)\n CONFIG_ATTRS.each do |attr|\n send(\"#{attr}=\".to_sym, args[0][attr] || Prowler.send(attr))\n end\n else\n @service_url = Prowler.service_url\n @api_key, @application, @provider_key = args[0], args[1], args[2]\n end\n end", "def set_api_key(api_key)\n @api_key = api_key\n end", "def initialize(api_key)\n @sg = SendGrid::API.new(api_key: api_key)\n end", "def config(api_id, api_key, options={})\n @api_id = api_id\n @api_key = api_key\n @api_endpoint = URI.parse(options.delete(:url))\n @api_options = options\n end", "def config(api_id, api_key, options={})\n @api_id = api_id\n @api_key = api_key\n @api_endpoint = URI.parse(options.delete(:url))\n @api_options = options\n end", "def configure\n reconfigure_notifier\n end", "def configure\n reconfigure_notifier\n end", "def initialize(api_key=nil)\n @api_key = api_key || ENV['MESSAGEPUB_API_KEY']\n self.class.basic_auth @api_key, 'password'\n end", "def initialize(options = {})\n super\n if Hash === settings[:'x-smtpapi']\n settings[:'x-smtpapi'] = JSON.dump(settings[:'x-smtpapi'])\n end\n end", "def initialize(email, password, apikey)\n @config, @config[:email], @config[:password],@config[:apikey] = {}, email, password,\"?app_key=#{apikey}\"\n end", "def ensure_configured\n raise ApiKeyError unless config.api_key\n raise AppKeyError unless config.app_key\n end", "def initialize(api_key, options = {})\n @api_key = api_key\n @api_endpoint = DEFAULT_API_ENDPOINT\n end", "def initialize(api_key, api_secret)\n @api_key = api_key\n @api_secret = api_secret\n @api_base_url = 'https://pepocampaigns.com'\n end", "def 
set_api_key\n\t\tself.api_key = ApiKey.create()\n\tend", "def initialize(api_key)\n @api_key = api_key \n end", "def initialize(api_key)\n @api_key = api_key\n end", "def set_settings(args)\n XingApi::Client.configure do |config|\n config.consumer_key = args[:consumer_key]\n config.consumer_secret = args[:consumer_secret]\n config.oauth_token = args[:oauth_token]\n config.oauth_token_secret = args[:oauth_token_secret]\n end\n end", "def initialize( api_key )\n @api_key = api_key\n end", "def initialize(api_key:)\n @api_key = api_key\n end", "def weechat_init\n Weechat.register(\"push_notifications\", \"0.1\", \"\", \"Send notifications using appnotifications REST API.\")\n\tWeechat.add_message_handler(\"weechat_highlight\", \"highlight\")\n Weechat.add_message_handler(\"privmsg\", \"pv\")\n check_config ? Weechat::PLUGIN_RC_OK : Weechat::PLUGIN_RC_KO\nend", "def init\n if GoGoGibbon::Config.api_key.blank?\n @error = \"MailChimp not installed on this server.\"\n elsif params[:key].blank?\n @error = \"Please supply your MailChimp API key for verification.\"\n elsif params[:key] != GoGoGibbon::Config.api_key\n @error = \"Supplied MailChimp API key does not match server key.\"\n else\n @result = GoGoGibbon::Commands.subscribe_set User.Mailable\n end\n end", "def initialize (api_key)\n\t\t@params = {\n\t\t\t\"key\" => api_key,\n\t\t\t\"wrapper\" => \"cleverbotrb\"\n\t\t}\n\t\t@endpoint = ENDPOINT\n\tend", "def cmd_notify_help\n\t\t\t\tputs \"Run notify_set_user, notify_set_webhook, and notify_set_source to setup Slack config. Then run notify_save to save them for later. Use notify_test to test your config and load it from the YAML file in the future. Finally, run notify_start when you have your listener setup.\"\n\t\t\tend", "def initialize(apikey)\n @apikey = apikey\n end", "def config_honey_badger\n Honeybadger::Api.configure do |c|\n c.access_token = self.access_token\n end\n end", "def initialize(api_key)\n @api_key = api_key\n end", "def initialize(api_key)\n @api_key = api_key\n end", "def initialize(api_key)\n @api_key = api_key\n end", "def config\n {\n :nagyo_host => \"http://0.0.0.0:3000\",\n :nagyo_auth_token => 'nagyo-token-test-user'\n }\n end", "def enable_honeybadger(**config)\n Bundler.require(:honeybadger)\n Honeybadger.configure do |config_klass|\n config.each do |k, v|\n if k == :before_notify\n config_klass.send(k, v)\n else\n config_klass.send(:\"#{k}=\", v)\n end\n end\n end\n end", "def api_key; @opts[:api_key]; end", "def notify\n send_email_notification if allow_email_notifications\n send_slack_notification if allow_slack_notifications\n end", "def initialize(x_auth_token: '123abcqwerty')\r\n Configuration.x_auth_token = x_auth_token\r\n end", "def initialize(api_key)\n @options = { headers: { 'Authorization' => \"token #{api_key}\" } }\n end", "def cmd_notify_pushover_set_app_key(*args)\n\t\t\t\tif args.length > 0\n\t\t\t\t\tprint_status(\"Setting pushover app API key to #{args[0]}\")\n\t\t\t\t\t@app_key = args[0]\n\t\t\t\telse\n\t\t\t\t\tprint_error(\"Please provide a value\")\n\t\t\t\tend\n\t\t\tend", "def initialize(x_cisco_meraki_api_key: nil)\r\n Configuration.x_cisco_meraki_api_key = x_cisco_meraki_api_key if\r\n x_cisco_meraki_api_key\r\n end", "def init_alipaymini_config\n Singleton.__init__(AlipayMini::Config)\n\n @url = \"https://openapi.alipaydev.com/gateway.do\"\n @app_id = '2015102700040153'\n\n pkey = OpenSSL::PKey::RSA.new(2048)\n @private_key = remove_start_end_for_key(pkey.to_s)\n @public_key = remove_start_end_for_key(pkey.public_key.export)\n\n 
AlipayMini.configure do |c|\n c.url = @url\n c.app_id = @app_id\n c.private_key = @private_key\n c.public_key = @public_key\n end\nend", "def setup\n FileUtils.cp(VALID_CONFIG_JSON_ORIG, CONFIG_JSON)\n @config = Thermoserver::Configuration.new\n @api_key = @config.api_key\n end", "def api_credentials(api_client)\n @api_client = api_client\n return if @api_client.contact_email.blank?\n\n @api_docs = Rails.configuration.x.application.api_documentation_overview_url\n\n @name = (@api_client.contact_name.presence || @api_client.contact_email)\n\n @helpdesk_email = helpdesk_email(org: @api_client.org)\n\n I18n.with_locale I18n.default_locale do\n mail(to: @api_client.contact_email,\n subject: format(_('%{tool_name} API changes'), tool_name: tool_name))\n end\n end", "def initialize email, api_key\n @email = email\n @api_key = api_key\n end", "def declare_configuration_options\n ws.config.declare \"daemon_polling_period\", \"string\",\n default: \"60\",\n doc: \"Enter the github polling period\"\n\n ws.config.declare \"daemon_buildbot_host\", \"string\",\n default: \"localhost\",\n doc: \"Enter builbot host/ip\"\n\n ws.config.declare \"daemon_buildbot_port\", \"string\",\n default: \"8010\",\n doc: \"Enter buildbot http port\"\n\n ws.config.declare \"daemon_project\", \"string\",\n default: File.basename(ws.root_dir),\n doc: \"Enter the project name\"\n\n ws.config.declare \"daemon_max_age\", \"string\",\n default: \"120\",\n doc: \"Enter events and pull requests max age\"\n end", "def send_notifications\n end", "def initialize(api_key)\n @api_key = api_key\n check_api_key!\n end", "def api_key=(val)\n @api_key = val\n end", "def setup\n @watchlist = YAML.load_file(WATCHLIST)\n @config = YAML.load_file(CONFIG)\n\n @transmission_api_client = TransmissionApi::Client.new(\n :url => @config['transmission_api_client']['url'],\n :username => @config['transmission_api_client']['username'],\n :password => @config['transmission_api_client']['password']\n )\n end", "def add_team_specific_notifier(team_postfix, api_key)\n if api_key.nil? || api_key.length == 0\n Honeybadger.instance_eval do\n define_singleton_method(\"notify_#{team_postfix}\") do |exception, options = {}|\n notify(exception, options)\n end\n end\n else\n Honeybadger.instance_eval do\n define_singleton_method(\"notify_#{team_postfix}\") do |exception, options = {}|\n notify(exception, options.merge({ tn_team: team_postfix }))\n end\n end\n end\n end", "def notify(token, notification = nil)\n client.notify app_id, token, notification\n end", "def initialize(api_key=nil)\n @api_key = api_key || ENV['KULER_API_KEY'] || raise(ArgumentError, 'no API key found')\n end", "def api_key\n config.api_key\n end", "def api_key\n config.api_key\n end", "def update_notification_config(cfg)\n http_put(notification_config_url(), cfg)\n end", "def api_key\n configuration.api_key\n end", "def api_key=(value)\n puts \"DEPRECATED: PigCI.com API has been retired, you no longer need to set config.api_key in your spec/rails_helper.rb file.\"\n end", "def initialize(options = {})\n super\n @api_key = settings.delete(:api_key)\n @async = settings.delete(:async) || false\n @ip_pool = settings.delete(:ip_pool)\n @send_at = settings.delete(:send_at)\n unless @send_at.nil? 
or String === @send_at\n @send_at = @send_at.utc.strftime('%Y-%m-%d %T')\n end\n end", "def send_notification(method, params); end", "def send_notification\n\n # TODO this seems hacky\n if Rails.env.development?\n apn = Houston::Client.development\n else\n apn = Houston::Client.production\n end\n\n apn.certificate = AWS::S3.new.buckets[ENV['S3_BUCKET_NAME']].objects['private/cert.pem'].read\n\n token = self.user.apn_token\n\n notification = Houston::Notification.new(device: token)\n notification.alert = \"You won a free drink!\"\n\n apn.push(notification)\n end", "def send_notification\n\n\n end", "def configure(jira_url:, jira_username:, jira_api_token:)\n warn \"danger-jira_sync plugin configuration is missing jira_url\" if jira_url.blank?\n warn \"danger-jira_sync plugin configuration is missing jira_username\" if jira_username.blank?\n warn \"danger-jira_sync plugin configuration is missing jira_api_token\" if jira_api_token.blank?\n\n @jira_client = JIRA::Client.new(\n site: jira_url,\n username: jira_username,\n password: jira_api_token,\n context_path: \"\",\n auth_type: :basic\n )\n end", "def api_key=(key)\n @@api_key = key\n end", "def api_key; end", "def api_key; end", "def initialize(api_key, options = {})\n $api_key = api_key\n $redis = options[:redis] || Redis.new\n $debug = options[:debug] || false\n end", "def initialize api_key, api_secret\n @api_key = api_key\n @api_secret = api_secret\n end", "def setup(args={})\n super\n @conf_key = (args[:config_key] || :http_endpoints).to_sym\n set_points\n end", "def config_github(api_key)\n @github = GitHub.new(api_key)\n end", "def cmd_notify_start\n\t\t\t\tprint_status \"Session activity will be sent to you via Slack Webhooks, channel: #{@channel}\"\n\t\t\t\tif read_settings()\n\t\t\t\t\tself.framework.events.add_session_subscriber(self)\n\t\t\t\t\tprint_good(\"Notify Plugin Started, Monitoring Sessions\")\n\t\t\t\telse\n\t\t\t\t\tprint_error(\"Could not set Slack Web API settings.\")\n\t\t\t\tend\n\t\t\tend", "def push_notifications\n # post \"/push\" do\n Webpush.payload_send(\n message: params[:message],\n endpoint: params[:subscription][:endpoint],\n p256dh: params[:subscription][:keys][:p256dh],\n auth: params[:subscription][:keys][:auth],\n vapid: {\n subject: \"mailto:[email protected]\",\n public_key: ENV['VAPID_PUBLIC_KEY'],\n private_key: ENV['VAPID_PRIVATE_KEY']\n }\n )\n end", "def setup\n redmine_base_url=ENV['REDMINE_BASE_URL']\n redmine_api_key=ENV['REDMINE_API_KEY']\n slack_webhook_url=ENV['SLACK_WEBHOOK_URL']\n verbose = ENV['VERBOSE'] == '1'\n slack_off = ENV['SLACK_OFF'] == '1'\n\n kv = KeyValueStore.new(ENV[\"DATABASE_URL\"])\n rAPI = RedmineSlack::RedmineAPI.new(redmine_base_url, redmine_api_key, verbose:verbose)\n slack_api = RedmineSlack::SlackAPI.new(slack_webhook_url, verbose:verbose)\n slack_api.channel_override = ENV['SLACK_CHANNEL_OVERRIDE']\n slack_api.enabled = !slack_off\n\n RedmineSlack::RedmineSlackGlue.new(rAPI, slack_api, kv, verbose:verbose)\nend", "def initialize(api_key)\n raise ArgumentError, 'api_key is required' if api_key == nil || api_key.empty?\n @api_key = api_key\n\t\tend", "def update_email_notification\n service_response = ClientManagement::UpdateEmailNotificationSetting.new(params).perform\n render_api_response(service_response)\n end", "def initialize(key = nil, api_key: nil)\n @api_key = key || api_key\n end", "def test_smtp_publisher_config_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: 
DefaultApi.test_smtp_publisher_config ...'\n end\n # resource path\n local_var_path = '/v1/notification/publisher/test/smtp'\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['destination'] = opts[:'destination'] if !opts[:'destination'].nil?\n\n # http body (model)\n post_body = opts[:body] \n\n # return_type\n return_type = opts[:return_type] \n\n # auth_names\n auth_names = opts[:auth_names] || ['X-Api-Key']\n\n new_options = opts.merge(\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DefaultApi#test_smtp_publisher_config\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def initialize\n @api_key = ENV['PETFINDER_API_KEY']\n end", "def configure\n Razorpay.setup(publishable_key, secret_key)\n end", "def api_key=(key)\n\t\t\t\t\t@@api_key = key\n\t\t\t\tend", "def api_key\n 'your_api_key'\nend", "def alternative_config!\n JabberAdmin.configure do |conf|\n conf.username = '[email protected]'\n conf.password = 'defaultpw'\n conf.url = 'http://jabber.local/api'\n end\nend", "def initialize\n # These require statements are intentionally placed here to initialize\n # the gRPC module only when it's required.\n # See https://github.com/googleapis/toolkit/issues/446\n require \"gapic/grpc\"\n require \"google/monitoring/v3/notification_service_services_pb\"\n\n # Create the configuration object\n @config = Configuration.new Client.configure\n\n # Yield the configuration if needed\n yield @config if block_given?\n\n # Create credentials\n credentials = @config.credentials\n # Use self-signed JWT if the endpoint is unchanged from default,\n # but only if the default endpoint does not have a region prefix.\n enable_self_signed_jwt = @config.endpoint == Configuration::DEFAULT_ENDPOINT &&\n [email protected](\".\").first.include?(\"-\")\n credentials ||= Credentials.default scope: @config.scope,\n enable_self_signed_jwt: enable_self_signed_jwt\n if credentials.is_a?(::String) || credentials.is_a?(::Hash)\n credentials = Credentials.new credentials, scope: @config.scope\n end\n @quota_project_id = @config.quota_project\n @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? 
:quota_project_id\n\n @notification_channel_service_stub = ::Gapic::ServiceStub.new(\n ::Google::Cloud::Monitoring::V3::NotificationChannelService::Stub,\n credentials: credentials,\n endpoint: @config.endpoint,\n channel_args: @config.channel_args,\n interceptors: @config.interceptors\n )\n end", "def initialize\n @api_user = \"\"\n @api_key = \"\"\n end", "def cmd_notify_start\n\t\t\t\tprint_status \"Session activity will be sent to you via Slack Webhooks\"\n\t\t\t\tif read_settings()\n\t\t\t\t\tself.framework.events.add_session_subscriber(self)\n\t\t\t\t\tnotifier = Slack::Notifier.new @webhook_url, channel: @user_name, username: 'Meterpreter Helper'\n\t\t\t\t\tprint_good(\"Notify Plugin Started, Monitoring Sessions\")\n\t\t\t\telse\n\t\t\t\t\tprint_error(\"Could not set Slack Web API settings.\")\n\t\t\t\tend\n\n\t\t\tend", "def initialize(api_key)\n @client = Juicer::Client.new(api_key)\n end", "def initialize(api_key, options = {})\n # symbolize options keys\n options.replace(\n options.inject({}) { |m, (k, v)| m[(k.to_sym rescue k) || k] = v; m }\n )\n\n # defaults\n # host: piratemetrics.com\n # port: 80\n # enabled: true\n # synchronous: false\n @api_key = api_key\n @host, @port = options[:collector].to_s.split(':')\n @host = options[:host] || 'https://piratemetrics.com'\n @port = (options[:port] || 443).to_i\n @enabled = options.has_key?(:enabled) ? !!options[:enabled] : true\n @synchronous = !!options[:synchronous]\n @pid = Process.pid\n @allow_reconnect = true\n\n setup_cleanup_at_exit if @enabled\n end", "def auth_settings\n {\n 'api-key' =>\n {\n type: 'api_key',\n in: 'header',\n key: 'api-key',\n value: api_key_with_prefix('api-key')\n },\n }\n end", "def allow_api_key\n @api_key_allowed = true\n end", "def initialize(api_key, opts={})\n @api_key = api_key\n @https = opts[:https] ? 
opts[:https] : false\n end", "def initialize(api_key, email = nil)\n @params = Hash.new\n\n if email.nil?\n @params[:api_key] = api_key\n else\n @params[:api_key] = api_key\n @params[:email] = email\n end\n\n end", "def auth_settings\n {\n 'api_key' =>\n {\n type: 'api_key',\n in: 'header',\n key: 'API-Key',\n value: api_key_with_prefix('API-Key')\n },\n }\n end", "def start_configuration\n post \"/setup/api/configure\", password_hash\n end", "def initialize(api_key=nil)\n @api_key = api_key\n @api_key ||= SocialMediaMonitoring.api_key\n @api_path = ''\n\n end", "def configuration_options\n iq = connection.iq_stanza({'to'=>jid.bare},\n x('pubsub',{:xmlns => EM::Xmpp::Namespaces::PubSubOwner},\n x('configure','node' => node_id)\n )\n )\n\n send_iq_stanza_fibered iq\n end", "def notification\n @notification ||= APND::Settings::Notification.new\n end", "def apikey=(apikey)\n @apikey = apikey\n @connection = nil\n end", "def notification=(options = {})\n if options.respond_to?(:keys)\n notification.port = options[:port] if options[:port]\n notification.host = options[:host] if options[:host]\n end\n end", "def initialize(*args)\n raise ArgumentError, \"Requires at least the api_key and secret_key when instatiating\" if args.size == 0\n\n base_url = 'https://api.att.com'\n\n if args.size == 1 && args[0].instance_of?(Hash)\n args = args.shift\n @api_key = args[:api_key]\n @secret_key = args[:secret_key]\n @base_url = args[:base_url] || base_url\n set_ssl_verify args[:ssl_verify]\n else\n @api_key = args.shift\n @secret_key = args.shift\n @base_url = args.shift || base_url\n set_ssl_verify args.shift\n end\n\n @grant_type = 'client_credentials'\n @access_token = ''\n @refresh_token = ''\n\n create_connection 'application/json'\n\n get_tokens\n\n Actor.current\n end", "def send_notifications\n if self.cert.nil?\n raise APN::Errors::MissingCertificateError.new\n return\n end\n APN::App.send_notifications_for_cert(self.cert, self.id)\n end", "def check_key_valid\n @key_warning = false unless defined?(@key_warning)\n if !configuration.valid_api_key? && !@key_warning\n configuration.warn(\"No valid API key has been set, notifications will not be sent\")\n @key_warning = true\n end\n end" ]
[ "0.63896275", "0.63179916", "0.62401325", "0.6159049", "0.60380244", "0.6029999", "0.59597945", "0.5914346", "0.5847085", "0.5847085", "0.5821589", "0.5821589", "0.58030576", "0.57551575", "0.57129145", "0.57003975", "0.56888735", "0.5673955", "0.5665071", "0.56439394", "0.5607207", "0.5595639", "0.559203", "0.558584", "0.5584425", "0.55678004", "0.5561643", "0.5560653", "0.55449665", "0.55404466", "0.55298877", "0.55298877", "0.55298877", "0.5519168", "0.5509689", "0.54894173", "0.54891086", "0.5487754", "0.5474589", "0.5474034", "0.54729384", "0.5461823", "0.5450787", "0.54364413", "0.54328984", "0.54273635", "0.5424515", "0.54037833", "0.5398735", "0.53982586", "0.5397616", "0.5391901", "0.53909636", "0.539024", "0.539024", "0.5389263", "0.53867584", "0.5384469", "0.53798014", "0.53780377", "0.53743976", "0.53694016", "0.53683597", "0.536339", "0.53626966", "0.53626966", "0.534835", "0.5340989", "0.533484", "0.53149307", "0.5306284", "0.53006536", "0.5300649", "0.52982956", "0.5294524", "0.52914506", "0.52892655", "0.52819926", "0.52819216", "0.52812535", "0.5258491", "0.52552253", "0.52547795", "0.52499163", "0.5235807", "0.5225553", "0.52210855", "0.521956", "0.5214343", "0.520378", "0.51791537", "0.5178795", "0.5176751", "0.5176398", "0.51757497", "0.5169547", "0.5168851", "0.51487404", "0.5144127", "0.5141669", "0.51293546" ]
0.0
-1
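Taken together, the query and method above describe a block-based setup in which only the api_key is mandatory. Below is a minimal usage sketch, assuming a CrashLog-style notifier whose configuration exposes an `api_key` writer; the environment variable name and the `true` announce flag are illustrative assumptions, not values taken from this row.

# Minimal sketch: the block yields the configuration object, and passing
# `true` asks the gem to announce itself (report_for_duty!) once configured.
CrashLog.configure(true) do |config|
  config.api_key = ENV.fetch("CRASHLOG_API_KEY") # the only required setting
end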
The global configuration object.
def configuration
  @configuration ||= Configuration.new
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def global_config\n @@global_config\n end", "def config\n @config ||= {}\n end", "def config\n @config ||= {}\n end", "def config\n @configuration ||= Configuration.new\n end", "def config\n @configuration ||= Configuration.new\n end", "def config\n @config ||= Config.new\n end", "def config\n @config ||= Config.new\n end", "def config\n @config ||= load_config\n end", "def global_config\n # This entire VM Pooler config\n @config\n end", "def config\n @config ||= Config.create ConfigLoader.new(root, CONFIG_FILE).to_hash, options.merge_config\n end", "def config\n @config ||= Configuration.new\n end", "def configuration\n @config ||= setup\n end", "def config\n @config ||= @module_config || {}\n end", "def config\n @config ||= @module_config || {}\n end", "def config\n @config ||= Smartgen::Configuration.new\n end", "def config\r\n Configuration\r\n end", "def config\r\n Configuration\r\n end", "def config\r\n Configuration\r\n end", "def config\r\n Configuration\r\n end", "def config\r\n Configuration\r\n end", "def config\r\n Configuration\r\n end", "def global_configurations\n @_global_configurations ||= []\n end", "def config\n @config ||= read_config\n end", "def configuration\n @configuration ||= Configuration.new()\n end", "def config\r\n @configuration\r\n end", "def config\n self\n end", "def config\n site.config\n end", "def configuration\n @_configuration ||= Configuration.new\n end", "def configuration\n @_configuration ||= Configuration.new\n end", "def config\n App.instance.load_project_config\n App.instance.config\n end", "def config\n unless @config\n @config = Configuration.new\n @config.reset\n end\n @config\n end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def config; end", "def configuration\n application.config\n end", "def configuration\n @configuration ||= Configuration.new(self)\n end", "def configuration\n {}\n end", "def config\n configuration\n end", "def config\n @_config ||= self.class.config.inheritable_copy\n end", "def config\n @config ||= { :force => false, :quiet => false }\n end", "def configuration\n @configuration ||= Configuration.new\n end", "def configuration; end", "def configuration; end", "def configuration; end", "def configuration; end", "def configuration; end", "def configuration\n self\n end", "def config\n @config\n end", "def config\n @config\n end", "def config\n @config\n end", "def configuration\n @configuration ||= Clever::Configuration.new\n end", "def config\n Troy.configuration\n end", "def c\n configuration\n end", "def config\n machined.config\n end", "def config\n Kitabu.config(root_dir)\n end", "def config\n\n end", "def config\n\t\t\t@app_class.config\n\t\tend", "def config\n Thread.current[CONFIG_THREAD_KEY] ||= ChartMogul::Configuration.new\n end", "def c\n configuration\n end", "def initialize\n @configuration = Configuration.new\n end", "def config\n @config ||= multi_config || single_config\n end", "def configuration\n @configuration ||= begin\n ConfigFile.new(GLOBAL_CONFIG).merge_file(LOCAL_CONFIG)\n rescue TypeError\n raise AssistedWorkflow::Error, \"Error on loading .awconfig files. 
Please check the content format.\"\n end\n end", "def config\n @config\n end", "def config\n boot.config\n end", "def config\n @config ||= begin\n conf = Bolt::Config.new(Bolt::Project.default_project, config_data)\n conf.modulepath = [modulepath].flatten\n conf\n end\n end", "def default_config\n self.class.default_config\n end", "def configuration\n @configuration ||= Configuration.new\n end", "def configuration\n @configuration ||= Configuration.new\n end", "def configuration\n @configuration ||= Configuration.new\n end", "def configuration\n @configuration ||= Configuration.new\n end", "def config\n @_config ||= Config.new\n yield @_config if block_given?\n @_config\n end", "def load_config\n # Nothing in base class. This should be used to load the configuration from\n # disk if saved to a file.\n configuration || {}\n end", "def configuration\n Configuration::get\n end", "def configuration\n Configuration::get\n end", "def config\n # This could be a single global but it doesn't use enough RAM or CPU\n # cycles that I care.\n @config ||= PoiseProfiler::Config.new\n end", "def get_config\n\t\tend" ]
[ "0.8466089", "0.81432164", "0.81432164", "0.7828079", "0.7828079", "0.7784814", "0.7784814", "0.77504766", "0.7748834", "0.7745521", "0.7709197", "0.77064747", "0.7683163", "0.7662794", "0.75887907", "0.75597256", "0.75597256", "0.75597256", "0.75597256", "0.75597256", "0.75597256", "0.75159", "0.7471388", "0.7462383", "0.74370104", "0.74364924", "0.74150616", "0.741365", "0.741365", "0.73799455", "0.7364934", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.73499066", "0.7345358", "0.73381114", "0.7335001", "0.7330016", "0.73235804", "0.7318131", "0.73134404", "0.72834045", "0.72834045", "0.72834045", "0.72834045", "0.72834045", "0.7270018", "0.72615814", "0.72615814", "0.72615814", "0.72493535", "0.7238667", "0.72180194", "0.7217243", "0.7203128", "0.7200704", "0.71959114", "0.7189556", "0.7186748", "0.7173506", "0.716667", "0.7159951", "0.71492565", "0.7147724", "0.7145123", "0.71234524", "0.7118641", "0.7118641", "0.7118641", "0.7118641", "0.71078855", "0.7107292", "0.7100624", "0.7100624", "0.70976895", "0.70965165" ]
0.73600733
37
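The accessor above lazily builds and memoizes a single `Configuration` instance. A small sketch of that behaviour, assuming the method is exposed on the gem's top-level module (the `CrashLog` receiver is an assumption):

first  = CrashLog.configuration # builds Configuration.new on the first call
second = CrashLog.configuration # returns the memoized instance
first.equal?(second)            # => true, both names point at the same object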
The default logging device.
def logger
  self.configuration.logger || Logger.new($stdout)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def default_logger\n Logger.new(debug? ? STDOUT : nil)\n end", "def default_logger\n log = ::Logger.new(STDOUT)\n log.level = ::Logger::INFO\n log\n end", "def init_logger \n if not Object.const_defined?(get_rails_default_logger_name)\n Logger.new(STDOUT)\n else\n eval(get_rails_default_logger_name)\n end \n end", "def stdout_device\n # TODO: Find a more elegant way to access the internal log device\n @logger.instance_variable_get(:@logdev).dev\n end", "def default_logger\n Logger.new(STDOUT)\n end", "def log_capture_device\n @log_capture_device ||= STDOUT\n end", "def logger\n @logger ||= Logger.new(\"/dev/null\")\n end", "def default_logger\n logger = Logger.new(STDOUT)\n logger.level = Logger::INFO\n logger\n end", "def logger\n @logger ||= default_logger\n end", "def logger\n @logger ||= default_logger\n end", "def logger\n @logger || default_logger\n end", "def logger\n @logger || default_logger\n end", "def setup_default_logger(logger)\n progname = 'Expectacle'\n @logger = if logger == :syslog\n Syslog::Logger.new(progname)\n else\n default_io_logger(logger, progname)\n end\n @logger.level = Logger::INFO\n @logger.formatter = proc do |severity, datetime, pname, msg|\n \"#{datetime} #{pname} [#{severity}] #{msg}\\n\"\n end\n end", "def set_output(log_device)\n __setobj__(Logger.new(log_device))\n end", "def logger\n init_logger if Log4r::Logger[STANDARD_LOGGER].nil?\n Log4r::Logger[STANDARD_LOGGER]\n end", "def logger\n @logger ||= LogAdapter.new(Stevenson.new, (ENV[\"DAODAO_LOG_LEVEL\"] || :info).to_sym)\n end", "def stdout_device=(log_device)\n # TODO: Find a more elegant way to access the internal log device\n @logger.instance_variable_get(:@logdev).send(:set_dev, log_device)\n end", "def logger\n @logger ||= NilLogger.new\n end", "def none\n Internal::Logging::DevNullLogging::DEV_NULL_LOGGING\n end", "def log_for_device(app_id, device)\n log_device_name = \"monkey_current#{device}.txt\"\n log = File.join(@log_dir, log_device_name)\n ADB.start_logging(app_id, device, log)\n log\n end", "def initialize(logdev)\n @progname = nil\n @level = DEBUG\n @default_formatter = Formatter.new\n @formatter = nil\n @logdev = nil\n if logdev\n @logdev = LogDevice.new(logdev)\n end\n end", "def initialize\n @logProvider = DefaultLogger.new\n end", "def default_logger\n self.logger = Logger.new(STDERR)\n\n if settings.service.debug_mode || $DEBUG\n logger.level = Logger::DEBUG\n else\n logger.level = Logger::INFO\n end\n\n logger\n end", "def default_logger\n self.logger = Logger.new(STDERR)\n\n if settings.service.debug_mode || $DEBUG\n logger.level = Logger::DEBUG\n else\n logger.level = Logger::INFO\n end\n\n logger\n end", "def default_log_path\n if @config.log_root\n File.join(@config.log_root, default_log_file_name)\n else\n nil\n end\n end", "def logger\n @logger ||= Doing.logger\n end", "def logger\n Steno.logger(logger_name)\n end", "def log_handler\n @log_handler ||= ::Logger.new(STDOUT)\n end", "def logger\n @logger ||= Esi.config.logger || Logger.new(Esi.config.log_target).tap do |l|\n l.level = Logger.const_get(Esi.config.log_level.upcase)\n end\n end", "def logger\n @logger || StartupLogger.instance\n end", "def logger\n @logger ||= LogAdapter.new(Stevenson.new, (ENV[\"JEKYLL_LOG_LEVEL\"] || :info).to_sym)\n end", "def null_logger\n NullLoggerSingleton.instance\n end", "def logger(name = self.class.name)\n Logger.new(name)\n end", "def logger( name )\n return default_logger if name.nil?\n @hash.fetch(name, default_logger)\n end", "def logger\n\t\t\tunless 
CLILogging.class_variable_defined?(:@@logger)\n\t\t\t\t@@logger = CLILogger.new\n\t\t\t\t@@logger.progname=$0\n\t\t\tend\n\t\t\t@@logger\n\t\tend", "def logger\n\t\t\tunless CLILogging.class_variable_defined?(:@@logger)\n\t\t\t\t@@logger = CLILogger.new\n\t\t\t\t@@logger.progname=$0\n\t\t\tend\n\t\t\t@@logger\n\t\tend", "def default_logger\n logger = Logger.new(STDERR)\n logger.level = Mongoid::Config.log_level\n logger\n end", "def log\n @log ||= Logging.logger[File.basename($0)]\n end", "def logger\n return @logger ||= default_logger\n end", "def logger\n backend.logger\n end", "def logger\n RAILS_DEFAULT_LOGGER\n end", "def logger\n if defined?(BOWLINE_LOGGER)\n BOWLINE_LOGGER\n else\n nil\n end\n end", "def logger\n\t\t\t@system_lock.synchronize {\n\t\t\t\treturn @systems[0].logger unless @systems.empty?\n\t\t\t}\n\t\t\tSystem.logger\n\t\tend", "def log_capture_device=(value)\n @log_capture_device = value\n end", "def logger\n @logger ||= _new_logger\n end", "def logger\n @logger ||= build_logger\n end", "def init_logging\n app_name = ENV[\"APP_NAME\"] || \"calcentral\"\n format = PatternFormatter.new(:pattern => \"[%d] [%l] [CalCentral] %m\")\n\n Rails.logger = Log4r::Logger.new(app_name)\n Rails.logger.level = DEBUG\n Rails.logger.outputters = init_file_loggers(app_name, format)\n\n stdout = Outputter.stdout #controlled by Settings.logger.level\n stdout.formatter = format\n # level has to be set in the logger initializer, after Settings const is initialized.\n # see initializers/logging.rb\n Rails.logger.outputters << stdout\n end", "def logger\n @logger ||= create_logger\n end", "def logger\n @logger ||= create_logger\n end", "def log_handler\n @log_handler || Beanpicker::log_handler\n end", "def initialize_logger()\n case logger_type\n when :local\n log_path = File.join(RAILS_ROOT, 'log', \"#{config_basename}.log\")\n system(\"cat /dev/null > #{log_path}\")\n ActiveSupport::BufferedLogger.new(log_path)\n when :remote\n RemoteLogger.new(config_basename, File.join(RAILS_ROOT, 'log'), proc_id)\n when :stderr\n logger = ActiveSupport::BufferedLogger.new($stderr)\n logger.auto_flushing = true\n logger\n else\n raise ArgumentError, \"logger_type must be :local,:remote or :stderr\"\n end\n end", "def initialize_logger()\n case logger_type\n when :local\n log_path = File.join(RAILS_ROOT, 'log', \"#{config_basename}.log\")\n system(\"cat /dev/null > #{log_path}\")\n ActiveSupport::BufferedLogger.new(log_path)\n when :remote\n RemoteLogger.new(config_basename, File.join(RAILS_ROOT, 'log'), proc_id)\n when :stderr\n logger = ActiveSupport::BufferedLogger.new($stderr)\n logger.auto_flushing = true\n logger\n else\n raise ArgumentError, \"logger_type must be :local,:remote or :stderr\"\n end\n end", "def initialize_logger()\n case logger_type\n when :local\n log_path = File.join(RAILS_ROOT, 'log', \"#{config_basename}.log\")\n system(\"cat /dev/null > #{log_path}\")\n ActiveSupport::BufferedLogger.new(log_path)\n when :remote\n RemoteLogger.new(config_basename, File.join(RAILS_ROOT, 'log'), proc_id)\n when :stderr\n logger = ActiveSupport::BufferedLogger.new($stderr)\n logger.auto_flushing = true\n logger\n else\n raise ArgumentError, \"logger_type must be :local,:remote or :stderr\"\n end\n end", "def logger\n @logger ||= ::Logger.new STDOUT\n end", "def logger\n @logger ||= ::Logger.new STDOUT\n end", "def logger\n @logger ||= ::Logger.new STDOUT\n end", "def log\n @logger ||= Logger.new(nil)\n @logger\n end", "def default_log_root\n File.join(kitchen_root, Kitchen::DEFAULT_LOG_DIR)\n end", 
"def log\n @options[:log] || DEFAULT_LOG_FILE\n end", "def logger\n @logger ||= Logging.logger_for(self.class.name)\n end", "def log_host\n @log_host || host\n end", "def logdev_logger(filepath_or_logdev)\n Omnitest::Core::LogdevLogger.new(resolve_logdev(filepath_or_logdev))\n end", "def logger\n @logger || Rcal::Util::Loggable.default_logger\n end", "def logger\n @logger ||= configuration.logger\n end", "def logger\n unless defined? @logger\n logger = Logging.logger[self]\n @logger = Kernel.const_defined?('Rails') ? Rails.logger : logger\n end\n @logger\n end", "def logger\n @logger ||= set_logger\n end", "def initialize\n Logging.setup(Logger::INFO)\n end", "def logger\n @logger ||= (defined?(Rails) ? Rails.logger : Logger.new('monocle.log'))\n end", "def file_logger\n if !@file_logger\n if defined?(Rails) && Rails.root\n @file_logger = Logger.new(Rails.root.join(\"log/veritrans.log\").to_s)\n else\n @file_logger = Logger.new(\"/dev/null\")\n end\n end\n\n @file_logger\n end", "def defaultLogger(opts=nil, parser=nil)\n if opts && parser\n #noinspection RubyArgCount\n result = Logger.new(opts[:logFile] || 'dataMetaXtra.log', 'daily', 10*1024*1024)\n result.level = case opts[:level] ? opts[:level].downcase[0] : 'i'\n when 'd'\n Logger::DEBUG\n when 'i'\n Logger::INFO\n when 'w'\n Logger::WARN\n when 'e'\n Logger::ERROR\n else\n parser.educate\n raise \"Invalid log level #{opts[:level]}\"\n end\n result.datetime_format = '%Y-%m-%d %H:%M:%S'\n result\n else\n result = Logger.new($stdout)\n result.level = Logger::WARN\n result\n end\n end", "def log\n @log || MPDClient.log\n end", "def logfile=(dev)\n @logfile = dev\n @logger = nil\n end", "def initialize device = STDOUT, level = INFO\n self.level = level\n @logformat = '%d{%Y-%m-%d %H:%M:%S}%t%L%t%m %s %e'\n @placeholder = {\n 'n' => \"\\n\",\n 'p' => $$,\n 't' => \"\\t\",\n 'x' => File.basename($0),\n 'X' => File.expand_path($0)\n }\n\n @filename = nil\n if device.kind_of? IO then\n raise \"log destination already closed #{ device }\" if device.closed?\n @device = device\n @fileformat = nil\n else\n @device = nil\n @fileformat = device.to_s\n reopen\n end\n\n @sign = @mark = false\n end", "def logger\n classname = (self.is_a? Module) ? 
self : self.class.name\n @logger ||= Loggable.logger_for(classname)\n end", "def logfile\n nil\n end", "def logger\n @logger ||= Testable::Logger.new.create\n end", "def logger\n @logger ||= Testable::Logger.new.create\n end", "def logger\n initialize_logger unless @logger\n @logger\n end", "def default_formatter\n if protocol == :syslog\n # Format is text output without the time\n SemanticLogger::Formatters::Default.new(time_format: nil)\n else\n SemanticLogger::Formatters::Syslog.new(facility: facility, level_map: level_map, max_size: max_size)\n end\n end", "def logger\n @logger ||= Logger.new(@log_file_name)\n\n @logger.formatter = proc do |severity, datetime, progname, msg|\n \"%s, [%s #%d] (%s) %5s -- %s: %s\\n\" % [severity[0..0], datetime, $$, Conf.global_conf[:hostname], severity, progname, msg]\n end\n\n if Conf.global_conf[:debug]\n @logger.level = Logger::DEBUG\n else\n @logger.level = Logger::INFO\n end\n @logger\n end", "def use_logger(value)\n @global_logger = value\n end", "def logger\n Logging.logger\n end", "def logger; LOGGER; end", "def log\n @log ||= Logging.logger[self]\n end", "def logger\n @logger ||= self.class.logger\n end", "def logger\n Logging.logger\n end", "def logger\n raise NotImplementedError\n end", "def logger\n @logger ||= Neo4j::Config[:logger] || default_logger\n end", "def logger\n @logger ||= Neo4j::Config[:logger] || default_logger\n end", "def start_logger\n if config && config[:log] == \"file\" && config.log_file_path\n start_file_logger(config.log_file_path)\n else\n start_stdout_logger\n end\n\n logger.level =\n if config\n config.log_level\n else\n Appsignal::Config::DEFAULT_LOG_LEVEL\n end\n logger << @in_memory_log.string if @in_memory_log\n end", "def default_log_file_name\n @options['log_file_name'] || \"#{@name}.log\"\n end", "def initialize\n @log = Logging::Logger[self]\n @options = self.default_options\n end", "def logger; settings(:logger); end", "def logger\n instance ? instance.logger : Kitchen.logger\n end", "def logger\n instance ? instance.logger : Kitchen.logger\n end", "def logging_prefs; end", "def logger\n return @local_logger if @local_logger\n FileUtils.mkdir_p(File.dirname(log_path)) unless File.exists?(File.dirname(log_path))\n\n if API_KEYS[\"logentries\"]\n token = API_KEYS[\"logentries\"][Rails.env][\"nsc\"]\n @local_logger = Le.new(token, :debug => false, :local => log_path, :ssl => true, :tag => true)\n else\n @local_logger = Logger.new(log_path)\n end\n @local_logger\n end", "def logger\n LOGGER\n end", "def logger\n return @logger if @logger\n\n # Figure out where the output should go to.\n output = nil\n if ENV[\"MELISSADATA_LOG\"] == \"STDOUT\"\n output = STDOUT\n elsif ENV[\"MELISSADATA_LOG\"] == \"NULL\"\n output = nil\n elsif ENV[\"MELISSADATA_LOG\"]\n output = ENV[\"MELISSADATA_LOG\"]\n else\n output = nil #log_path.join(\"#{Time.now.to_i}.log\")\n end\n\n # Create the logger and custom formatter\n @logger = Logger.new(output)\n @logger.formatter = Proc.new do |severity, datetime, progname, msg|\n \"#{datetime} - #{progname} - [#{resource}] #{msg}\\n\"\n end\n\n @logger\n end", "def logger\n @logger ||= Logger.new($stdout).tap do |log|\n log.progname = self.class.name\n end\n end" ]
[ "0.73106635", "0.69823974", "0.69325185", "0.686806", "0.6844342", "0.67556345", "0.67399937", "0.672019", "0.66074646", "0.659246", "0.65316397", "0.65316397", "0.6519563", "0.64846325", "0.6461179", "0.6457614", "0.6408749", "0.6396789", "0.63909763", "0.6367269", "0.6365913", "0.6348271", "0.62833095", "0.62833095", "0.62688565", "0.62618476", "0.62614524", "0.6257917", "0.62574536", "0.62527335", "0.6250202", "0.6247626", "0.62401384", "0.6219621", "0.62160003", "0.62160003", "0.6208575", "0.6207839", "0.61956847", "0.6182535", "0.6168038", "0.6166415", "0.61643606", "0.6133023", "0.6128115", "0.6116258", "0.6114664", "0.61105245", "0.61105245", "0.61074543", "0.6107352", "0.6107352", "0.6107352", "0.6106551", "0.6106551", "0.6106551", "0.6105856", "0.6102814", "0.6089736", "0.6083133", "0.607863", "0.60784525", "0.60754997", "0.6027868", "0.6020592", "0.6019139", "0.6013123", "0.6006458", "0.60045344", "0.5996734", "0.5991783", "0.59867567", "0.5979538", "0.59615535", "0.595651", "0.59525746", "0.59525746", "0.5929543", "0.5925216", "0.59234554", "0.59162873", "0.59158665", "0.59156734", "0.59146535", "0.5903723", "0.59006834", "0.5898279", "0.58962524", "0.58962524", "0.5894067", "0.58935827", "0.5882904", "0.5877461", "0.58754337", "0.58754337", "0.5870681", "0.5869829", "0.5866779", "0.5864759", "0.58616877" ]
0.5858611
100
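The `logger` method above prefers a logger supplied through the configuration and only then falls back to a fresh `Logger.new($stdout)`. A hedged sketch of both paths, where the `config.logger=` writer and the `CrashLog` receiver are assumptions:

require "logger"

CrashLog.logger.info("no logger configured, so this goes to $stdout")

CrashLog.configure { |config| config.logger = Logger.new("notifier.log") }
CrashLog.logger.info("a configured logger now takes precedence")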
Is the logger live? Returns true if the current stage is included in the release stages config, false otherwise.
def live?
  configuration.release_stage?
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def debug?\n DEBUG == log_level\n end", "def staging?\n self.environment == ENV_STAGE\n end", "def debug?\n severity == :DEBUG\n end", "def log_debug?\n @logger.debug?\n end", "def debug?\n @loggers.any? { |logger| logger.respond_to?(:debug?) && logger.debug? }\n end", "def debug?\n debugging || !ENV['DEBUG'].nil?\n end", "def debug?\n !production?\n end", "def debug_logging\n log.level == Logger::DEBUG\n end", "def debug?\n @debug || to_bool( ENV['LAUNCHY_DEBUG'] )\n end", "def live?\n return @live\n end", "def live?\n source_version.nil?\n end", "def live?\n source_version.nil?\n end", "def running?\n return RUN_MODE if RUN_MODE.present?\n Rails.env.production?\n end", "def live?\n (phase == 'Running' && ready?) || (phase == 'Succeeded')\n end", "def debug?\n $DEBUG\n end", "def debug?\n @debug || ENV['HATCHET_DEBUG'] || false\n end", "def debug?\n level <= DEBUG\n end", "def live?\n case ENV['RUBY_LIBRARY_LIVE'].to_s.downcase\n when 'on', 'true', 'yes', 'y'\n true\n else\n false\n end\n end", "def production?\n environment == :production\n end", "def debugging?\n debugging = ENV[\"DEBUG\"]\n if defined?(Rails) && Rails.respond_to?(:env)\n debugging = true if [\"development\", \"test\"].include? Rails.env\n end\n debugging\n end", "def for_startup?\n self.stage_id > 1\n end", "def staging?\n status == \"STAGING\"\n end", "def production?\n @environment == :production\n end", "def release_build?\n false # we currently have nothing that qualifies. previously LTS was a type of this\n end", "def production?\n @production ||= true\n end", "def debugging?\n ENV['DEBUG'] && ENV['DEBUG'] != ''\nend", "def development?\n env == 'development'\n end", "def development?\n environment == 'development'\n end", "def development?\n !production?\n end", "def live?\n case ENV['RUBY_LIBRARY_LIVE'].to_s.downcase\n when 'on', 'true', 'yes', 'y'\n true\n else\n false\n end\n end", "def debug?; @loggers.first.level <= DEBUG; end", "def production?\n config[:target] == :production\n end", "def production?\n settings.environment == 'production'\n end", "def development?\n self.environment == ENV_DEV\n end", "def production?\n\n return true if Rails.env.production?\n\n end", "def debug?\n self[:debug] == 'true'\n end", "def production?\n self.environment == ENV_PROD\n end", "def debug?\n level >= ASL_LEVEL_DEBUG\n end", "def live?\n !!self[:live]\n end", "def dev?\n Rails.env == 'development'\n end", "def active?\n [email protected]? 
|| !connection.is_experiment_completed?(@id)\n end", "def debug?\n @@debug\n end", "def production?\n rails_env == \"production\" || rails_env == \"staging\"\n end", "def jenkins?\n @logger.info \"Jenkins Detected: #{not (ENV['WORKSPACE'].nil?)}\"\n return !ENV['WORKSPACE'].nil?\nend", "def logging_enabled?\n !!logging_enabled\n end", "def debug?\n\t\t!!@debuggable_status\n\tend", "def debug?\n @@debug\n end", "def livedns?\n current == :livedns\n end", "def debugging?\n\t\t(datastore['DEBUG'] || '') =~ /^(1|t|y)/i\n\tend", "def dev_env?\n env == 'development'\n end", "def debug?\n return @debug_mode if defined?(@debug_mode)\n @debug_mode = ENV['MOLINILLO_DEBUG']\n end", "def development?\n config[:target] == :development\n end", "def debug?\n true\n end", "def debug?\n true\n end", "def debug_mode?\n @@debug_mode\n end", "def production?\n environment == :production\nend", "def debug?\n instance.options[:debug]\n end", "def debug?\n instance.options[:debug]\n end", "def production?\n ENV[\"JEKYLL_ENV\"] == \"production\"\nend", "def should_notify_release_stage?\n @release_stage.nil? || @notify_release_stages.nil? || @notify_release_stages.include?(@release_stage)\n end", "def debug?\n !!@debug\n end", "def has_pushed_rhn_stage?\n return true if rhnqa? || rhnqa_shadow?\n\n has_pushed_since_last_respin?(RhnStagePushJob)\n end", "def development?\n environment == :development\nend", "def should_build?\n config.auto_build && !dev_server_running?\n end", "def debug?\n false\n end", "def debugging?\n Options[:debug]\n end", "def log?\n log != nil\n end", "def log?\n log != nil\n end", "def log?\n\t\t\t@glogging\n\t\tend", "def development?\n ::Rails.env.development?\n end", "def log?\n @log != false\n end", "def log?\n @log != false\n end", "def log?\n @log != false\n end", "def use_logger?\n config.present? and config[:use_logger] == true\n end", "def debug_stream?\n @debug_stream\n end", "def debug_stream?\n @debug_stream\n end", "def production? ; @app.options[:env] == :production ; end", "def verbose?\n !!ENV[\"DEBUG\"]\nend", "def debug?\n @level <= 0\n end", "def development? ; @app.options[:env] == :development ; end", "def isRunning\n return running?\n end", "def log?\n @log ||= false\n end", "def running?\n @db && @db.running?\n end", "def build?\n config[:mode] == :build\n end", "def enabled?\n !!PaperTrail.config.enabled\n end", "def debug?; @logger.debug? end", "def live?\n (uid && self.class.started_uids.include?(uid)) ? true : false\n end", "def testing?\n $TESTING ||= env?(:test) || Merb::Config[:testing]\n end", "def show_log?\n @runopts[:show_log] ||= false\n end", "def is_live?\n self.live? and self.published_on <= Time.now and !self.live_pages.blank?\n end", "def development?\n @request.development?\n end", "def running?\n @running\n end", "def running?\n @running\n end", "def running?\n @running\n end", "def running?\n @running\n end", "def running?\n @running\n end", "def running?\n @running\n end", "def running?\n @running\n end", "def testing?\n $TESTING || Merb::Config[:testing]\n end", "def running?\n @running.true?\n end" ]
[ "0.66961265", "0.6560573", "0.65462697", "0.6542471", "0.64925534", "0.6458757", "0.6455649", "0.645095", "0.6423734", "0.639385", "0.63637185", "0.63637185", "0.6348001", "0.63463944", "0.6323715", "0.63054425", "0.62925965", "0.62831175", "0.6260614", "0.625884", "0.62480795", "0.6232508", "0.6215526", "0.6211979", "0.62116504", "0.62101054", "0.620856", "0.61893576", "0.61847526", "0.61828667", "0.6172852", "0.6169805", "0.6168525", "0.6157903", "0.61539125", "0.61479026", "0.61454266", "0.6140973", "0.6105316", "0.60992014", "0.60974044", "0.6096252", "0.6093356", "0.6087445", "0.608047", "0.60726506", "0.60670817", "0.6059316", "0.60501415", "0.6045734", "0.60271746", "0.6024886", "0.60134506", "0.60134506", "0.6001838", "0.59966", "0.5992244", "0.5992244", "0.5981321", "0.5980843", "0.59611356", "0.59543437", "0.5952039", "0.59481853", "0.59425324", "0.5941269", "0.59390146", "0.59390146", "0.59376454", "0.5933195", "0.59277767", "0.59277767", "0.59277767", "0.59172374", "0.59161526", "0.59161526", "0.5915436", "0.59038556", "0.5863492", "0.5857041", "0.58551335", "0.5827416", "0.58183926", "0.58042014", "0.5802532", "0.58025134", "0.57987285", "0.5794886", "0.57917917", "0.5791649", "0.57866913", "0.5783124", "0.5783124", "0.5783124", "0.5783124", "0.5783124", "0.5783124", "0.5783124", "0.5782663", "0.5781464" ]
0.81160575
0
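`live?` above simply delegates to `configuration.release_stage?`, so reporting is active only when the current stage is one of the configured release stages. A sketch under that assumption; the `release_stage` and `release_stages` accessor names are illustrative:

CrashLog.configure do |config|
  config.release_stage  = "production"           # stage this process runs in
  config.release_stages = %w[production staging] # stages allowed to report
end
CrashLog.live? # => true, because "production" is in the configured stages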
Looks up ignored exceptions. Returns true if this exception should be ignored, false otherwise.
def ignored?(exception)
  configuration.ignored?(exception)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ignore?\n configuration.ignored_errors.each do |error|\n return true if @exception.is_a? error\n end\n\n false\n end", "def ignore?\n @should_ignore\n end", "def ignore?\n !!@ignore\n end", "def ignored?\n !!ignored\n end", "def ignored?\n @ignored\n end", "def ignored?()\n #This is a stub, used for indexing\n end", "def ignored?\r\n infoxml = get_info\r\n \r\n if ignored = infoxml['ignored']\r\n return ignored == '1'\r\n end\r\n \r\n return false\r\n end", "def ignore!\n @should_ignore = true\n end", "def should_ignore(event)\n @ignored.any? { |ign| event.summary =~ /#{ign}/i }\n end", "def raise_exceptions?\n false\n end", "def exception?\n false\n end", "def ignored?\n\t\treturn self.status == 'I'\n\tend", "def should_ignore?(path)\n @ignore && @ignore.ignore?(path)\n end", "def ignored?(object)\n ignores.exists?(:ignoreable_id => object.id, :ignoreable_type => object.class.to_s)\n end", "def ignore\n @ignore = true\n end", "def an_exception?\n an_exception == true\n end", "def matches_an_issue_exception(user)\n if user.notification_preferences[:exceptions][:no_issue_updates] == '1'\n return true if event.type == :issue_edited\n end\n if user.notification_preferences[:exceptions][:for_issue_trackers].present?\n excluded = user.notification_preferences[:exceptions][:for_issue_trackers]\n return true if excluded.include?(context.tracker_id) && excluded.include?(context.tracker_id_was)\n end\n if user.notification_preferences[:exceptions][:for_issue_priorities].present?\n excluded = user.notification_preferences[:exceptions][:for_issue_priorities]\n return true if excluded.include?(context.priority_id) && excluded.include?(context.priority_id_was)\n end\n false\n end", "def ignored?\n marking == \"IGNORED\"\n end", "def ignore\n @ignore = true\n end", "def test_does_not_consider_unknown_errors\n ig = ScoutApm::ErrorService::IgnoredExceptions.new(context, [\"ThisDoesNotExist\", \"IgnoredExceptionsTest::FakeError\"])\n assert ig.ignored?(FakeError.new(\"ignore this one\"))\n end", "def enduser_ignored?\n true\n end", "def ignored?(path)\n @@conf.ignore.each do |ignore|\n return true if rel_path(path).match(ignore)\n end\n return false\n end", "def ignored?(package_name)\n raise RuntimeError, \"#{self.class} needs to overwrite ignored?\"\n end", "def ignore!\n self.ignored = true\n end", "def ignored\n ignores.map {|ignore| ignore.ignoreable}\n end", "def ignored\n @diagnostics.select {|d| d.severity == :ignore }\n end", "def exception_is_handled?(exception)\n custom_exception_handler(exception)\n end", "def ignores?(file_path)\n !select_matching_file_patterns(ignore_paths, file_path).empty?\n end", "def excepted?(method)\n is_ignored?(method.file) ||\n except_methods.any? 
{ |except_method| Exceptable.matches method, except_method }\n end", "def exception?\n !!@exception\n end", "def ignore &block\n begin; block.call; rescue; end\n end", "def exception?\n !exception.nil?\n end", "def ignore_if(&block)\n @@ignores << block\n end", "def skip_this_when(enabled:, expected_exception:)\n yield\n rescue expected_exception => e\n e.tap do\n skip e.message if enabled && e.is_a?(expected_exception)\n end\n end", "def cannot_raise(exception = StandardError)\n yield\n rescue exception\n false\n rescue Exception => ex\n raise ex\n else\n true\n end", "def is_exception?; end", "def skip?\n raise NotImplementedError\n end", "def has_exceptions?\n !exception_queue.empty?\n end", "def handle_ignored\n if @note.ignore == true\n raise ActiveRecord::RecordNotFound\n end\n end", "def has_exception_case?\n @exception_case > 0\n end", "def display_exceptions?\n !!@exception_display_handler\n end", "def ignored?(path)\n path_clean = normalize_path(path)\n @ignored_callbacks.any? { |b| b.call(path_clean) }\n end", "def ignored?(other)\n\trelationship = get_relationship(other)\n\t\n\treturn relationship && relationship.friend_status == \"IGNORED\"\n end", "def rescue_exception?\n !!@rescue_exception\n end", "def ignore\n @ignored = true\n self\n end", "def ignored?(name)\n f = @field_components_by_name[name]\n !f || f.ignored\n end", "def ignored?(user)\n @ignored_ids.include?(user.resolve_id)\n end", "def ignored?(user)\n @ignored_ids.include?(user.resolve_id)\n end", "def ignore_task?(task)\n why_ignore(task) != :execute\n end", "def ignore; end", "def skip?\n !value_of(entity.only_if)\n end", "def ignore\n\t\t\treturn @data['ignore']\n\t\tend", "def handling_disabled?(event)\n event[:check][:handle] == false\n end", "def ignore?(diagnostic)\n @ignore_warnings &&\n diagnostic.level == :warning\n end", "def service_exception?\n original.body.include? '\"isException\":true,'\n end", "def has_exception?\n exceptions.any?\n end", "def ignore?(pkg)\n ignores.include?(pkg)\n end", "def ignore\n @ignore ||= []\n end", "def ignore_raise\n yield\nrescue StandardError\n :raised\nend", "def exception_state?(sym); @exception_states.include?(sym) end", "def skipped?\n @skipped\n end", "def skip?\n false \n end", "def ignore(ignore_msg = nil)\n capture_result(Assert::Result::Ignore, ignore_msg)\n end", "def skipped?\n @skipped\n end", "def skipped?\n !!skipped\n end", "def skipped?\n !!@skipped\n end", "def exception?\n exception_state?(peek_current_state)\n end", "def ignored_association?(association)\n ignored_associations.include? association.to_sym\n end", "def should_retry(e)\n options[:exceptions_to_retry].each {|ex_class| return true if e.instance_of?(ex_class)}\n false\n end", "def cancelled?\n\t\[email protected] {\n\t\t\[email protected]_a? Cancel\n\t\t}\n\tend", "def ignored_file?(path); end", "def notify_or_ignore(exception, context = {})\n notify(exception, context) unless ignored?(exception)\n end", "def can_throw?\n\t\treturn @can_throw\n\tend", "def service_exception?\n data['isException'] == true\n end", "def never?\n !skipped? 
&& coverage.nil?\n end", "def remove_inhibited_exceptions(exceptions)\n propagate_exception_in_plan(exceptions) do |e, object|\n if plan.force_gc.include?(object)\n true\n elsif object.respond_to?(:handles_error?)\n object.handles_error?(e)\n end\n end\n end", "def ignore(value = true)\n @ignore = value\n end", "def ignore(value = true)\n @ignore = value\n end", "def ignore(*args); end", "def suppress?\n !!@suppress\n end", "def ignore?(path)\n\t\t\[email protected]? { |ignore| ::Middleman::Util.path_match(ignore, path) }\n\t\tend", "def error_on_disabled?\n false\n end", "def skipped?\n !klass || !klass.ancestors.include?(ActiveRecord::Base) || @nodoc\n end", "def raise_on_error?\n @raise\n end", "def ignored?(fn)\n @ignored.any? { |spec| File.fnmatch spec, fn }\n end", "def trap_exception\n begin\n yield\n true\n rescue\n @exception = $!\n false\n end\n end", "def exception_filtered?(exception_data)\n @exception_whitelist&.any? do |expectation|\n if expectation[0] === exception_data[:error]\n expectation[1][:found] += 1\n true\n end\n end\n end", "def skip?(logger)\n false\n end", "def no_rescue(*exceptions)\n @options[:rescue] =\n if !exceptions.nil? and !exceptions.empty?\n ->(e) { !e.class.in?(exceptions) }\n else\n NONE\n end\n end", "def skips_around\n @skips_around\n end", "def skip?\n @skip\n end", "def skip?\n @skip\n end", "def skip?\n @skip\n end", "def skipped?\n\t\t@skipped\n\tend", "def should_intercept_exception?(frame, ex)\n # special case, or we go into infinite loop. CodeRay uses\n # exceptions for flow control :/\n if defined?(CodeRay::Encoders) && frame.eval('self') == CodeRay::Encoders\n false\n\n # normal case\n elsif intercept_object\n intercept_object.call(LazyFrame.new(frame), ex)\n else\n false\n end\n end", "def ignore_nonmulti?(task)\n ignore_by_type?(task, :nonmulti)\n end", "def raise_errors?\n @raise_errors != false\n end", "def ignore?(path)\n path = File.absolute_path(path)\n\n dir =\n if File.directory?(path)\n path\n else\n File.expand_path('..', path)\n end\n\n relative_to_home = dir.start_with?(HOME)\n\n if !relative_to_home\n dir = HOME\n end\n\n scan_for_dotignore!(dir)\n\n (@path2dotignore[dir] + @additional_ignores).any? do |pattern|\n Dotsmack::fnmatch?(pattern, path)\n end\n end", "def skipped_test?(event)\n return false unless event['event'] == 'test' && event['status'] == 'error'\n\n !!event['message'].match(/Skipped|Incomplete/i)\n end", "def ignored?(file)\n return true if File.extname(file) == \".tmp\"\n return true if file.match(/___$/)\n return true if File.basename(file) == \".DS_Store\"\n return false\n end" ]
[ "0.8006646", "0.7350852", "0.7195413", "0.7142639", "0.70912725", "0.697696", "0.65832925", "0.65617436", "0.6544055", "0.6519214", "0.64191777", "0.6399227", "0.63532585", "0.63216203", "0.6268937", "0.6157815", "0.6155958", "0.6108784", "0.60998607", "0.60486263", "0.60438275", "0.5959021", "0.5942784", "0.59370166", "0.59367627", "0.5934877", "0.5914494", "0.5901256", "0.58697796", "0.58571416", "0.58387625", "0.58371377", "0.5827378", "0.58031803", "0.57977957", "0.5791847", "0.578991", "0.5788464", "0.5784656", "0.5782264", "0.5761345", "0.5705687", "0.56891197", "0.5683393", "0.5663127", "0.5653538", "0.5646058", "0.5646058", "0.56254923", "0.5623594", "0.5607739", "0.5591724", "0.55864686", "0.55795157", "0.5574644", "0.5561242", "0.5523425", "0.55227363", "0.5517807", "0.55158967", "0.55042934", "0.5503453", "0.5496949", "0.549603", "0.548129", "0.54747677", "0.54604733", "0.54515946", "0.5440086", "0.5438403", "0.5434856", "0.5418155", "0.5400628", "0.5386559", "0.53812736", "0.5371989", "0.5360238", "0.5360238", "0.53485954", "0.5346572", "0.5339006", "0.5334652", "0.5331135", "0.53125477", "0.53040105", "0.53003836", "0.52982223", "0.5290896", "0.5282574", "0.52676857", "0.52502716", "0.52502716", "0.52502716", "0.5248719", "0.5244003", "0.52419364", "0.5241305", "0.5241256", "0.5240139", "0.52368087" ]
0.82893014
0
Advance to the next card, first stashing the current card and updating its status depending whether it passed or failed
def next_card if ["pass","fail"].include?(params[:card_status]) quiz = Quiz.find(params[:quiz_id]) user = quiz.user card = Card.find(params[:card_id]) card.pass(user) if params[:card_status] == "pass" card.fail(user) if params[:card_status] == "fail" cv = CardView.find_or_create(user, card) quiz.shuffle_card_into_queue(cv) current_card = quiz.current_card redirect_to quiz_card_path(quiz, current_card) else flash[:error] = "Couldn't figure out whether you passed or failed that particular card... try again?" redirect_to :back end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def flip_to_next_card\n @flashcard_deck.next_card\n @current_card = @flashcard_deck.current_card\n end", "def next_phase\n if phase == 'Action'\n reset_action_variables\n update_attributes(phase: \"Buy\") \n elsif phase == 'Buy'\n reset_player_balances\n current_player.discard.add_deck_to_top( current_player.played )\n current_player.discard.add_deck_to_top( current_player.hand )\n current_player.draw_card(5)\n next_turn\n update_attributes(phase: \"Action\")\n end\n end", "def begin_next_dance\n\t partner = @dance_card[0]\n\t @dance_card.shift\n\t p @dance_card\n\t p \"Now dancing with #{partner}.\"\n\tend", "def take_card\n @deck.shift\n end", "def deal\r\n @deck_of_cards.shift\r\n end", "def deal\r\n @cards.shift\r\n end", "def choose_card \n\n @cards.shift\n end", "def return_card( card )\n @deck.unshift( card )\n end", "def draw\n raise \"not enough cards\" if @cards.empty?\n @cards.shift\n end", "def deal\n @deckOfCards.shift\n end", "def deal\n\n @first_card = @cards.shift\n\n return @first_card\n\n end", "def advance!\n if next_status\n self.status = next_status\n return save\n end\n end", "def next_flashcard\n @flashcard = Flashcard.find(params[:id])\n authorize @flashcard, :show?\n\n @article = @flashcard.article\n flashcards_queue = FlashcardQueue.find_by(user: current_user, article: @flashcard.article)\n if flashcards_queue.any?\n redirect_to article_flashcard_path(@article, flashcards_queue.dequeue!)\n else\n redirect_to article_quiz_results_path(@article)\n end\n end", "def get_next_card\n if params[:type]\n num_of_cards = session[:all_cards].size - 1\n random_num = rand(0..num_of_cards)\n\n #Ensures a previously dealt card is not dealt again\n while( session[:all_cards][random_num].to_i==0 )\n random_num = rand(0..num_of_cards)\n end\n\n selected = session[:all_cards][random_num]\n session[:all_cards][random_num] = 0\n session[:player_cards] << selected if params[:type].to_s==\"player\"\n session[:dealer_cards] << selected if params[:type].to_s==\"dealer\"\n @status = :ok\n else\n selected = nil\n @status = :failed \n end\n respond_to do |format|\n format.json { render :json => {:status => @status, :selected => selected} }\n end\n end", "def draw\n @cards.shift\n end", "def play_card(discard_pile, color, number)\n @cards.length.times do |i|\n unless @cards[i].color == color && @cards[i].number == number\n next\n end\n\n discard_pile.cards << @cards[i]\n @last_action = number\n if number == \"Pickup 2\"\n discard_pile.pickup_2_count += 2\n end\n if number == \"Pickup 4\"\n discard_pile.pickup_4_count += 4\n end\n @cards.delete_at(i)\n return true\n end\n puts \"unexpected error, didn't play card\" # given validation, this should not be seen\n self\n end", "def use_card\n waiting_to_confirm_placement = true\n waiting_to_use_card = true\n invalid_usage = nil\n invalid_confirmation = nil\n remind_cannot_discard = nil\n \n while waiting_to_confirm_placement\n while waiting_to_use_card\n DisplayManager.prepare_ingame_display\n show_state\n puts \"Newest Card: #{@selected_card.to_s} #{'* you cannot discard this card' if @drew_from_discard}\" unless @selected_card.nil?\n puts \"You cannot discard this card because you drew it from the discard pile.\" if remind_cannot_discard\n remind_cannot_discard = false\n \n @card_to_replace = nil\n @card_to_discard = nil\n\n puts InputManager.input_options({ negative: 'Discard this Card', rack_positions: 'Switch With Card at Position' }, invalid_usage)\n invalid_usage = nil\n\n @placement_response = InputManager.get\n\n # If player chooses a 
location in their rack\n # Get ready to exchange those cards\n if InputManager::INPUTS[:rack_positions].include?(@placement_response)\n prep_place_card_in_rack(@placement_response)\n waiting_to_use_card = false\n\n # If player chooses to discard their card\n # get ready to discard their card\n # Disallow discard if card was drawn from the discard pile\n elsif InputManager.negative?(@placement_response)\n if @drew_from_discard\n remind_cannot_discard = true\n else\n prep_discard_drawn_card\n waiting_to_use_card = false\n end\n else\n invalid_usage = @placement_response\n end\n end\n\n DisplayManager.prepare_ingame_display\n show_state\n puts \"Newest Card: #{@selected_card.to_s}\"\n\n if @card_to_replace\n puts \"You want to exchange #{@card_to_replace.to_s} with #{@selected_card.to_s}.\"\n else\n puts \"You do not want to use #{@selected_card.to_s}.\"\n end\n\n puts \"You are discarding #{@card_to_discard.to_s}.\"\n\n puts InputManager.input_options({ affirmative: 'Save and Complete Turn', negative: 'Do Something Different' }, invalid_confirmation)\n invalid_confirmation = nil\n confirm_response = InputManager.get\n\n # If player confirms their decision\n # persist their decision\n if InputManager.affirmative?(confirm_response)\n save_and_discard(@placement_response)\n waiting_to_confirm_placement = false\n \n # If player changes their mind\n # allow them to choose how to use their card again \n elsif InputManager.negative?(confirm_response)\n waiting_to_use_card = true\n else\n invalid_confirmation = confirm_response\n end\n end\n end", "def hit\n @cards.push @deck.shift\n end", "def hit\n new_card = @deck.cards.pop\n @current_hand << new_card\n @total += new_card.value\n puts \"drew #{@current_hand[-1].card_id}\"\n\n @current_hand.each do |card|\n if @total > 21\n if card.value == 11 && card.ace_low == false\n @total -= 10\n card.ace_low = true\n end\n end\n end\n end", "def draw_final_card\n @card3 = Card.draw(@card1.id, @card2.id)\n puts \"The next card is the #{@card3}\"\n\n # player wins if card is Joker\n raise Rules::Win::Joker if @card3.joker?\n end", "def take!\n\t\[email protected]\n\tend", "def continue_game\n\t\thandle_no_set\n\t\tuntil @is_end\n\t\t\tshow_progress\n\t\t\tshow_hand\n\t\t\tuser_input = get_user_cards\n\t\t\tupdate user_input\n\t\t\thandle_no_set\n\t\tend\n\tend", "def one_game_loop()\n\n # Initial loop to go through players and get their choices\n @players.each do |pl|\n playing = true\n\n while (playing)\n print_state()\n\n # Check if player has a blackjack at the very beginning\n if blackjack(pl.cards)\n puts \"Player #{pl.index} has a blackjack!\"\n credit_player(pl,blackjack=true)\n playing = false\n pl.cur_playing = false # Player has already won, don't include in final round\n next\n end\n\n print \"Player #{pl.index}, what would you like to do: \"\n action = gets.chomp\n\n case action\n when \"hit\"\n new_card = @cards.pop \n pl.cards << new_card\n when \"split\"\n puts \"not implemented, I have to go do homework!\"\n when \"double\"\n new_card = @cards.pop\n pl.cards << new_card\n playing = false\n when \"stand\"\n playing = false\n else\n puts \"no such action, try again...\"\n end\n\n if value(pl.cards) > 21 # Lost :(\n puts \"Player #{pl.index}, you lost...\"\n debit_player(pl)\n playing = false\n pl.cur_playing = false # Player has already lost, don't inlude in final round\n elsif value(pl.cards) == 21 # 21, but not blackjack, so maybe we draw with the dealer, we check later\n puts \"Player #{pl.index} has 21 points - we await for the final round to 
compare with the dealer\"\n playing = false\n end\n end\n end\n\n # By this point, the first round is finished. Dealer starts revealing cards...\n \n dealer_reveal_cards()\n final_round()\n\n end", "def deal()\n card = self.cards.shift()\n raise \"Cannot deal more than 52 cards.\" unless card\n return card\n end", "def advance_status\n self.status = next_status\n end", "def double_down(amount, next_card)\n hand = @hands[@current_hand]\n place_double_bet(amount)\n hit(next_card) # Add this card to the current hand of the player\n print_player_hands() # Display the state of the table to show the player the card that was received\n @current_hand += 1\n end", "def deal_post_flop\n # Burns the top card of the deck.\n @deck.cards.shift\n # Moves the top card of the deck into the community table cards array.\n @community_cards.push(@deck.cards.shift)\n print 'The community cards are: '\n puts \"\"\n card_output(@community_cards)\n puts \"\"\n sleep(3)\nend", "def take_card\n raise OutOfCardsError if empty?\n @cards.pop\n end", "def remaining_cards\r\n @deck_of_cards.each do |card|\r\n card.output_card\r\n end\r\n end", "def return\n @cards += @discard\n @discard.clear\n @cards.shuffle\n end", "def deal\n @deck.shift\n end", "def advance!\n @state = (@state + 1) % @@states.length\n end", "def draw_cards(count)\n self.deck.shift(count)\n end", "def hit!(deck)\n @dealt << deck.cards.shift\n end", "def deal_card\n @deck.pop\n end", "def deal_card\n @cards.pop\n end", "def deal_cards\n MAX_CARDS.times do |d|\n @player_manager.players.each do |player| \n player.rack.add_card(@decks_manager.draw_pile.draw_card)\n end\n end\n\n # prep the discard pile with one card from the top of the draw pile\n @decks_manager.discard_top_card\n end", "def check_if_complete(card)\t\n\t\tif @cards.has_value?(4)\n\t\t\tcompleted_card = card\n\t\t\t@complete_sets.push(completed_card)\n\t\t\tremove_set_from_hand(completed_card)\n\t\tend\n\n\tend", "def pick\n raise NoMoreCardsException.new(\"Deck is empty\") if cards_left == 0\n @deck.shift\n end", "def opponent_turn\n #set variables\n game = Game.find(self.game_id)\n player = Player.find(game.players.last)\n draw_card = Card.where(player_id: -2).first\n opponent_cards = Card.where(player_id: self.id)\n possible_plays = []\n colors = [\"blue\", \"green\", \"red\", \"yellow\"]\n\n #determine eligible cards in hand.\n opponent_cards.each do |card|\n if card.color == draw_card.color || card.number == draw_card.number || (card.card_action != nil && card.card_action == draw_card.card_action)\n possible_plays.push(card)\n end\n end\n #starts decision logic for card to play, otherwise draws a card and looks for possible plays\n if possible_plays.any?\n #discard current draw pile card\n draw_card.player_id = -1\n draw_card.save\n\n #determines card to be played, sets card to be the new draw pile card\n selected_card = possible_plays.sample\n selected_card.player_id = -2\n selected_card.save\n\n #determines if card has special action, and executes action if it does.\n if selected_card.card_action != nil\n\n if selected_card.card_action === \"skip\" || selected_card.card_action === \"reverse\"\n self.opponent_turn\n elsif selected_card.card_action === \"draw\"\n draw_two = Card.where(player_id: 0).sample(2)\n draw_two.each do |card|\n card.player_id = game.players.first.id\n card.save\n end\n self.opponent_turn\n elsif selected_card.card_action === \"draw_four\"\n draw_four = Card.where(player_id: 0).sample(4)\n draw_four.each do |card|\n card.player_id = player.id\n card.save\n 
end\n self.opponent_turn\n elsif selected_card.card_action === \"wild_color\"\n selected_card.color = colors.sample\n selected_card.save\n end\n\n else\n #switches current turn once card has been selected and played\n if game.current_turn = self.id\n game.current_turn = player.id\n game.save\n else\n game.current_turn = self.id\n game.save\n end\n\n end\n\n else\n #draws a card from the draw pile and repeats opponent_turn process.\n drawn_card = Card.where(player_id: 0).sample\n drawn_card.player_id = self.id\n drawn_card.save\n self.opponent_turn\n end\n end", "def deal_card\r\n\t\tcards.pop\r\n\tend", "def deal_cards\n Print.heading('Dealing initial cards')\n deal_card_to_players\n dealer_card_to_dealer(false) #false to hide first dealer card\n deal_card_to_players\n dealer_card_to_dealer(true)\n end", "def runner\n welcome\n card = hit?(initial_round)# code runner here\n until card > 21\n display_card_total(card)\n card += hit?(deal_card)\n end\n display_card_total(card)\n end_game(card)\nend", "def deal()\n loop_size = Constant::CARD_COUNT / 6\n loop_size.times do\n @players.each do |player|\n break if @deck.cards.empty?\n player.hand += @deck.cards.pop(2)\n end\n end\n end", "def runner\n welcome\n card =initial_round\n until card > 21 do\n card = hit? (card)\n display_card_total (card)\n end\nend_game(card)\nend", "def check_all_cards\n all_cards = @cpu_hand.cards + @player_hand.cards\n\n if all_cards.length == 0\n complete\n elsif !all_cards.any? { |c| @total + c.value <= 31 }\n # There was no way to continue with the previous set, start a new one with\n # the other player.\n\n @scorer.peg_player( @turn, 1, 'a Go' )\n\n start_set\n\n @turn = other_player @turn\n end\n\n @engine.delay_update( 1 ) if @turn == :cpu\n end", "def addCard(card)\n\t\t@cards << card\n\t\t@value += getValue(card.rank)\n\t\tif card.rank == 'Ace'\n\t\t\t@hasAce = true\n\t\tend\n\t\tif @cards.length == 2 and @value == 21 and @hasAce # updates should be in different method\n\t\t\t@blackjack = true\n\t\tend\n\t\tif @value > 21 and @hasAce\n\t\t\t@value -= 10\n\t\t\t@hasAce = false\n\t\tend\n\tend", "def apply_effects(card)\n if(card.value == 10) then\n #skip\n if(@game.user.length > 2) then\n self.set_next_player_turn(2)\n end\n elsif(card.value == 11) then\n #reverse\n if(@game.user.length > 2) then\n self.toggle_clockwise\n self.set_next_player_turn(1)\n end\n elsif(card.value == 12) then\n #draw2\n user = self.get_next_player\n self.draw_cards(user, 2)\n if(@game.user.length > 2) then\n self.set_next_player_turn(2)\n end\n elsif(card.value == 14) then\n #wild draw 4\n user =self.get_next_player\n self.draw_cards(user, 4)\n if(@game.user.length > 2) then\n self.set_next_player_turn(2)\n end\n else\n #just a regular card, change the whoseturn to the next user\n self.set_next_player_turn(1)\n end\n end", "def action_useCard(pockerCard_id)\n\n if check_cardOwn(pockerCard_id) and check_currentTurn()#only owner can play card and current turn on\n if check_usableNumShape(pockerCard_id) #check bottom number and shape\n\n sourcePlayer_id = Game.last.players.by_user(current_user).id\n destPlayer_id = Game.last.players.dummy.id # destPlayer_id = 2 (dummy) \n\n action_moveCard(dest_id: destPlayer_id, source_id: sourcePlayer_id, card_id: pockerCard_id)\n\n #check effect of cards\n card_effect = Pockercard.find(pockerCard_id).effect\n action_addDummyList(pockerCard_id)\n action_putBottomCard(pockerCard_id)\n if card_effect == \"none\"\n action_endTurn(1) #move to next player=[\n elsif card_effect == \"back\" \n 
Game.last.toggle_order!\n action_endTurn(1) #skip next player\n elsif card_effect == \"jump\" \n action_endTurn(2) #move to next next player\n elsif card_effect == \"attack\"\n action_attackCard(pockerCard_id)\n action_endTurn(1) #move to next next player\n elsif card_effect == \"change\"\n action_setBottomCardStep()\n elsif card_effect == \"onemore\" \n else\n action_endTurn(1) #skip next player\n end\n check_winOrLose() \n end\n end\n\n\n\n end", "def hit!\n\t\t@cards << @deck.take!\n\tend", "def restock_hand!\n return if Bot::CONFIG.hand_size == unplayed_cards.count\n (Bot::CONFIG.hand_size - unplayed_cards.count).times do\n add_player_card PlayerCard.create(answer: game.available_answers.sample)\n end\n end", "def next_turn\n print \"\\nYour move, #{@mover.designation}: \"\n move = get_move\n\n print \"\\nMover = #{@mover.designation}. Move = #{move}.\\n\"\n\n if move == 'Q'.to_i\n @is_active = false\n print \"\\n***** Game halted by user. *****\\n\"\n return\n end\n\n add_good_move(move)\n check_winner\n check_tie_game\n\n print_color_grid\n\n @turn = @turn + 1\n @mover = @player[@turn % 2]\n end", "def deal_flop\n # Burns the top card of the deck.\n @deck.cards.shift\n # Moves the top three cards of the deck into the community table cards array.\n @community_cards = @deck.cards.shift(3)\n puts ''\n print 'The flop is: '\n card_output(@community_cards)\n puts \"\"\n sleep(3)\nend", "def next\n params[:topic] ||= ''\n @flash_cards = FlashCard.find(\n :all,\n :conditions => [\"lower(topic) = ?\", params[:topic].downcase],\n :order => :id\n )\n found = false\n @flash_card = @flash_cards.detect do |item|\n if found\n next true \n elsif item.id.to_s == params[:id] \n found = true\n end\n false \n end || @flash_cards.first\n\n respond_to do |format|\n format.json { render :json => @flash_card }\n end\n end", "def deal_card(game_deck,player)\n card = game_deck.deck.pop\n ace_checker(card,player)\n player.hand.push(card)\n puts\"#{player.player_name} received #{card.identify}\"\n puts \"Current hand: #{player.display_hand}\"\n win_or_bust(player)\n hit_or_stay(player)\nend", "def remove_card\n @cards.shift\n\n end", "def deal\n puts @deck.first\n @deck.shift\n end", "def deal_card\n if @unshuffled_deck[-1] == nil\n @unshuffled_deck = @addhand\n @addhand = @emptyarray\n @x = 0\n end\n card = @unshuffled_deck[@x]\n @unshuffled_deck[@x] = nil\n @x+=1\n return card\n end", "def draw_card\n waiting_to_pick_pile = true\n\n invalid_pile = nil\n while waiting_to_pick_pile\n DisplayManager.prepare_ingame_display\n show_state\n\n puts \"Do you want to draw a new card, or use the top discarded card?\"\n puts InputManager.input_options({ affirmative: 'Draw New Card', negative: 'Take Last Discarded Card' }, invalid_pile)\n invalid_pile = nil\n \n response = InputManager.get\n\n # If player picks the draw pile\n # draw the top card from that pile\n if InputManager.affirmative?(response)\n choose_new_card\n waiting_to_pick_pile = false\n\n # If player picks from discard pile\n # draw top card from that pile\n # player cannot discard this card\n elsif InputManager.negative?(response)\n choose_discard\n waiting_to_pick_pile = false\n else\n invalid_pile = response\n end\n end\n end", "def play_as_master_second\r\n card_avv_s = @card_played[0].to_s\r\n card_avv_info = @deck_info.get_card_info(@card_played[0])\r\n max_points_take = 0\r\n max_card_take = @cards_on_hand[0]\r\n min_card_leave = @cards_on_hand[0]\r\n min_points_leave = @deck_info.get_card_info(min_card_leave)[:points] + card_avv_info[:points]\r\n 
take_it = []\r\n leave_it = []\r\n # build takeit leaveit arrays\r\n @cards_on_hand.each do |card_lbl|\r\n card_s = card_lbl.to_s\r\n bcurr_card_take = false\r\n card_curr_info = @deck_info.get_card_info(card_lbl)\r\n if card_s[2] == card_avv_s[2]\r\n # same suit\r\n if card_curr_info[:rank] > card_avv_info[:rank]\r\n # current card take\r\n bcurr_card_take = true\r\n take_it << card_lbl\r\n else\r\n leave_it << card_lbl\r\n end\r\n elsif card_s[2] == @briscola.to_s[2]\r\n # this card is a briscola \r\n bcurr_card_take = true\r\n take_it << card_lbl\r\n else\r\n leave_it << card_lbl\r\n end\r\n # check how many points make the card if it take\r\n points = card_curr_info[:points] + card_avv_info[:points]\r\n if bcurr_card_take\r\n if points > max_points_take\r\n max_card_take = card_lbl\r\n max_points_take = points\r\n end\r\n else\r\n # leave it as minimum\r\n if points < min_points_leave or (points == min_points_leave and\r\n card_curr_info[:rank] < @deck_info.get_card_info(min_card_leave)[:rank] )\r\n min_card_leave = card_lbl\r\n min_points_leave = points\r\n end\r\n end\r\n end\r\n #p min_points_leave\r\n #p min_card_leave\r\n curr_points_me = 0\r\n @team_mates.each{ |name_pl| curr_points_me += @points_segno[name_pl] }\r\n tot_points_if_take = curr_points_me + max_points_take\r\n curr_points_opp = 0\r\n @opp_names.each{ |name_pl| curr_points_opp += @points_segno[name_pl] }\r\n \r\n #p take_it\r\n #p leave_it\r\n #p max_points_take\r\n #p min_points_leave\r\n if take_it.size == 0\r\n #take_it is not possibile, use leave it\r\n @log.debug(\"play_as_master_second, apply R1 #{min_card_leave}\")\r\n return min_card_leave \r\n end\r\n max_card_take_s = max_card_take.to_s\r\n if tot_points_if_take >= @target_points\r\n # take it, we win\r\n @log.debug(\"play_as_master_second, apply R2 #{max_card_take}\")\r\n return max_card_take\r\n end\r\n if @pending_points > 0\r\n card_to_play = best_taken_card(take_it)[0]\r\n @log.debug(\"play_as_master_second, apply R2-decl #{card_to_play}\")\r\n return card_to_play \r\n end\r\n if max_card_take_s[2] == @briscola.to_s[2]\r\n # card that take is briscola, pay attention to play it\r\n if max_points_take >= 20\r\n @log.debug(\"play_as_master_second, apply R3 #{max_card_take}\")\r\n return max_card_take\r\n end\r\n elsif max_points_take >= 10\r\n # take it, strosa!\r\n @log.debug(\"play_as_master_second, apply R4 #{max_card_take}\")\r\n return max_card_take\r\n end\r\n best_leave_it = nil\r\n if leave_it.size > 0\r\n best_leave_it = best_leaveit_card(leave_it)\r\n end\r\n if best_leave_it == nil\r\n card_to_play = best_taken_card(take_it)[0]\r\n @log.debug(\"play_as_master_second, apply R9 #{card_to_play} - force taken\")\r\n return card_to_play\r\n end\r\n points_best_leave = @deck_info.get_card_info(best_leave_it)[:points]\r\n if card_avv_info[:points] == 0 and points_best_leave == 0\r\n @log.debug(\"play_as_master_second, apply R10 #{best_leave_it} \")\r\n return best_leave_it\r\n end\r\n if take_it.size > 0\r\n w_and_best = best_taken_card(take_it)\r\n # we can take it\r\n if curr_points_opp > 29 and max_points_take > 0 and take_it.size > 1\r\n # try to take it\r\n card_to_play = w_and_best[0]\r\n @log.debug(\"play_as_master_second, apply R5 #{card_to_play}\")\r\n return card_to_play\r\n end\r\n if curr_points_opp > 36 and (card_avv_info[:points] > 0 or points_best_leave > 0)\r\n # try to take it\r\n card_to_play = w_and_best[0]\r\n @log.debug(\"play_as_master_second, apply R11 #{card_to_play}\")\r\n return card_to_play\r\n end\r\n if points_best_leave > 
2 or min_points_leave > 3 and w_and_best[1] < 320\r\n # I am loosing too many points?\r\n card_to_play = w_and_best[0]\r\n @log.debug(\"play_as_master_second, apply R6 #{card_to_play}\")\r\n return card_to_play\r\n end\r\n end \r\n # leave it\r\n if best_leave_it\r\n @log.debug(\"play_as_master_second, apply R7 #{best_leave_it}\")\r\n return best_leave_it\r\n end\r\n \r\n @log.debug(\"play_as_master_second, apply R8 #{min_card_leave}\")\r\n return min_card_leave \r\n #crash\r\n end", "def playCard()\n if (@hand.length == 0)\n puts \"#{@name} RAN OUT OF CARDS\"\n return false\n end\n topCard = @hand.shift\n return topCard\n end", "def add_card(card)\n @deck.add(card, :back)\n end", "def remove_card\n @cards.shift\n end", "def play_as_master_first\r\n @pending_points = 0\r\n w_cards = []\r\n curr_points_me = @team_mates.inject(0){ |result, name_pl| result + @points_segno[name_pl] }\r\n @cards_on_hand.each do |card_lbl|\r\n card_s = card_lbl.to_s # something like '_Ab'\r\n segno = card_s[2,1] # character with index 2 and string len 1\r\n is_card_lbl_briscola = card_s[2] == @briscola.to_s[2] \r\n curr_w = 0\r\n curr_w += 70 if is_card_lbl_briscola\r\n # check if it is an asso or 3\r\n curr_w += 220 if card_s[1] == \"A\"[0]\r\n curr_w += 200 if card_s[1] == \"3\"[0] \r\n if card_s =~ /[24567]/\r\n # liscio value\r\n lisc_val = (card_s[1] - '0'[0]).to_i\r\n curr_w += 50 + lisc_val\r\n end\r\n curr_w += 60 if card_s[1] == \"F\"[0]\r\n # check horse and king cards\r\n if card_s[1] == \"C\"[0]\r\n if is_mariazz_possible?(segno)\r\n curr_w += 90 + 70\r\n else\r\n curr_w += 30\r\n end\r\n end \r\n if card_s[1] == \"R\"[0]\r\n if is_mariazz_possible?(segno)\r\n curr_w += 100 + 70\r\n else\r\n curr_w += 20\r\n end\r\n end\r\n # penalty for cards wich are not stroz free\r\n curr_w += 10 * @strozzi_on_suite[segno].size\r\n if (curr_points_me + @deck_info.get_card_info(card_lbl)[:points]) > @target_points\r\n curr_w -= (@deck_info.get_card_info(card_lbl)[:points] + 100)\r\n curr_w -= 200 if is_card_lbl_briscola\r\n curr_w -= 1000 if is_card_lbl_briscola and card_s[1] == \"A\"[0]\r\n end\r\n \r\n w_cards << [card_lbl, curr_w ] \r\n end\r\n # find a minimum\r\n #p w_cards\r\n min_list = w_cards.min{|a,b| a[1]<=>b[1]}\r\n @log.debug(\"Play as first: best card#{min_list[0]}, (w_cards = #{w_cards.inspect})\")\r\n return min_list[0]\r\n end", "def deal_card(player)\n if !self.is_empty?\n player.hand << cards.pop\n else\n self.initialize(1)\n end\n end", "def player_turn\n puts \"It's #{player.name}'s turn.\"\n\n blackjack_or_bust?(player)\n\n while !player.is_busted?\n puts \"What do you want to do? 
1) Hit 2) Stay\"\n response = gets.chomp\n\n if ![\"1\", \"2\"].include?(response)\n puts \"Error, you must enter 1 or 2.\"\n next\n end\n\n if response == \"2\"\n puts \"#{player.name} choses to stay.\"\n break\n end\n\n new_card = deck.deal_one\n puts \"Dealing card to #{player.name}: #{new_card}\"\n player.add_card(new_card)\n puts \"#{player.name}'s total is now: #{player.total}\"\n\n blackjack_or_bust?(player)\n\n end\n end", "def deal_cards\n\t\t\tend", "def redeal\n # take all current cards in play and add to deck\n @deck.concat(@cards_in_play)\n @cards_in_play = Array.new\n\n #shuffle cards \n @deck.shuffle!\n\n #deal 12 more new cards\n @cards_in_play.concat(@deck.pop(12))\nend", "def dealer_turn\r\n dealer.reveal_hand\r\n if dealer.total_for_hand < 17\r\n loop do\r\n dealer.add_to_hand(deck)\r\n break if dealer.total_for_hand > 16\r\n end\r\n puts \"#{dealer.name} has #{dealer.total_for_hand}\"\r\n end\r\n end", "def runner\n welcome\n card_total=initial_round\n begin\n deal_card\n card_total=hit?(card_total)\n display_card_total(card_total)\nend until card_total>21 \nend_game(card_total)\nend", "def deal_card\n\t\tCard.deal_new(self)\n\t\tupdate_total\n\tend", "def deal_community_cards\n if @stage_of_play == 1\n puts 'The flop is now being being dealt.'\n sleep(1)\n deal_flop\n elsif @stage_of_play == 2\n puts 'The turn is now being being dealt.'\n sleep(1)\n deal_post_flop\n elsif @stage_of_play == 3\n puts 'The river is now being dealt.'\n sleep(1)\n deal_post_flop\n end\nend", "def runner\nwelcome\ncards=initial_round\ncards=hit?(cards)\ndisplay_card_total(cards)\nwhile cards<21\n cards=hit?(cards)\nend\nend_game(cards)\nend", "def lay_card\n @hand.shift\n end", "def runner\n welcome\n cards_counter = initial_round\n\n until cards_counter > 21\n compare = hit?(cards_counter)\n compare == cards_counter ? display_card_total(cards_counter):display_card_total(compare)\n cards_counter = compare\n end\nend_game(cards_counter)\nend", "def flip_deck\n if @deck.empty?\n if @recycle_count < @recycle_limit\n @deck = @discard.reverse.collect {|c| c.flip}\n @discard = CardArray.new\n @recycle_count += 1\n else\n nil\n end\n else\n @discard.push @deck.pop\n end\n @discard.first.flip\n end", "def draw\n @deck.shift\n end", "def runner\n # code runner here\n welcome \n cards = initial_round\n while cards < 21 do\n cards = hit?(cards)\n # display_card_total(cards)\n end\nend_game(cards)\nend", "def next_turn!\n @campaign.gangs.update_all( retreating: false )\n @campaign.players.update_all( maintenance_required: false )\n\n @campaign.turn += 1\n @campaign.players_hire_and_move!\n\n @campaign.save!\n end", "def drawcard\n @deck.pop\n end", "def add_one_card\n if request.post?\n front = params[:card][:front]\n back = params[:card][:back]\n\n if front.blank? 
|| back.blank?\n flash[:error] = 'Front and Back must not be blank'\n else\n @deck = Deck.all.select{|x| x.title =~ /^Misc/}.sort_by(&:created_at).last\n\n # first time only\n if @deck.nil?\n @deck = Deck.new(:title => 'Misc 1', :front_description => 'Thai', :back_description => 'English', :active => true)\n @deck.save!(false)\n end\n\n # already have ten cards, make a new one\n if @deck.cards.length >= 10\n @deck.title =~ /^Misc (.*)/\n number = $1.to_i + 1\n @deck = Deck.new(:title => \"Misc #{number}\", :front_description => 'Thai', :back_description => 'English', :active => true)\n @deck.save!(false)\n end\n\n Card.create(:front => front, :back => back, :deck_id => @deck.id)\n flash[:notice] = \"Added card to deck #{@deck.title}, front: #{front}, back: #{back}\"\n end\n end\n\n render :layout => false\n end", "def set_next_suit(suit)\n end", "def move_card(draw, play)\n if play.empty? && draw.any?\n play << draw.shift\n elsif draw.any? && (within_one?(draw[0].rank.to_i, play[-1].rank.to_i))\n play << draw.shift\n else\n end\nend", "def deal(player)\n cards = []\n 26.times do \n cards << self.deck.shift\n end\n\n player.receive_cards(cards)\n end", "def add_three_cards\n cards = self.assignments.remaining.first(3)\n cards.each do |card|\n card.update_attribute(:card_status, \"in_play\")\n end\n end", "def generate_new_card\n new_card = @my_deck.grab_single_card\n old_card = @old_card\n user_guess = @guess\n puts \"\"\n puts new_card\n puts \"\"\n compare_cards(new_card, old_card, user_guess)\n end", "def take_card(card)\n @cards << card\n end", "def runner\n welcome #welcomes player \n cardtotal = initial_round #stores the two cards from the first dealing\n until cardtotal > 21 #until their card total is greater than 21\n cardtotal = hit?(cardtotal) #set the new card total equal to the player's decision\n display_card_total(cardtotal)\n if cardtotal == 21\n puts \"You cards add up to #{cardtotal}! Blackjack!\"\n return\n end\n end\n end_game(cardtotal) #ends the game and returns player's cardtotal\nend", "def take_card\n @store.pop\n end", "def replace_card_with_new(player, card)\n @deck << card\n @deck.shuffle!\n new_card = @deck.shift\n return if new_card == card\n # This will raise if player didn't have the old card\n player.replace_cards(@main_token, [card], [new_card])\n @output_streams.each { |os| os.new_cards(player.user) }\n end", "def move_card( card, places )\n \n n = places\n card_locus = @deck.index( card )\n while (n != 0) do\n \n if (card_locus + 1 < @deck.length )\n @deck[card_locus], @deck[card_locus +1] = @deck[card_locus+1], @deck[card_locus]\n card_locus = card_locus+1\n else\n card = @deck.slice!(card_locus)\n @deck.insert(1, card)\n card_locus = 1\n \n end\n # subtract 1 from n\n n -= 1\n \n end\n \n end", "def receive_revealed_card(pos, value)\n if @visited_first[value] == nil \n @visited_first[value] = pos\n elsif @visited_first[value] != pos\n @visited_second[value] = pos\n end\n end", "def drawCard\n\t\t@hand = @hand.push(@deck.pop)\n\tend", "def play_like_a_dummy\r\n # very brutal algorithm , always play the first card\r\n card = @cards_on_hand.pop\r\n return card\r\n end", "def runner\n welcome\n # initial_round\n cardTotal = initial_round\n until cardTotal > 21\n cardTotal = hit? 
cardTotal\n display_card_total cardTotal\n end\n end_game cardTotal\nend", "def runner\n welcome\n sum_card = initial_round\n until sum_card >= 21 \n sum_card = hit?(sum_card)\n display_card_total(sum_card)\n end_game(sum_card)\n end\nend", "def runner\n welcome\n card_total = initial_round # first round delt cards\n\n until card_total > 21\n card_total = hit?(card_total)\n display_card_total(card_total)\n end\n\n end_game(card_total)\nend", "def next=(aCard)\n if aCard.instance_of? Card\n self.next_id = aCard.id\n else\n self.next_id = aCard\n end\n end", "def advance; end", "def advance; end" ]
[ "0.72943795", "0.6760288", "0.6730438", "0.6686069", "0.65777946", "0.65393955", "0.6482234", "0.64423966", "0.643125", "0.6353475", "0.6287692", "0.62747866", "0.62694", "0.6247394", "0.623715", "0.6227019", "0.62233144", "0.62193733", "0.61794186", "0.6125053", "0.61191374", "0.6109969", "0.6088011", "0.6084664", "0.60605216", "0.6055528", "0.6042332", "0.6013826", "0.5989963", "0.59581244", "0.59379935", "0.591424", "0.59114283", "0.58991116", "0.5880228", "0.58688563", "0.58652157", "0.5849591", "0.58470094", "0.5829465", "0.58283293", "0.58271897", "0.5818423", "0.58174956", "0.5808652", "0.5807449", "0.5807238", "0.5801992", "0.57978207", "0.57922333", "0.57823783", "0.57789725", "0.5755443", "0.5739386", "0.5738376", "0.57382166", "0.57348645", "0.5724089", "0.57237005", "0.57230806", "0.57163024", "0.57044923", "0.57024986", "0.57012635", "0.5699258", "0.5695292", "0.5691612", "0.56887794", "0.5684842", "0.5679777", "0.56783885", "0.567601", "0.5661891", "0.56571585", "0.56530976", "0.56494075", "0.56491864", "0.5646742", "0.56459874", "0.56425714", "0.5636188", "0.5636159", "0.56308764", "0.56280065", "0.5626366", "0.562081", "0.5611497", "0.56112105", "0.5609421", "0.5604273", "0.56032944", "0.5601235", "0.5600714", "0.55928916", "0.5589075", "0.5588372", "0.5583919", "0.55762565", "0.55676025", "0.55676025" ]
0.80646867
0
Create the target instance
def create(state) info("Creating instance #{instance.name}") return if state[:server_id] domain = create_domain state[:server_id] = domain.id state[:hostname] = domain.public_ip_address instance.transport.connection(state).wait_until_ready info("Libvirt instance #{domain.name} created.") end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_target_mock\n ref = self.service_name.downcase.gsub(/ /, '-')\n create_row = {\n ref: ref,\n display_name: self.service_name,\n type: 'instance',\n iaas_type: 'ec2',\n iaas_properties: {},\n project_id: self.project.id\n }\n Model.create_from_row(self.project.model_handle(:target), create_row, convert: true, ret_obj: { model_name: :target_instance })\n end", "def create( target_class, *args )\n @class_loader.newInstance(target_class, *args )\n end", "def create!(attributes = {})\n self << target_class.create!(attributes)\n end", "def create(attributes = {})\n self << target_class.create(attributes)\n end", "def create_instance\n create_instances(1).first\n end", "def target\n target_hash_or_obj = self[:datacenter]\n if target_hash_or_obj.is_a?(Target)\n target_hash_or_obj\n else\n target_hash = target_hash_or_obj\n target_idh = base_node.model_handle(:target_instance).createIDH(id: target_hash[:id])\n target_idh.create_object(target_hash)\n end\n end", "def build(attrs = {})\n target_class.build(@client, attrs)\n end", "def create!\n raise NotImplementedError\n end", "def create\n # TODO: implement create\n end", "def create\n \n end", "def create\n raise NotImplementedError\n end", "def create\n raise NotImplementedError\n end", "def create!\n end", "def create()\n instance = create_instance()\n set_instance_properties(instance)\n create_instance_children(instance)\n return instance\n end", "def create(_options)\n raise NotImplementedError\n end", "def create!(*args, &block)\n instance = new(*args, &block)\n instance.create!\n instance\n end", "def create_object\n sut.send(:new)\n end", "def create(_instance)\n raise Errors::Backend::NotImplementedError, 'Requested functionality is not implemented'\n end", "def start_instance\n @instance = Task.find(@task).instances.create\n end", "def create_target(host,port,ssl,sel)\n\t\ttar = Target.create(\n\t\t\t\t:host => host, \n\t\t\t\t:port => port, \n\t\t\t\t:ssl => ssl, \n\t\t\t\t:selected => sel\n\t\t\t)\n\t\ttar.save\t\n\t\t#framework.events.on_db_target(context, rec)\n\tend", "def initialize( target )\n\t\t@target = target\n\tend", "def create # rubocop:disable Metrics/AbcSize\n inst_details = AttrFinder.new(@instanceparameters)\n inst_details.options = @options\n inst_details.validate = @validate\n inst_details.function = 'server'\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::Models::LaunchInstanceDetails.new\n ssh_public_key = @instanceparameters['server']['ssh-key']\n request.availability_domain = inst_details.ad\n request.compartment_id = inst_details.compartment\n request.display_name = @instanceparameters['server']['display_name']\n request.image_id = inst_details.image\n request.shape = @instanceparameters['server']['shape']\n request.subnet_id = inst_details.subnet\n request.metadata = { 'ssh_authorized_keys' => ssh_public_key }\n api = OracleBMC::Core::ComputeClient.new\n response = api.launch_instance(request)\n @instance_id = response.data.id\n compartment(inst_details.compartment)\n running_instance = api.get_instance(@instance_id).wait_until(:lifecycle_state,\n OracleBMC::Core::Models::Instance::LIFECYCLE_STATE_RUNNING,\n max_interval_seconds: 5, max_wait_seconds: 300)\n if @instanceparameters['server']['attachments']\n @instanceparameters['server']['attachments'].each do |vol|\n attach(@instance_id, vol['volume'])\n end\n end\n running_instance\n end", "def new()\n trace(\"Instance #{index} created\")\n index += 1\n end", "def initialize(target)\n @target = target\n end", "def initialize(target)\n 
@target = target\n end", "def initialize_target\n end", "def create\n raise \"Not supported\"\n end", "def create_instance(opts)\n opts = check_params(opts,[:class_instance_keys])\n super(opts)\n end", "def new_instance(suite, platform, index)\n Instance.new(\n driver: new_driver(suite, platform),\n lifecycle_hooks: new_lifecycle_hooks(suite, platform),\n logger: new_instance_logger(suite, platform, index),\n suite: suite,\n platform: platform,\n provisioner: new_provisioner(suite, platform),\n transport: new_transport(suite, platform),\n verifier: new_verifier(suite, platform),\n state_file: new_state_file(suite, platform)\n )\n end", "def new_instance(suite, platform, index)\n Instance.new(\n :busser => new_busser(suite, platform),\n :driver => new_driver(suite, platform),\n :logger => new_logger(suite, platform, index),\n :suite => suite,\n :platform => platform,\n :provisioner => new_provisioner(suite, platform),\n :binding => new_binding(suite, platform),\n :state_file => new_state_file(suite, platform)\n )\n end", "def create params\n raise_start_server unless Server::node\n new params\n end", "def create_target(host,port,ssl,sel)\n\t\ttar = WmapTarget.create(\n\t\t\t\t:host => host,\n\t\t\t\t:address => host,\n\t\t\t\t:port => port,\n\t\t\t\t:ssl => ssl,\n\t\t\t\t:selected => sel\n\t\t\t)\n\t\t#framework.events.on_db_target(rec)\n\tend", "def build\n raise FedoraMigrate::Errors::MigrationError, \"No qualified targets found in #{source.pid}\" if target.nil?\n\n # create target, and apply depositor metadata\n obj = target.new\n\n obj.apply_depositor_metadata @depositor_utln\n obj.visibility = Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC\n\n user = User.find_by_user_key(@depositor_utln)\n# CurationConcerns::Workflow::ActivateObject.call(target: obj, comment: 'activate object', user: user)\n\n create_and_add_payload(obj, @payload_primary, @depositor_utln)\n\n #deal with 2 primary datastream objects, storing second object in a new file set\n create_and_add_payload(obj, @payload_secondary, @depositor_utln) unless @payload_secondary.nil?\n\n #handle a case of bad hand created data on old records\n create_and_add_payload(obj, \"ARCHIVAL_SOUND\", @depositor_utln) if @payload_primary == \"ARCHIVAL_WAV\"\n\n # back up old data\n #create_and_add_fcrepo3_set obj\n\n process_desc_metadata obj\n process_admin_metadata obj\n process_technical_metadata obj\n process_relsext_metadata obj\n\n# obj.save\n\n process_collection_metadata obj\n\n active_workflow = Sipity::Workflow.find(2)\n Sipity::Entity.create!(proxy_for_global_id: obj.to_global_id.to_s,\n workflow: active_workflow,\n workflow_state: nil)\n\n obj\n end", "def build\n klass = create_class(@class_name, @options)\n klass.new\n end", "def newInstance( params={} )\n model.new( params ).extend( InstanceMethods )\n end", "def create\n raise NotImplemented\n end", "def create\n end", "def initialize(target)\n @target = target\n end", "def set_target\n access_key = '16e5c472cbad4e592e977029af54b1500b551362'\n secret_key = '932998f43862357f0da1a3e9b0340f5b38c55eb1'\n connection = Vws::Api.new(access_key, secret_key)\n width = 32\n active_flag = true\n application_metadata = nil\n puts @target.image.inspect\n puts vuforia_return = \"ok\"\n # vuforia_return = connection.add_target(target_name, \"http://api.playarshop.com\" + @target.image.url, width, active_flag, application_metadata)\n vuforia_return = connection.add_target(target_name, \"./public/logo.png\", width, active_flag, application_metadata)\n puts 
vuforia_return.inspect\n @target = Target.find(params[:id])\n end", "def create\n @target = Target.new(target_params)\n\n respond_to do |format|\n if @target.save\n format.html { redirect_to @target, notice: 'Target was successfully created.' }\n format.json { render :show, status: :created, location: @target }\n else\n format.html { render :new }\n format.json { render json: @target.errors, status: :unprocessable_entity }\n end\n end\n end", "def create(name)\n object = new(name)\n @instances.push(object)\n object\n end", "def instantiate(params)\n instance = Instance.new( *interpolate(params), @parent )\n # instance.apply_resource_type # TODO: do we need apply_security_scheme?\n instance\n end", "def setup_create\n\t\traise NotImplementedError, \"machine_create is not implemented\"\n\tend", "def initialize(name, target, agent)\n @agent = agent\n @name = name\n @target = target\n if self.class.exist(name, agent)\n import\n else\n create\n end\n end", "def create\n identify.tap { type }\n end", "def instantiate\n resource.new(data)\n end", "def create!(opts = {})\n instance = new(opts)\n instance.save!\n instance\n end", "def create\n name_to_const.new\n end", "def create(state)\n self.class.lock!\n state[:hostname] = Socket.gethostname\n logger.info(\"[Localhost] Instance #{instance} ready.\")\n end", "def create\n \n end", "def create\n raise NotImplementedError\n end", "def create(*args)\n instance = self.new(*args)\n instance.save\n return instance\n end", "def create(instance)\n raise NotImplementedError, 'Expected adapter to override `create`'\n end", "def initialize(target)\n @target = target\n end", "def initialize(target)\n @target = target\n end", "def createVehicle _obj, _args\n \"_obj createVehicle _args;\" \n end", "def create!(*args)\n instance = self.new(*args)\n instance.save!\n return instance\n end", "def create\n raise ArgumentError, 'Symlink target undefined' unless to\n might_update_resource do\n provider.create\n end\n end", "def create(config)\n PlatformOps::Utils.validated_config config, %i(ami_id ssh_user)\n\n @ami_id = config[:ami_id]\n @ssh_user = config[:ssh_user]\n @ingress_cidrs = combine_ingress_addresses(config[:ingress_ip], config[:ingress_cidrs])\n @ssh_public_key_path = config[:ssh_public_key] || '~/.ssh/id_rsa.pub'\n @ssh_private_key_path = config[:ssh_private_key] || '~/.ssh/id_rsa'\n @ssh_poll_private_ip = config[:ssh_poll_private_ip]\n @security_group_connections = config[:security_group_connections] || []\n @eip_allocation_id = config[:eip_allocation_id]\n @instance_type = config[:instance_type] || 't2.medium'\n @cidr = config[:cidr]\n\n begin\n route_table_id = create_route_table\n subnet_id = create_subnet(route_table_id, @cidr)\n security_group_id = create_security_group(security_group_connections)\n instance_id = create_instance(subnet_id, security_group_id)\n instance = wait_for_instance(instance_id)\n associate_eip(instance_id) if @eip_allocation_id\n instance\n rescue Interrupt, StandardError => e\n logger.error e\n\n destroy\n\n raise\n end\n end", "def create_ec2_instance(attrs)\n instance = ec2.instances.create(attrs)\n perform_instance_checks(instance)\n instance\n end", "def create_instance(security_groups, key, user_data, size, region)\n @instances = nil\n Instance.new(@@ec2.run_instances(id, 1, 1, security_groups, key, user_data, nil, size, nil, nil, region).first)\n end", "def create(klass)\n klass.send(@creator, @conditions) if @creator\n end", "def create\n make_create_request\n end", "def create\n @target_def = 
TargetDef.new(params[:target_def])\n\n respond_to do |format|\n if @target_def.save\n flash[:notice] = 'TargetDef was successfully created.'\n format.html { redirect_to(@target_def) }\n format.xml { render :xml => @target_def, :status => :created, :location => @target_def }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @target_def.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create_instance(credentials, image_id, opts)\n new_vapp = nil\n vapp_opts = {} #assemble options to pass to Fog::Terremark::Real.instantiate_vapp_template\n terremark_hwp = hardware_profiles(credentials, {:name => 'default'}).first #sanity check values against default\n name = opts[:name]\n if not name\n name = \"inst#{Time.now.to_i}\"\n end\n if name.length > USER_NAME_MAX\n raise \"Parameter name must be #{USER_NAME_MAX} characters or less\"\n end\n unless ( (terremark_hwp.include?(:cpu, opts[:hwp_cpu].to_i)) &&\n (terremark_hwp.include?(:memory, opts[:hwp_memory].to_i)) ) then\n raise Deltacloud::Exceptions::ValidationFailure.new(\n StandardError.new(\"Error with cpu and/or memory values. you said cpu->#{opts[:hwp_cpu]} and mem->#{opts[:hwp_memory]}\")\n )\n end\n vapp_opts['cpus'] = opts[:hwp_cpu]\n vapp_opts['memory'] = opts[:hwp_memory]\n safely do\n terremark_client = new_client(credentials)\n#######\n#FIXME# what happens if there is an issue getting the new vapp id? (eg even though created succesfully)\n#######\n vapp_id = terremark_client.instantiate_vapp_template(name, image_id, vapp_opts).body['href'].split('/').last\n new_vapp = terremark_client.get_vapp(vapp_id)\n return convert_instance(new_vapp, terremark_client, credentials.user) #return an Instance object\n end\n end", "def new\n \n end", "def create(opts = {})\n instance = new(opts)\n instance.save\n instance\n end", "def create; end", "def create; end", "def create; end", "def create; end", "def create_object\n definition.sought_type.new\n end", "def create\n \t\n end", "def create\n begin\n send(rest_xact_handler) do\n @target = model.new\n @target.attributes = @request_resource\n @target.save!\n end\n rescue ActiveRecord::UnknownAttributeError => e\n # Ugly heuristic, but didn't find anything better\n field_name = $1 if e.message =~ /unknown attribute: (.*)/\n raise BadRequest.new(e.message,\n :per_field_msgs => { field_name => 'Is not defined' },\n :retry_possible => false)\n rescue ActiveRecord::RecordInvalid => e\n raise UnprocessableEntity.new(e.message,\n :per_field_msgs => target.errors.inject({}) { |h, (k, v)| h[k] = v; h },\n :retry_possible => false)\n rescue ActiveRecord::RecordNotSaved => e\n raise UnprocessableEntity.new(e.message,\n :retry_possible => false)\n end\n\n if is_true?(params[:_suppress_response])\n render :nothing => true, :status => :created\n else\n find_target(:id => @target.id)\n respond_with(@target, :status => :created) do |format|\n yield(format) if block_given?\n end\n end\n end", "def create\n attrs = create_attributes\n @object = klass.new\n object.reindex_extent = Hyrax::Adapters::NestingIndexAdapter::LIMITED_REINDEX\n run_callbacks :save do\n run_callbacks :create do\n klass == Collection ? 
create_collection(attrs) : work_actor.create(environment(attrs))\n end\n end\n log_created(object)\n end", "def before_create\n self.target_id = target.id if target\n end", "def create\n Puppet.debug( \"#{self.resource.type}: CREATE #{resource[:name]}\" ) \n end", "def camCreate _obj, _args\n \"_obj camCreate _args;\" \n end", "def run_instance options = {}\n instances = InstanceCollection.new(:config => config)\n instances.create(options.merge(:image => self))\n end", "def create\n # Submit de new, crea nueva instance en DB.\n end", "def create_project_target(target, project, machine, pool)\n handle_action_exceptions(__method__) do\n cmd_line = [\n \"createprojecttarget '#{target}' '#{project}' \"\\\n \"'#{machine}' '#{pool}'\"\n ]\n cmd_line << 'json' if @json\n\n handle_return(@toolshck_ether.cmd(cmd_line.join(' ')))\n end\n end", "def create!(attributes = {})\n object = klass.new(attributes)\n object.save!\n object\n end", "def create_instance(credentials, image_id, opts)\n racks = new_client( credentials )\n hwp_id = opts[:hwp_id] || 1\n name = Time.now.to_s\n if (opts[:name]) then name = opts[:name] end\n safely do\n return convert_srv_to_instance(racks.start_server(image_id, hwp_id, name))\n end\n end", "def create_target_picture\n @target = Sprite.new(@viewport)\n @target.bitmap = Cache.picture(MAP_TARGET)\n center(@target)\n @target.opacity = 0\n end", "def create\n # USELESS\n end", "def create\n # USELESS\n end", "def new\n \n end", "def new\n \n end", "def new\n \n end", "def new\n \n end", "def new\n \n end", "def instantiate!; end", "def create_target_position\n @target = Sprite.new\n return if no_target\n @target.bitmap = Cache.picture(TARGET_PICTURE)\n center_im(@target)\n @target.x = @target_x\n @target.y = @target_y\n @target.z = 14\n end", "def initialize(target_class)\n @target_class = target_class\n @machine = Machine.new(target_class)\n end", "def create_record attributes\n creator = @creator_class.new attributes, @context\n FactoryGirl.create @clazz.name.underscore.to_sym, creator.factorygirl_attributes\n end", "def create_instance(supervisor)\n Instance.new(supervisor, self, get_instance_id)\n end", "def create(args = {})\n end", "def create\n @instance = @provider.instances.new(params[:instance])\n @instance.state = \"Building\"\n respond_to do |format|\n if @instance.save\n @instance.create_instance(@provider.connect!)\n format.html { redirect_to cloud_provider_instance_path(@provider,@instance), notice: 'Instance was successfully created.' }\n format.json { render json: @instance, status: :created, location: @instance }\n else\n format.html { render action: \"new\" }\n format.json { render json: @instance.errors, status: :unprocessable_entity }\n end\n end\n end", "def create(opts)\n opts = check_params(opts,[:woc_instance,:local_ips,:enabled,:serverssl])\n super(opts)\n end", "def create(options)\n new(options).create\n end" ]
[ "0.77701956", "0.7059186", "0.7038512", "0.6935186", "0.6782983", "0.66949844", "0.65368384", "0.6534073", "0.65126467", "0.6511741", "0.6472529", "0.6472529", "0.64515924", "0.64464325", "0.6393916", "0.638048", "0.63799226", "0.63681304", "0.6237704", "0.6235831", "0.62307566", "0.6214942", "0.6202843", "0.6200285", "0.6200285", "0.6194491", "0.6186894", "0.6186145", "0.61829287", "0.61822855", "0.61761045", "0.6173931", "0.613796", "0.6130317", "0.60911316", "0.60669196", "0.6058997", "0.60351586", "0.6034141", "0.60266864", "0.60197425", "0.6018407", "0.60137266", "0.6010233", "0.6009337", "0.6008552", "0.5997716", "0.59936625", "0.5988714", "0.59886026", "0.5986473", "0.59860605", "0.59766394", "0.5964046", "0.5964046", "0.5960498", "0.59592974", "0.5951443", "0.5950011", "0.59483004", "0.59346324", "0.5928683", "0.59282255", "0.59192836", "0.59163886", "0.59114957", "0.5908189", "0.5907671", "0.5907671", "0.5907671", "0.5907671", "0.5907567", "0.59064376", "0.58906627", "0.58895075", "0.5886189", "0.58851147", "0.5883965", "0.5879007", "0.5877539", "0.5873056", "0.58670056", "0.5861183", "0.5857553", "0.5852562", "0.5852562", "0.58504146", "0.58504146", "0.58504146", "0.58504146", "0.58504146", "0.5835335", "0.58314013", "0.5823769", "0.57927066", "0.5790998", "0.5789732", "0.5789037", "0.57859266", "0.578106" ]
0.5989781
48
Destroy the target instance
def destroy(state) info("Destroying instance #{instance.name}") return if state[:server_id].nil? instance.transport.connection(state).close domain = load_domain(state[:server_id]) destroy_domain(domain) unless domain.nil? info("Libvirt instance #{state[:server_id]} destroyed.") state.delete(:server_id) state.delete(:hostname) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def destroy!\n destroy_instance(_id)\n end", "def destroy!\n self.destroy\n end", "def teardown\n response = Vanagon::Utilities.http_request(\n \"#{@pooler}/vm/#{@target}\",\n \"DELETE\",\n nil,\n { 'X-AUTH-TOKEN' => @token }\n )\n if response and response[\"ok\"]\n Vanagon::Driver.logger.info \"#{@target} has been destroyed\"\n $stderr.puts \"#{@target} has been destroyed\"\n else\n Vanagon::Driver.logger.info \"#{@target} could not be destroyed\"\n warn \"#{@target} could not be destroyed\"\n end\n rescue Vanagon::Error => e\n Vanagon::Driver.logger.info \"#{@target} could not be destroyed (#{e.message})\"\n warn \"#{@target} could not be destroyed (#{e.message})\"\n end", "def destroy(inst)\n self.class.destroy inst\n end", "def destroy\n @instance = @provider.instances.find(params[:id])\n InstanceOperations.terminate_instances(@provider.connect!, @instance)\n @instance.destroy\n redirect_to cloud_provider_path(@provider) \n end", "def destroy\n kill\n reset\n end", "def destroy\n response = perform_destroy(nil)\n self.destroyed = response.status\n self\n end", "def delete_instance instance_id\n instances.delete_instance name: instance_path(instance_id)\n end", "def terminate\n self.destroy\n end", "def destroy\n self.delete_\n end", "def destroy(_state)\n workflow do\n run_destroy.bind do\n remove_instance_directory\n end\n end\n end", "def destroy\n requires_existing_vm\n domain.undefine\n set_domain(nil)\n end", "def destroy\n \n end", "def destroy\n \n end", "def destroy\n @instance = Instance.find(params[:id])\n @instance.destroy\n\n respond_to do |format|\n format.html { redirect_to(instances_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n resources = find_resources_by_tag\n\n destroy_instance(resources)\n destroy_security_group(resources)\n destroy_subnet(resources)\n destroy_route_table(resources)\n\n nil\n end", "def destroy\r\n do_without_exception(:destroy!)\r\n end", "def destroy\n nil\n end", "def destroy\n @instance = Instance.find(params[:id])\n @instance.destroy\n\n respond_to do |format|\n format.html { redirect_to instances_url }\n format.json { head :no_content }\n end\n end", "def destroy\n FileUtils.rm_rf(target)\n end", "def rm(target)\n target.active_record_instance.destroy\n end", "def destroy\n @instance.destroy\n respond_to do |format|\n format.html { redirect_to instances_url, notice: 'Instance was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @instance.destroy\n respond_to do |format|\n format.html { redirect_to instances_url, notice: 'Instance was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n Process.kill(9, pid)\n end", "def delete_instance instance_id\n execute do\n instances.delete_instance(\n instance_path(instance_id)\n )\n end\n end", "def destroy\n fail ESP::NotImplementedError\n end", "def destroy!\n destroy || raise(ActiveRecord::RecordNotDestroyed)\n end", "def destroy\n #@instance = Instance.find(params[:id])\n @instance.destroy\n\n respond_to do |format|\n format.html { redirect_to instances_url }\n format.json { head :no_content }\n end\n end", "def destroy!; end", "def destroy!\n manager.delete(self)\n end", "def destroy!\n manager.delete(self)\n end", "def delete_vm_instance(name, namespace)\n @conn.vminstances.destroy(name, namespace)\n end", "def destroy \n ec2 = self.class.new_ec2(@resource.value(:user), @resource.value(:password))\n ec2.terminate_instances({:instance_id => @property_hash[:instance_id]})\n ec2.delete_security_group({:group_name => @resource.value(:name)})\n end", "def destroy\n @_destroyed = true\n _owner.update_attribute(embedded_as, nil) if _owner\n end", "def destroy\n @instance_name = InstanceName.find(params[:id])\n @instance_name.destroy\n\n respond_to do |format|\n format.html { redirect_to(instance_names_url) }\n format.xml { head :ok }\n end\n end", "def destroy_instance_database\n instance_database.delete!\n end", "def destroy_instance_database\n instance_database.delete!\n end", "def destroy! #TODO\n destroy || raise(ActiveRecord::RecordNotDestroyed)\n end", "def destroy; end", "def destroy; end", "def destroy; end", "def destroy; end", "def destroy; end", "def destroy; end", "def destroy; end", "def destroy; end", "def destroy; end", "def destroy; end", "def destroy; delete end", "def destroy\n fail ESP::NotImplementedError\n end", "def destroy\n fail ESP::NotImplementedError\n end", "def destroy\n fail ESP::NotImplementedError\n end", "def destroy\n fail ESP::NotImplementedError\n end", "def destroy\n fail ESP::NotImplementedError\n end", "def destroy\n fail ESP::NotImplementedError\n end", "def destroy\n fail ESP::NotImplementedError\n end", "def destroy\n fail ESP::NotImplementedError\n end", "def destroy\n delete\n freeze\n end", "def destroy\n sivel2_gen_destroy\n end", "def destroy(_ = nil)\n OneProvisionLogger.info('(Destroy skipped)')\n end", "def destroy!\n destroy || raise(RecordNotDestroyed)\n end", "def destroy(context={})\n rpc_execute('unlink', [self.id], context)\n end", "def destroy_instance(instance)\n tenant = @identity.find_tenant(instance.tenant_id)\n fog_options = @fog_options[:storage].merge(:hp_tenant_id => tenant.id)\n\n storage = VCAP::Services::Swift::Storage.new(@logger, fog_options)\n storage.delete_account\n\n @logger.debug \"Account meta data (should be 'Recently deleted'): \" + storage.get_account_meta_data.body.to_s\n\n @identity.delete_users_by_tenant_id(instance.tenant_id, @fog_options[:name_suffix])\n @identity.delete_tenant(instance.tenant_id)\n raise SwiftError.new(SwiftError::SWIFT_DESTROY_INSTANCE_FAILED, instance.inspect) unless instance.destroy\n end", "def destroy\n # Shutdown pool if active\n service.pool_action uuid, :destroy if active?\n # If this is a persistent domain we need to undefine it\n service.pool_action uuid, :undefine if persistent?\n end", "def stop()\n self.destroy()\n\n # Stop the EC2 instance\n $ec2.terminate_instances([self.id])\n end", "def destroy(context={})\n self.class.rpc_execute('unlink', self.id, context)\n end", "def destroy\r\n end", "def destroy\n\t\trun_callbacks(:destroy) { delete } \n\tend", 
"def destroy\n log_failure do\n handler.destroy\n end\n end", "def destroy\n @ec2_instance.destroy\n respond_to do |format|\n format.html { redirect_to ec2_instances_url, notice: 'Ec2 instance was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @test_instance.destroy\n respond_to do |format|\n format.html { redirect_to test_instances_url, notice: 'Test instance was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def teardown\n teardown_construct(@target_path)\n @release.destroy\n @release = nil\n end", "def destroy\n @entity.destroy\n self\n end", "def destroy(context)\n set_resource(context)\n\n begin\n instance_variable_get(:\"@#{resource_name}\").destroy\n instance_variable_get(:\"@#{resource_name}\").to_json\n rescue StandardError => e\n context.halt(500, { status: 'error', message: e.message }.to_json)\n end\n end", "def delete\n ensure_service!\n service.delete_instance path\n true\n end", "def destroy\n end", "def destroy\n end", "def destroy\n end", "def destroy\n end", "def destroy\n end", "def destroy\n get_instance.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end", "def deleteInstance(iController, ioInstance)\n # Clean up everything that was registered before destruction\n ioInstance.unregisterAll\n # Wait for any timer event that has to finish\n ioInstance.killTimers\n # Quit everything\n ioInstance.destroy\n end", "def destroy()\n\n super()\n end", "def destroy\n @instance_action = InstanceAction.find(params[:id])\n @instance_action.destroy\n\n respond_to do |format|\n format.html { redirect_to instance_actions_url }\n format.json { head :no_content }\n end\n end", "def destroy!\n destroy || _raise_record_not_destroyed\n end", "def destroy\n @instance_eni.destroy\n respond_to do |format|\n format.html { redirect_to instance_enis_url, notice: 'Instance eni was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @robot_instance.destroy\n respond_to do |format|\n format.html { redirect_to robot_instances_url, notice: 'Robot instance was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n delete_object\n true\n end", "def perform_destroy\n api.stack_destroy(self)\n end", "def destroy\n run_callbacks :destroy do\n rpc_execute('unlink', [id], context)\n @destroyed = true\n freeze \n end\n end", "def destroy!\n orchio_purge\n end", "def destroy\r\n end", "def destroy\r\n end", "def destroy\n self.unuse_pto_hours\n @time_off_instance.destroy\n respond_to do |format|\n format.html { redirect_to time_off_instances_url, notice: 'Time off instance was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @instance_eni_tag.destroy\n respond_to do |format|\n format.html { redirect_to instance_eni_tags_url, notice: 'Instance eni tag was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n\t\t\n\tend", "def destroy\n\n end", "def destroy\n\n end", "def ensure_destroy\n ensure_stop\n destroy if exist?\n end", "def destroy_resource object\n object.destroy\n end" ]
[ "0.8236234", "0.7452323", "0.74413985", "0.7380395", "0.7368819", "0.7356065", "0.73176205", "0.72651625", "0.7252328", "0.7234828", "0.72163683", "0.72151816", "0.71572155", "0.71572155", "0.71491283", "0.70986295", "0.7098422", "0.7060006", "0.705898", "0.70420086", "0.70385754", "0.70288557", "0.70284456", "0.7019495", "0.70048904", "0.6982226", "0.6978405", "0.69768435", "0.69719803", "0.69573396", "0.69573396", "0.6943113", "0.692684", "0.69066757", "0.6878241", "0.6876104", "0.6876104", "0.6866519", "0.68547946", "0.68547946", "0.68547946", "0.68547946", "0.68547946", "0.68547946", "0.68547946", "0.68547946", "0.68547946", "0.68547946", "0.6829018", "0.68194836", "0.68194836", "0.68194836", "0.68194836", "0.68194836", "0.68194836", "0.68194836", "0.68194836", "0.68191284", "0.6810291", "0.6798645", "0.679856", "0.67978394", "0.6763426", "0.6732024", "0.67278063", "0.67149293", "0.67038673", "0.66992253", "0.66956407", "0.66914374", "0.6691417", "0.6689046", "0.6674099", "0.6672751", "0.6666646", "0.66642195", "0.66642195", "0.66642195", "0.66642195", "0.66642195", "0.6651949", "0.66517776", "0.66495997", "0.6645057", "0.66417193", "0.66389185", "0.6637434", "0.66359353", "0.66297543", "0.6629208", "0.6628697", "0.6627322", "0.6627322", "0.6619355", "0.65971565", "0.6596231", "0.65939003", "0.65939003", "0.65938294", "0.6591722" ]
0.72489876
9
The command string to use for finding the domain IP address
def default_ip_command
  %q( awk "/$mac/ {print \$1}" /proc/net/arp )
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def description\n \"Query for the ip address of the given DNS name\"\nend", "def command(str)\n ar=str.split(/ /)\n case ar[0]\n when \"++addr\"\n if ar.size<2\n return @addr.to_s\n else \n @addr=ar[1].to_i\n end\n\n end\n return nil\n end", "def get_dns_ipaddr(host)\n dns = Dnsruby::DNS.new({\n :nameserver => [ IWMNns ],\n :search => [ 'canishe.com' ],\n :ndots => 1\n })\n\n answer = dns.getaddress(host)\n\n return answer.to_s\nend", "def get_ipaddr(dns_query, parsed_dns, length)\n address = \"\"\n case length\n when IPV4_ADDR_LENGTH\n address = dns_query[parsed_dns[:index], length].unpack(\"CCCC\").join('.')\n when IPV6_ADDR_LENGTH\n address = dns_query[parsed_dns[:index], length].unpack(\"nnnnnnnn\").map{|v| sprintf(\"%x\", v)}.join(':')\n end\n parsed_dns[:index] += length\n return address\n end", "def to_ipaddr\n unless ip_addr?\n lookup = `host #{to_s} | grep address`.split(/\\s+/)\n return to_s unless lookup.length == 4\n lookup[3]\n else \n to_s\n end\n end", "def get_ip_address\n rpc_get_fact_direct('host_ip')\n end", "def ip\n ssh.exec!(\"/sbin/ifconfig | grep 'inet addr:'| grep -v '127.0.0.1' | cut -d: -f2 | awk '{ print $1}'\").chomp\n end", "def ipaddr?; end", "def domain\n Domain.new((address.split('@')[1] || '').strip)\n end", "def read_host_ip\n ip = read_machine_ip\n base_ip = ip.split(\".\")\n base_ip[3] = \"1\"\n base_ip.join(\".\")\n end", "def getControlIP(x, y, domain = \"grid\") \n qs = <<CONTROL_QS\nSELECT nodes.control_ip\n FROM nodes \n LEFT JOIN locations ON nodes.location_id = locations.id \n LEFT JOIN testbeds ON locations.testbed_id = testbeds.id\nWHERE testbeds.node_domain='#{domain}' \n AND locations.x=#{x} \n AND locations.y=#{y};\nCONTROL_QS\n\n addr = nil\n runQuery(qs) { |ip|\n addr = ip\n }\n return addr\n end", "def get_ip_address\n items = `ifconfig | grep \"inet addr\"`.split\n addresses = []\n items.each do |item|\n addresses << item if item =~ /addr:/\n end\n ip = \"\"\n addresses.each do |address|\n ip = address.split(':')[1]\n if ip != '127.0.0.1'\n break\n end\n end\n ip\nend", "def getControlIP(hrn, domain = \"grid\")\n qs = <<CONTROL_QS\nSELECT nodes.control_ip\n FROM nodes\n LEFT JOIN locations ON nodes.location_id = locations.id\n LEFT JOIN testbeds ON locations.testbed_id = testbeds.id\nWHERE testbeds.name='#{domain}'\n AND nodes.hrn='#{hrn}';\nCONTROL_QS\n\n addr = nil\n runQuery(qs) { |ip|\n addr = ip\n }\n return addr\n end", "def set_domain_name(opts = {})\n cmds = command_builder('ip domain-name', opts)\n configure(cmds)\n end", "def ipaddr; end", "def ip_address(env)\n ip_address_record(env)[:address]\n end", "def domain\n try_opt(:domain)\n end", "def parse_ip\n @request[FHOST] || BLANK_STR\n end", "def echo_ip(ip) echo(ip ? ip.to_s : nil, 16, '-- dynamic --') end", "def ip_cmd_from_file\n\n ip_cmd = ''\n ip_cmd += 'to ' + self.to + ' ' if ! self.to.nil? && ! self.to.empty?\n ip_cmd += 'via ' + self.via + ' ' if ! self.via.nil? && ! self.via.empty?\n ip_cmd += 'dev ' + self.device + ' ' if ! self.device.nil? && ! self.device.empty?\n ip_cmd += 'table ' + self.table + ' ' if ! self.table.nil? && ! 
self.table.empty?\n return ip_cmd\n\n end", "def ip_address_or_f_q_d_n\n return @ip_address_or_f_q_d_n\n end", "def fqdn_correct?(host_name, domain_name, ip_addr)\n cmd_if %{egrep -q '^#{ip_addr}[[:space:]]+#{host_name}.#{domain_name}' /etc/hosts >/dev/null}\nend", "def ip_address; end", "def ip_address; end", "def ip_address; end", "def ip_address; end", "def ip_address; end", "def ip_address; end", "def get_ip_address\n IO.popen(\"ifconfig\") do |io|\n while line = io.gets\n return $1 if (line =~ /inet addr:([\\d\\.]+)/ and $1 != '127.0.0.1')\n end\n end\n return nil\nend", "def FQDN (domain, vlan)\n val = \"\" # the value to be returned\n cmd = `host -l #{domain} ns1.nwt01.corp.tripadvisor.com`\n cmd.each do |line|\n if line.match(/\\d*\\.\\d*\\.#{vlan}\\.\\d*/) # regx to find the specific command\n val << line.split(\" \")[0] << \",\"\n end\n end\n val = val[0...-1]\n puts val # prints the final string to the terminal, this need to be changed to a return statment if another needs the result\nend", "def domain(domain)\n get(\"/dns/domain/#{domain}\")\n end", "def ip\n ''\n end", "def addresses_ip_command(service_arg=service, options={})\n mac=self.mac\n\n # Aug 24 17:34:41 juno arpwatch: new station 10.247.4.137 52:54:00:88:5a:0a eth0.4\n # Aug 24 17:37:19 juno arpwatch: changed ethernet address 10.247.4.137 52:54:00:27:33:00 (52:54:00:88:5a:0a) eth0.4\n # Check if another ip_command string was provided\n ip_command_global=service_arg.ip_command.nil? ? 'grep $mac /var/log/arpwatch.log|sed -e \"s/new station//\"|sed -e \"s/changed ethernet address//g\" |sed -e \"s/reused old ethernet //\" |tail -1 |cut -d \":\" -f 4-| cut -d \" \" -f 3' : service_arg.ip_command\n ip_command_local=options[:ip_command].nil? ? ip_command_global : options[:ip_command]\n\n ip_command=\"mac=#{mac}; server_name=#{name.gsub(DOMAIN_CLEANUP_REGEXP, '_')}; \"+ip_command_local\n\n ip_address=nil\n\n if service_arg.uri.ssh_enabled?\n ip_address=ssh_ip_command(ip_command, service_arg.uri)\n else\n # It's not ssh enabled, so we assume it is\n if service_arg.uri.transport==\"tls\"\n raise Fog::Errors::Error.new(\"TlS remote transport is not currently supported, only ssh\")\n end\n ip_address=local_ip_command(ip_command)\n end\n\n # The Ip-address command has been run either local or remote now\n\n if ip_address==\"\"\n #The grep didn't find an ip address result\"\n ip_address=nil\n else\n # To be sure that the command didn't return another random string\n # We check if the result is an actual ip-address\n # otherwise we return nil\n unless ip_address=~/^(\\d{1,3}\\.){3}\\d{1,3}$/\n raise Fog::Errors::Error.new(\n \"The result of #{ip_command} does not have valid ip-address format\\n\"+\n \"Result was: #{ip_address}\\n\"\n )\n end\n end\n\n return { :public => [ip_address], :private => [ip_address]}\n end", "def getCmcIP(hrn, domain = \"grid\")\n qs = <<CMC_QS\nSELECT nodes.cmc_ip\n FROM nodes\n LEFT JOIN locations ON nodes.location_id = locations.id\n LEFT JOIN testbeds ON locations.testbed_id = testbeds.id\nWHERE testbeds.name='#{domain}'\n AND nodes.hrn='#{hrn}';\nCMC_QS\n\n addr = nil\n runQuery(qs) { |ip|\n addr = ip\n }\n return addr\n end", "def ip\n if ifconfig =~ /inet addr:([0-9.]+)/\n $1\n else\n \"0.0.0.0\"\n end\n end", "def getNameserverIPs(domain, addrtype = Resolv::DNS::Resource::IN::A)\n myresolv = Resolv::DNS.new()\n\n nameserver_addresses=Array.new\n myresolv.each_resource(domain, Resolv::DNS::Resource::IN::NS) do |nsrsc|\n nameserver_addresses.push(myresolv.getresource(nsrsc.name, addrtype).address)\n 
end\n\n myresolv.close()\n\n return nameserver_addresses\nend", "def name\n if ipv4?\n \"[#{ip_address}]\"\n elsif ipv6?\n \"[IPv6:#{ip_address}]\"\n elsif @config[:host_encoding] && @config[:host_encoding] == :unicode\n ::SimpleIDN.to_unicode(host_name)\n else\n dns_name\n end\n end", "def fetch_primary_ip_address\n capture(<<-GETADDR, :shell => \"bash\").chomp\n _if=\"$(netstat -nr | grep ^0\\.0\\.0\\.0 | awk '{print $8}')\";\n _ip=\"$(/sbin/ifconfig $_if | \\\n grep '^[[:space:]]*inet ' | awk '{print $2}' | \\\n awk -F':' '{print $2}')\";\n\n if [ -z \"$_ip\" -o \"$_ip\" == \"\" ] ; then\n echo \"\";\n return 10;\n else\n echo $_ip;\n fi\n GETADDR\nend", "def parse_domain_name\n mdata = /ip domain-name ([\\w.]+)/.match(config)\n { domain_name: mdata.nil? ? '' : mdata[1] }\n end", "def ip\n if (ip = @host.at('tag[name=host-ip]'))\n ip.inner_text\n end\n end", "def normalized_host; end", "def reverse_dns_lookup (ip)\n\t\tputs \"Retrieve the hostname by the reverse DNS lookup on IP: #{ip}\"\n\t\thostname = ip\n\t\tbegin\n\t\t\thostname = Socket.gethostbyaddr(ip.split('.').collect{ |x| x.to_i}.pack(\"CCCC\"))[0]\n\t\t\treturn hostname.downcase\n\t\trescue => ee\n\t\t\tputs \"Exception on method reverse_dns_lookup: #{ee}\" if @verbose\n\t\t\treturn hostname\n\t\tend\n\tend", "def get_fqdn(ip)\n begin\n resp = Socket.getaddrinfo(ip, nil)\n rescue\n return nil\n end\n fqdn = resp[0][2]\n nip = resp[0][3]\n return nil if (fqdn == nip)\n return fqdn\nend", "def get_fqdn(ip)\n begin\n resp = Socket.getaddrinfo(ip, nil)\n rescue\n return nil\n end\n fqdn = resp[0][2]\n nip = resp[0][3]\n return nil if (fqdn == nip)\n return fqdn\nend", "def my_ip\n get(\"/tools/myip\")\n end", "def host_ip\n Socket.gethostbyname(@backend.host)[3].unpack('CCCC') rescue [0, 0, 0, 0]\n end", "def hostname\n Resolv.getname(ip_address) rescue nil\n end", "def check_for_wildcard(domain)\n address = Resolv.getaddress(\"asdjlamsdklmasdnoemfjvcn.#{domain}\")\n puts \"The address '#{address}' \"\n\n address\n rescue Resolv::ResolvError => re\n # This error means that the domain isn't using wildcard subdomains\n # Silently ignore this error and allow nil to be returned\n rescue StandardError => se\n warn \"An unexpected error occurred: #{se.message}\"\n end", "def get_host\n begin\n host=%x(/usr/sbin/dsconfigad -show | /usr/bin/awk '/Computer Account/ {print $4}').chomp\n return host\nrescue\n puts \"this machine must not be bound to AD.\\n try again.\"\nend\nend", "def ipaddress\n config[\"ipaddress\"]\n end", "def get_domain_name(host)\n domain = nil\n search = nil\n resolv_conf = if host['platform'].include?('windows')\n if host.is_cygwin?\n host.exec(Command.new(\"cat /cygdrive/c/Windows/System32/drivers/etc/hosts\")).stdout\n else\n host.exec(Command.new('type C:\\Windows\\System32\\drivers\\etc\\hosts')).stdout\n end\n else\n host.exec(Command.new(\"cat /etc/resolv.conf\")).stdout\n end\n resolv_conf.each_line do |line|\n if (match = /^\\s*domain\\s+(\\S+)/.match(line))\n domain = match[1]\n elsif (match = /^\\s*search\\s+(\\S+)/.match(line))\n search = match[1]\n end\n end\n return_value ||= domain\n return_value ||= search\n\n return unless return_value\n\n return_value.gsub(/\\.$/, '')\n end", "def ipsource\n lanconfig[\"ip address source\"].downcase!\n end", "def domain\n @domain ||= PublicSuffix.parse(@fqdn).domain\n end", "def ip_by_interface(int)\n return `ifconfig #{int} | awk '/inet addr/ {split ($2,A,\":\"); print A[2]}'`.chomp\n end", "def ip; end", "def ip; end", "def ip\n # Get its IP that could have 
changed upon restart\n # cf https://github.com/moby/moby/issues/2801\n # Make sure we refresh its info before querying it, as we could hit a cache of a previous IP.\n _exit_status, stdout, _stderr = @cmd_runner.run_cmd \"#{podman_cmd} container inspect #{@container} | grep IPAddress\"\n stdout.strip.match(/\\d+\\.\\d+\\.\\d+\\.\\d+/)[0]\n end", "def ip\n TestLab::Utility.ip(self.address)\n end", "def domain\n domain = request.host\n domain << \":#{request.port}\" unless [80, 443].include?(request.port)\n domain\n end", "def ip(args = nil)\n if args and args[:meta]\n meta = args[:meta]\n elsif httpsession = Thread.current[:hayabusa][:httpsession]\n meta = httpsession.meta\n else\n raise \"Could not figure out meta-data.\"\n end\n \n if !meta[\"HTTP_X_FORWARDED_FOR\"].to_s.strip.empty? and ips = meta[\"HTTP_X_FORWARDED_FOR\"].split(/\\s*,\\s*/)\n return ips.first.to_s.strip\n elsif ip = meta[\"REMOTE_ADDR\"].to_s.strip and !ip.empty?\n return ip\n else\n raise \"Could not figure out IP from meta-data.\"\n end\n end", "def usage()\n puts \"ruby DNS.rb domain-host lookup-host1..lookup-hostN\"\n puts \"e.g. ruby DNS.rb 192.168.0.1 news.bbc.co.uk\"\n puts \"e.g. ruby DNS.rb 192.168.0.1 www.facebook.com www.twitter.com www.instagram.com\"\n exit(0)\nend", "def host_from_fqdn(question)\n domains.each do |domain|\n dd = dotted_domain(domain)\n if question.index(dd)\n host = question[0, question.index(dd)]\n end\n end\n nil\n end", "def what_should_my_dns_be?\n get_mongo_dns_endpoint(get_instance_id)\n end", "def ip_v4_address; end", "def get_host_info(s)\n\n # Prepare response array of aliases (IP and addresses)\n aliases = []\n\n # Get information from the given IP or name\n begin\n resp = Socket.getaddrinfo(s, nil)\n rescue\n aliases << s\n else\n\n fqdn = resp.first[2]\n ip = resp.first[3]\n aliases << fqdn\n\n if fqdn != ip\n host_dom = fqdn.split('.', 2)\n if $local_domain && host_dom.length == 2 && host_dom.last == $local_domain\n aliases << host_dom.first\n end\n aliases << ip\n end\n\n end\n\n return aliases\n\nend", "def address\n return @mac_address if defined? @mac_address and @mac_address\n re = %r/[^:\\-](?:[0-9A-F][0-9A-F][:\\-]){5}[0-9A-F][0-9A-F][^:\\-]/io\n cmds = '/sbin/ifconfig', '/bin/ifconfig', 'ifconfig', 'ipconfig /all', 'cat /sys/class/net/*/address'\n\n null = test(?e, '/dev/null') ? 
'/dev/null' : 'NUL'\n\n output = nil\n cmds.each do |cmd|\n begin\n r, w = IO.pipe\n ::Process.waitpid(spawn(cmd, :out => w))\n w.close\n stdout = r.read\n next unless stdout and stdout.size > 0\n output = stdout and break\n rescue\n # go to next command!\n end\n end\n raise \"all of #{ cmds.join ' ' } failed\" unless output\n\n @mac_address = parse(output)\n end", "def fqdn\n ssh.exec!(\"hostname --fqdn\").chomp\n end", "def hostname_to_ip hostname\n begin \n ip = Resolv.getaddress(config[:host])\n rescue Resolv::ResolvError => resolv_err\n raise Exception.new(\"Resolver error: #{resolv_err}\")\n end\n return ip\n end", "def show_like_ip\n return if table != 254\n\n str = \"\"\n if dst_len == 0\n str = \"default via #{extract_gateway_address_from_attrs} dev #{extract_oif_from_attrs}\"\n elsif dst_len == 32\n str = \"#{extract_dst_from_attrs} dev #{extract_oif_from_attrs}\"\n else\n end\n\n puts str unless str == \"\"\n end", "def only_ip()\n\n ip = ARGV[0]\n\n ipv4 = /^([1-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(\\.([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])){3}$/\n ipv6 = /^\\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)(\\.(25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)){3}))|:)))(%.+)?\\s*$/ \n\n if ip =~ ipv4 || ip =~ ipv6\n $onlyip = ip\n ARGV.shift\n else\n usage()\n end\n\nend", "def domain_name_label\n label = nil\n entries.each do |entry|\n entry.ip_configurations.each do |ip_config|\n if ip_config['public_ipaddress']['attached']\n label = ip_config['public_ipaddress']['domain_name_label']\n end\n end\n end\n\n label\n end", "def address\n if @domain.nil?\n\t@local\n else\n\t@local + '@' + @domain\n end\n end", "def read_guest_ip_prlctl\n vm_info = json { execute_prlctl('list', @uuid, '--full', '--json') }\n ip = vm_info.first.fetch('ip_configured', '')\n ip == '-' ? 
'' : ip\n end", "def extract_ip(addrinfo)\n addrinfo[2]\n end", "def read_host_ip(machine,env)\n nets = env[:libvirt_compute].list_networks\n if nets.size == 1\n net = nets.first\n else\n domain = env[:libvirt_compute].servers.get(machine.id.to_s)\n xml=Nokogiri::XML(domain.to_xml)\n networkname = xml.xpath('/domain/devices/interface/source').first.attributes['network'].value.to_s\n puts \"network name = #{networkname}\"\n net = env[:libvirt_compute].list_networks.find {|netw| netw[:name] == networkname}\n end\n # FIXME better implement by libvirt xml parsing\n `ip addr show | grep -A 2 #{net[:bridge_name]} | grep -i 'inet ' | tr -s ' ' | cut -d' ' -f3 | cut -d'/' -f 1`.chomp\n end", "def host_from_wildcard(question)\n domains.each do |domain|\n dd = dotted_domain(domain)\n if question.index(dd)\n host = question[0, question.index(dd)]\n return Array(host.split('.')).last\n end\n end\n nil\n end", "def request_address\n address.split('@').join(\"-request@\")\n end", "def ip\n @ip ||= @node.search('IP/listEntry').map(&:inner_text)\n end", "def netId _args\n \"netId _args;\" \n end", "def test_server_ip\n ip = `hostname -i`\n ip.gsub(\"\\n\", \"\")\n end", "def get_host\n host=%x(/usr/sbin/dsconfigad -show | /usr/bin/awk '/Computer Account/ {print $4}').chomp\n return host\n raise Error, \"this machine must not be bound to AD.\\n try again.\" if host == nil\nend", "def get_host\n host=%x(/usr/sbin/dsconfigad -show | /usr/bin/awk '/Computer Account/ {print $4}').chomp\n return host\n raise Error, \"this machine must not be bound to AD.\\n try again.\" if host == nil\nend", "def hostip\n static_network_config[\"ipAddress\"]\n end", "def last(options=nil)\n known_args = [:Objectify, :Short]\n objectify = false\n short = false\n\n if (options)\n if (!options.kind_of?(Hash))\n raise ArgumentError, \"Expected Hash, but \" +\n \"#{options.class} provided.\"\n end\n NetAddr.validate_args(options.keys,known_args)\n\n if( options.has_key?(:Short) && options[:Short] == true )\n short = true\n end\n\n if( options.has_key?(:Objectify) && options[:Objectify] == true )\n objectify = true\n end\n\n end\n\n ip_int = @network | @hostmask\n if (!objectify)\n ip = NetAddr.ip_int_to_str(ip_int, @version)\n ip = NetAddr.shorten(ip) if (short && !objectify && @version == 6)\n else\n ip = NetAddr.cidr_build(@version,ip_int)\n end\n\n return(ip)\n end", "def domain(arg=nil)\n set_or_return(\n :domain,\n arg,\n :kind_of => String,\n :required => true\n )\n end", "def host_2_ip (hostname)\n\t\tputs \"Perform DNS query on host: #{hostname}\" if @verbose\n\t\tbegin\n\t\t\tips=Array.new\n\t\t\tif is_ip?(hostname)\n\t\t\t\tputs \"No change - same IP is returned. \" if @verbose\n\t\t\t\treturn hostname.strip\n\t\t\telse\n\t\t\t\tips=Resolv.getaddresses(hostname)\n\t\t\t\tif (ips.empty?) 
then\n\t\t\t\t\tputs \"Failed to resolve #{hostname}\" if @verbose\n\t\t\t\t\treturn nil\n\t\t\t\telse\n\t\t\t\t\tputs \"IP found: #{ips.first}\" if @verbose\n\t\t\t\t\treturn ips.first.strip\n\t\t\t\tend\n\t\t\tend\n\t\trescue => ee\n\t\t\tputs \"Exception on method host_2_ip for host #{hostname}: #{ee}\" if @verbose\n\t\t\treturn nil\n\t\tend\n\tend", "def ip(options=nil)\n known_args = [:Objectify, :Short]\n objectify = false\n short = false\n\n if (options)\n if (!options.kind_of?(Hash))\n raise ArgumentError, \"Expected Hash, but \" +\n \"#{options.class} provided.\"\n end\n NetAddr.validate_args(options.keys,known_args)\n\n if( options.has_key?(:Short) && options[:Short] == true )\n short = true\n end\n\n if( options.has_key?(:Objectify) && options[:Objectify] == true )\n objectify = true\n end\n end\n\n\n if (!objectify)\n ip = NetAddr.ip_int_to_str(@ip, @version)\n ip = NetAddr.shorten(ip) if (short && @version == 6)\n else\n ip = NetAddr.cidr_build(@version,@ip)\n end\n\n return(ip)\n end", "def ip\n TestLab::Utility.ip(self.address)\n end", "def hostname(ip_address)\n @resolver.getname(ip_address).to_s\n rescue\n 'IP address not found'\n end", "def fqdn\n exit_code, stdout = ssh.exec(\"hostname --fqdn\")\n (exit_code == 0) ? stdout.chomp : \"\"\n end", "def resolve_fqdn\n hostname = from_cmd(\"hostname\")\n addrinfo = Socket.getaddrinfo(hostname, nil).first\n iaddr = IPAddr.new(addrinfo[3])\n Socket.gethostbyaddr(iaddr.hton)[0]\n rescue\n nil\n end", "def domain\n URI.parse(@config.split('<')[0].split('->')[0])\n end", "def base_domain(response)\n if response.respond_to? :request\n host = response.request.host.sub /:\\d+$/, ''\n return if host =~ /^([\\d.]+|localhost)$/\n\n host =~ /([^.]*)\\.([^.]*|..\\...|...\\...|..\\....)$/\n \".#{$1}.#{$2}\"\n end\n end", "def get_domain url\n uri = URI.parse url\n host = uri.host.downcase\n host.start_with?('www.') ? host[4..-1] : host\n end", "def public_ip_v4_address; end", "def query_ip\n @attributes[:query_ip]\n end", "def name\n ip_address\n end", "def fqdn domain_name\n Service.fqdn domain_name, dns\n end", "def name\n ip_address\n end", "def device_ipaddress; end" ]
[ "0.65247685", "0.62867314", "0.6252401", "0.61461025", "0.600678", "0.5996924", "0.5995298", "0.5967437", "0.5939789", "0.593251", "0.59211797", "0.5920145", "0.5893078", "0.5885891", "0.5872979", "0.5871618", "0.5858549", "0.5847169", "0.58024335", "0.5798698", "0.57979286", "0.5792048", "0.5791373", "0.5791373", "0.5791373", "0.5791373", "0.5791373", "0.5791373", "0.5772837", "0.5717762", "0.5716319", "0.5716064", "0.5705289", "0.5689132", "0.565043", "0.56463623", "0.5639915", "0.5625593", "0.5624483", "0.5622756", "0.5615302", "0.560238", "0.55869573", "0.55869573", "0.55814946", "0.5580533", "0.55641377", "0.5561835", "0.55594194", "0.5556423", "0.5555371", "0.5549952", "0.553946", "0.5535215", "0.5532745", "0.5532745", "0.55326885", "0.5519504", "0.5507785", "0.54986763", "0.54913443", "0.5489832", "0.54880345", "0.5487566", "0.5484539", "0.5479255", "0.54777265", "0.54748654", "0.5473687", "0.54736865", "0.54704136", "0.54588443", "0.54565364", "0.54555106", "0.54413587", "0.54341173", "0.5429702", "0.542304", "0.54226375", "0.54204565", "0.541489", "0.541489", "0.54147214", "0.54123724", "0.54050696", "0.5402157", "0.5400712", "0.53944427", "0.53856385", "0.53838396", "0.5381365", "0.53778654", "0.5372501", "0.53704953", "0.5366674", "0.5364864", "0.53632814", "0.5359694", "0.53594023", "0.5359321" ]
0.60366035
4
Returns the default image name for the configured platform
def default_image "/var/lib/libvirt/images/kitchen-#{instance.platform.name}" end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def default_image\n if instance.platform.name =~ /^(.*)-([^-]*)$/\n \"#{$1}:#{$2}\"\n else\n instance.platform.name\n end\n end", "def image_name(server)\r\n name = instance.platform.name\r\n return name unless server\r\n\r\n # 1:\r\n if server.downcase.start_with? \"https://cloud-images.ubuntu.com\"\r\n info \"Using cloud-image '#{name}'\"\r\n return name.downcase.sub(/^ubuntu-/, \"\")\r\n end\r\n # 2:\r\n if server.downcase.start_with? \"https://images.linuxcontainers.org\"\r\n name = name.downcase.split(\"-\")\r\n # 'core' parses out in this method as the 'version' so just use 'ubuntu-core' in the kitchen.yml\r\n if UBUNTU_RELEASES.key?(name[1]) && name[0] == \"ubuntu\"\r\n name[1] = UBUNTU_RELEASES[name[1]]\r\n name[0] = \"ubuntu-core\" if name[1] == \"16\" # Logic patch for the edge case. We'll do something different if this gets complicated\r\n end\r\n name = name.join(\"/\")\r\n info \"Using standard image #{name}\"\r\n end\r\n name\r\n end", "def default_image_id\n region_name = config[:compute_provider][\"#{config[:compute_provider][:name]}_region\".to_sym]\n region_map = images.fetch('regions', {}).fetch(region_name, {})\n image_id = region_map && region_map[instance.platform.name]\n\n if image_id.nil?\n error(\"Could not determine default image_id in #{region_name} region for platform #{instance.platform.name}\")\n end\n\n image_id\n end", "def default_image_path\n \"default_interest_images/#{name.gsub(/\\s/,'').gsub(/\\W/,'_').downcase}\"\n end", "def default_name\n \tself.name ||= File.basename(self.image.filename, '.*').titleize if self.image_url\n end", "def default_name\n debug(\"Instance name: #{instance.name}\")\n \"#{instance.platform.name}-#{Time.now.to_i}\"\n end", "def default_flavor_id\n flavor_id = images['default_flavor_id']\n\n if flavor_id.nil?\n error(\"Could not determine default flavor_id for platform #{instance.platform.name} via #{config[:compute_provider][:name]}\")\n end\n\n flavor_id\n end", "def image_filename(mobile = false)\n mobile ? mobile_image_filename : desktop_image_filename\n end", "def image_name\n @image_name || image_bundle.image_name\n end", "def platform_name; non_framework_platforms.first; end", "def image_name\n boot_disk = disks.first\n unless boot_disk.is_a?(Disk)\n source = boot_disk[:source]\n match = source.match(%r{/zones/(.*)/disks/(.*)$})\n boot_disk = service.disks.get(match[2], match[1])\n end\n boot_disk.source_image.nil? ? nil : boot_disk.source_image\n end", "def platform_name\n self.platform ? self.platform.name : NOT_SET\n end", "def default_url\n \"\" + [version_name, \"default_cat_icon.png\"].compact.join('_')\n end", "def image_name\n path = \"sets/#{set}/#{dimension}_#{polarity}_#{verbal}\"\n # Rails.application.assets.find_asset(path).nil? ? 'placeholder' : path\n path\n end", "def imageName()\n if (@imageName.nil?)\n if (File.exists?(IMAGE_NAME_FILE))\n File.open(IMAGE_NAME_FILE) { |f|\n @imageName = f.read.chomp\n }\n else\n @imageName = 'unknown'\n MObject.warn(\"Can't find '#{IMAGE_NAME_FILE}'\")\n end\n end\n @imageName\n end", "def default_docker_image\n \"#{env}/#{appname}:#{image_tag}\"\n end", "def safe_default_image\n if default_image.try(:filename).present? 
\n default_image\n elsif self.images.present?\n self.images.first\n else\n Product::generic_default_image\n end\n\n # To populate run task: assets::populate_default_image\n #default_image.presence || Product::generic_default_image\n end", "def default_url\n \"/images/fallback/\" + [version_name, \"default.png\"].compact.join('_')\n end", "def full_image_name\n return generate_name if @options[:generate]\n return \"tongueroo/hi:ufo-12345678\" if ENV['TEST']\n\n unless File.exist?(docker_name_path)\n puts \"Unable to find #{docker_name_path} which contains the last docker image name that was used as a part of `ufo docker build`. Please run `ufo docker build` first.\"\n exit 1\n end\n IO.read(docker_name_path).strip\n end", "def default_path\n \"archangel/\" + [version_name, \"asset.png\"].compact.join(\"_\")\n end", "def default_url\n \"/images/fallback/\" + [version_name, \"base-default.png\"].compact.join(\"_\")\n end", "def default_url\n \"/images/fallback/\" + [version_name, \"default.png\"].compact.join('_')\n end", "def default_image\n end", "def build_host_name\n if @platform.abs_resource_name\n @platform.abs_resource_name\n elsif @platform.vmpooler_template\n @platform.vmpooler_template\n else\n @platform.name\n end\n end", "def build_host_name\n if @platform.abs_resource_name\n @platform.abs_resource_name\n elsif @platform.vmpooler_template\n @platform.vmpooler_template\n else\n @platform.name\n end\n end", "def platform_shortname\n if rhel?\n if \"rocky\" == Ohai[\"platform\"]\n \"rocky\"\n else\n \"el\"\n end\n elsif suse?\n \"sles\"\n else\n Ohai[\"platform\"]\n end\n end", "def default_app_name\n if is_workspace\n return default_build_settings(key: \"PRODUCT_NAME\")\n else\n return app_name\n end\n end", "def platform_shortname\n if platform_family == 'rhel'\n 'el'\n else\n platform\n end\n end", "def default_path\n \"archangel/fallback/\" + [version_name, \"asset.png\"].compact.join(\"_\")\n end", "def image_name\n File.basename(image.path || image.filename) if image\n end", "def image_name\n setting.data[\"image\"]\n end", "def base_name(image)\n # Remove registry namespace (anything before the final slash)\n # as well as tag.\n if image.include?(\"/\")\n base = image.split('/').last.split(':').first\n else\n base = image.split(':').first\n end\n\n # Strip noise-word suffixes from image name\n base.sub!(NOISE_WORDS, '')\n\n base\n end", "def default_url\n \"/images/fallback/\" + [version_name, \"network-thumb-default.png\"].compact.join(\"_\")\n end", "def platform_icon(software, size=17)\n img = case software.platform.to_s\n when /windows/i\n \"icons/windows_#{size}.png\"\n when /mac/i\n \"icons/mac_#{size}.png\"\n when /linux/i\n \"icons/tux_#{size}.png\"\n when /ios/i\n \"icons/iOS_#{size}.png\"\n when /android/i\n \"icons/android_#{size}.png\"\n else\n \"icons/none_#{size}.png\"\n end\n image_tag img, style: \"vertical-align: middle\"\n end", "def default_url\n version = version_name.downcase if version_name\n du = \"/images/fallback/\" + [version, \"empty_deal_image.png\"].compact.join('_')\n # puts \"default_url=>#{du}\"\n du\n end", "def platform_icon(software, size=17)\n img = case software.platform.to_s\n when /windows/i\n \"icons/windows_#{size}.png\"\n when /mac/i\n \"icons/mac_#{size}.png\"\n when /linux/i\n \"icons/tux_#{size}.png\"\n when /ios/i\n \"icons/iOS_#{size}.png\"\n when /android/i\n \"icons/android_#{size}.png\"\n else\n \"icons/download_#{size}.png\"\n end\n image_tag img, style: \"vertical-align: middle\", alt: \"platform icon\"\n\tend", "def 
platform_name\n capabilities['platformName']\n end", "def image_name(extension = true)\n if @image_name.nil? and self.type == LAYER_NORMAL\n name = \"#{self.uid}.png\"\n else\n name = @image_name\n end\n\n if extension\n name\n else\n name.sub(\".png\", \"\")\n end\n end", "def default_url\n # # For Rails 3.1+ asset pipeline compatibility:\n #asset_path([version_name, \"default.png\"].compact.join('_'))\n #\n [version_name, \"logono.gif\"].compact.join('_')\n end", "def default_url\n \"/images/user/avatar/\" + [version_name, \"default.png\"].compact.join('_')\n end", "def default_url\n # For Rails 3.1+ asset pipeline compatibility:\n # asset_path(\"fallback/\" + [version_name, \"default.png\"].compact.join('_'))\n\n \"/images/fallback/\" + [version_name, \"default.png\"].compact.join('_')\n end", "def original_name # :nodoc:\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{@name}-#{@version}\"\n else\n \"#{@name}-#{@version}-#{@original_platform}\"\n end\n end", "def name_or_filename\n name.blank? ? image_name : name\n end", "def default_url\n # For Rails 3.1+ asset pipeline compatibility:\n ([version_name, \"default.png\"].compact.join('_'))\n \n #\"/images/fallback/\" + [version_name, \"default.png\"].compact.join('_')\n end", "def image_name\n image.try(:name)\n end", "def default_url\n # For Rails 3.1+ asset pipeline compatibility:\n # ActionController::Base.helpers.asset_path(\"fallback/\" + [version_name, \"default.png\"].compact.join('_'))\n \"fallback/\"+[version_name, \"default_#{mounted_as}.jpg\"].compact.join('_')\n end", "def default_url\n ActionController::Base.helpers.asset_path(\"logos/\" + [version_name, \"missing.png\"].compact.join('/'))\n end", "def default_url\n ActionController::Base.helpers.asset_path(\"placeholders/\" + [\"default\", version_name].compact.join('_')) + \".png\"\n return \"/images/placeholders/\" + [\"default\", version_name].compact.join('_') + \".png\"\n end", "def image_name_with_specified_version(version)\n @registry.nil? ? 
\"#{@name}:#{version}\" : \"#{@registry}/#{@name}:#{version}\"\n end", "def default_name\n @default_name ||= \"__#{name}_default__\"\n end", "def repository_name\n config['image']\n end", "def current_image_filename\n \"#{name.downcase.gsub(' ', '')}certification.png\"\n end", "def random_default_image\n Random.new.rand(1..6).to_s + '.jpg'\n end", "def default_name\n [\n instance.name.gsub(/\\W/, '')[0..14],\n (Etc.getlogin || 'nologin').gsub(/\\W/, '')[0..14],\n Socket.gethostname.gsub(/\\W/, '')[0..22],\n Array.new(7) { rand(36).to_s(36) }.join\n ].join('-')\n end", "def default_url\n # # For Rails 3.1+ asset pipeline compatibility:\n # # ActionController::Base.helpers.asset_path(\"fallback/\" + [version_name, \"default.png\"].compact.join('_'))\n #\n # \"/images/fallback/\" + [version_name, \"default.png\"].compact.join('_')\n \"/placeholder.png\"\n end", "def select_default_image\n Fog::Brightbox::Compute::ImageSelector.new(list_images).latest_ubuntu\n end", "def name\n RUBY_PLATFORM\n end", "def default_url\n ActionController::Base.helpers.asset_path(\"default/\" + [version_name, \"standard.jpg\"].compact.join('_'))\n end", "def default_name\n path.dirname.basename.to_s\n end", "def default_url\n # For Rails 3.1+ asset pipeline compatibility:\n # ActionController::Base.helpers.asset_path(\"fallback/\" + [version_name, \"default.png\"].compact.join('_'))\n\n # \"/images/fallback/\" + [version_name, \"default.png\"].compact.join('_')\n # 'default_avatar.png' #rails will look at 'app/assets/images/default_avatar.png'\n end", "def full_name\n if platform == Gem::Platform::RUBY or platform.nil? then\n \"#{name}-#{version}\".untaint\n else\n \"#{name}-#{version}-#{platform}\".untaint\n end\n end", "def imageNamed(name)\n sugarcube568_imageNamed(name) || imageNamed_old(name)\n end", "def image\n [:MANAGED_MAP, :UNMANAGED_MAP].each do |map|\n unless self.class.const_defined?(map)\n raise MapUndefined, \"Const #{map} is undefined; maps appear unbuilt\"\n end\n end\n\n map = @managed ? 
self.class::MANAGED_MAP : self.class::UNMANAGED_MAP\n distro = @distro.downcase.to_sym\n\n unless map[distro].nil?\n return map[distro][@version] unless map[distro][@version].nil?\n return map[distro][\"*\"] unless map[distro][\"*\"].nil?\n end\n\n nil\n end", "def platform_name()\n if @handle.ptr == nil\n raise \"this is disposed\"\n end\n result = Native.RunMetadata_platform_name(@handle.ptr)\n result\n end", "def get_exotic_archname(platform_type)\n case platform_type\n when /aarch64/\n \"ARM\"\n when /ppc64le/\n \"Power\"\n else\n nil\n end\nend", "def default_url\n ActionController::Base.helpers.asset_path(\"images/\" + [version_name, \"missing.png\"].compact.join('/'))\n end", "def default_url\n # For Rails 3.1+ asset pipeline compatibility: ActionController::Base.helpers.asset_path(\"fallback/\" + [version_name, \"default.png\"].compact.join('_'))\n \"/assets/fallback/\" + [version_name, \"default.png\"].compact.join('_')\n end", "def get_logo_display\n\t\tif !logged_in?\n\t\t\treturn \"City Produce\"\n\t\tend\n\t\tif current_user.admin?\n\t\t\treturn \"Admin\"\n\t\tend\n\t\treturn \"Home\"\n\tend", "def default_url\n # For Rails 3.1+ asset pipeline compatibility:\n # ActionController::Base.helpers.asset_path(\"fallback/\" + [version_name, \"default.png\"].compact.join('_'))\n\n \"http://res.cloudinary.com/djjs4pnpf/image/upload/v1528878490/jpsez3ep8okeusjdqinz.jpg\"\n end", "def default_image\n attachments.for('default_image')\n end", "def default_provider\n return nil unless node && node['platform_family']\n Chef::Provider::Dropbox.const_get(node['platform_family'].split('_')\n .map(&:capitalize).join)\n end", "def os_name # rubocop:disable Lint/DuplicateMethods\n @os_name ||= @name.match(PLATFORM_REGEX)[1]\n end", "def default_url\n asset_path(\"fallback/\" + [version_name, \"default-photo.png\"].compact.join('_'))\n end", "def default_url\n asset_path(\"fallback/\" + [version_name, \"default-photo.png\"].compact.join('_'))\n end", "def default_url\n asset_path(\"fallback/\" + [version_name, \"default-photo.png\"].compact.join('_'))\n end", "def default_url\n # For Rails 3.1+ asset pipeline compatibility:\n \"/assets/\" + [version_name, \"default.gif\"].compact.join('_')\n end", "def localized_image_filename (source)\n localized_filename = get_language_filename( source, true )\n language_filename = get_language_filename( source, false )\n if language_image_exists?(localized_filename)\n return localized_filename\n elsif language_image_exists?(language_filename)\n return language_filename\n else\n return source\n end\n end", "def set_default_config\n config[:image_os_type] = \"windows\" if config[:connection_protocol] == \"winrm\"\n end", "def default_url\n # # For Rails 3.1+ asset pipeline compatibility:\n ActionController::Base.helpers.asset_path('fallback/' + [version_name, 'default.png'].compact.join('_'))\n #\n # \"/images/fallback/\" + [version_name, \"default.png\"].compact.join('_')\n end", "def default_url\n # For Rails 3.1+ asset pipeline compatibility:\n # asset_path(\"fallback/\" + [version_name, \"default.png\"].compact.join('_'))\n asset_path \"#{DEFAULT_AVATAR_NAME}.png\"\n end", "def imageNamed(name)\n imageNamed568(name) || imageNamed_old(name)\n end", "def default_container_name\n full_release_name.gsub('_', '-')\n end", "def default_url\n # For Rails 3.1+ asset pipeline compatibility:\n # ActionController::Base.helpers.asset_path(\"fallback/\" + [version_name, \"default.png\"].compact.join('_'))\n asset_path('gray_blank.gif')\n # Settings.assets.gray_image\n 
end", "def image_url_to_default_filename (image_url = @todays_image_url)\n\n matches = image_url.scan(/\\S+\\/(\\S+.jpg)/) # returns an array\n\n if (matches.length != 1)\n return nil\n end\n \n return matches[0].to_s\n \n end", "def instance_name(suite, platform)\n Instance.name_for(suite, platform)\n end", "def default_url\n \"photo/#{version_name}.jpg\"\n end", "def image_name\n @presenter.is_homepage? ? 'thumb' : 'thumb_inner'\n end", "def profile_image(type=nil)\n image = images.find(:first)\n case type\n when :big\n image.nil? ? \"default_big.png\" : image.public_filename(type)\n when :medium\n image.nil? ? \"default_medium.png\" : image.public_filename(type)\n when :thumb\n image.nil? ? \"default_thumb.png\" : image.public_filename(type)\n when :medium_square\n image.nil? ? \"default_medium_square.png\" : image.public_filename(type)\n when :small_square\n image.nil? ? \"default_small_square.png\" : image.public_filename(type)\n when :tiny_square\n image.nil? ? \"default_tiny_square.png\" : image.public_filename(type)\n else\n image.nil? ? \"default_medium.png\" : image.public_filename(type)\n end\n end", "def fedora_name\n 'image_file'\n end", "def first_available_image\n image = %w[o s m l].each do |size|\n field = \"size_#{size}\"\n value = send(field)\n return \"#{id}-#{size}.#{img_type}\" if value.present?\n end\nend", "def default_image_for_item_type\n case item_type.to_s\n when 'parfume'\n '/assets/_sample/category-grey-1.jpg'\n when 'song'\n '/assets/_sample/category-grey-2.jpg'\n else\n '/assets/_sample/category-grey-3.jpg'\n end\n end", "def set_image_title\n controller_name == 'temp_listings' ? '' : 'Pin it @ Pinterest'\n end", "def default_url(*args)\n \"/site_assets/_shared/avatars/\" + [\"empty_avatar\", version_name].compact.join('_') + '.png'\n end", "def icon\n icons = {\n \"chrome\" => \"img-firefox.png\",\n \"safari\" => \"img-safari.png\",\n \"googlechrome\" => \"img-chrome.png\",\n \"iexplore8\" => \"img-ie8.png\",\n \"iexplore7\" => \"img-ie7.png\", \n \"iexplore6\" => \"img-ie6.png\",\n \"iehta\" => \"img-ie8.png\",\n \"firefox\" => \"img-firefox.png\"\n }\n\n if self.browser_name == 'iexplore' or self.browser_name == 'iehta'\n ret = icons[\"iexplore\" + self.browser_version.delete(\".\")]\n else\n ret = icons[self.browser_name]\n end\n\n return \"unknown_#{self.browser_name}\" if ret.nil?\n ret\n end", "def logo\n get_attribute(Yoti::Attribute::APPLICATION_LOGO)\n end", "def profile_pic_default\n if profile_pic.attached? \n profile_pic.variant(resize: \"150x150!\").processed\n else \n \"/default_profile.jpg\"\n end\n end", "def default_url\n # For Rails 3.1+ asset pipeline compatibility:\n # asset_path(\"fallback/\" + [version_name, \"default.png\"].compact.join('_'))\n puts YAML::dump(model)\n puts YAML::dump(model.respond_to?('imageable_type'))\n #raise \"very confusing ... 
model.class = \" + model.class.to_s + \" version = \" + version_name\n if model.respond_to?('imageable_type')\n \"/images/fallback/#{model.imageable_type.to_s}_\" + [version_name, \"default.png\"].compact.join('_')\n else\n \"/images/fallback/#{model.class.to_s}_\" + [version_name, \"default.png\"].compact.join('_')\n end\n end", "def default_external_name\n @default_external_name || DEFAULT_EXTERNAL_NAME\n end", "def casein_config_logo\n \t'/images/logo_ss.jpg'\n end", "def default_platform_service_name(version: installed_postgresql_major_version, source: installed_postgresql_package_source)\n if platform_family?('rhel', 'fedora', 'amazon') && source.eql?(:repo)\n \"postgresql-#{version}\"\n else\n 'postgresql'\n end\n end" ]
[ "0.90075755", "0.7415205", "0.73246264", "0.7021422", "0.6962539", "0.6931464", "0.6927607", "0.6847147", "0.6842619", "0.6780967", "0.67274594", "0.67179024", "0.6715335", "0.6696803", "0.66665316", "0.6662556", "0.6640817", "0.663227", "0.6610321", "0.65983534", "0.6580736", "0.65739256", "0.65581095", "0.6554852", "0.6554852", "0.6549649", "0.653878", "0.653755", "0.6528249", "0.6484977", "0.6482847", "0.6456853", "0.6443356", "0.64397943", "0.64315474", "0.64271724", "0.64246356", "0.640862", "0.63840413", "0.6311974", "0.6292728", "0.62833697", "0.6267983", "0.62456757", "0.62377685", "0.622547", "0.6224608", "0.6195205", "0.619254", "0.617222", "0.6171651", "0.61589605", "0.6154851", "0.6140243", "0.6118213", "0.6117124", "0.61143947", "0.60933983", "0.6091722", "0.60881203", "0.606791", "0.60665774", "0.6049523", "0.60481966", "0.60431623", "0.6039596", "0.603173", "0.6014937", "0.60139036", "0.60094017", "0.6006237", "0.6001003", "0.5983601", "0.5983601", "0.5983601", "0.59800255", "0.5969832", "0.59545714", "0.5951822", "0.59423953", "0.59416187", "0.59283906", "0.5925805", "0.5922865", "0.59220314", "0.5920721", "0.59201264", "0.59103924", "0.5907944", "0.5897017", "0.5880987", "0.58807254", "0.58699113", "0.5854704", "0.5837872", "0.5834056", "0.5831397", "0.5820076", "0.5819546", "0.5816079" ]
0.77891964
1
The default name for the domain
def default_name
  debug("Instance name: #{instance.name}")
  "#{instance.platform.name}-#{Time.now.to_i}"
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def domain_name\n @domain_name ||= default_name\n end", "def default_domain_name\n return @default_domain_name\n end", "def default_domain_name\n return @default_domain_name\n end", "def default_domain_name=(value)\n @default_domain_name = value\n end", "def default_domain_name=(value)\n @default_domain_name = value\n end", "def default_name\n [\n instance.name.gsub(/\\W/, '')[0..14],\n (Etc.getlogin || 'nologin').gsub(/\\W/, '')[0..14],\n Socket.gethostname.gsub(/\\W/, '')[0..22],\n Array.new(7) { rand(36).to_s(36) }.join\n ].join('-')\n end", "def service_name(default_name)\n ENV[\"DOMAIN\"].to_s.empty? ? default_name : ENV[\"DOMAIN\"].split('.').first\n end", "def default_name\n return unless name\n Dry::Core::Inflector.underscore(name).tr('/', '_').to_sym\n end", "def default_name\n @default_name ||= \"__#{name}_default__\"\n end", "def default_short_domain\n Fanforce::Base::DomainEnvironments.method(environment).call[:default_short_domain]\n end", "def domain\n unless @domain\n if defined? ActiveSupport::CoreExtensions::String::Inflections\n @domain = name.tableize\n else\n @domain = name.downcase\n end\n end\n @domain\n end", "def default_host\n primary_host = hosts.primary.first\n primary_host.blank? ? \"#{subdomain}.adaptapp.com\" : primary_host.hostname\n end", "def domain_name\n return @domain_name\n end", "def domain_name\n return @domain_name\n end", "def default_bd_name\n 'Bridge-Domain' + @bd_ids\n end", "def set_name\n self.name = domain.name if name.blank?\n self.name = \"#{name}.#{domain.name}\" if not name.end_with?(domain.name)\n end", "def build_domain_name(env)\n config = env[:machine].provider_config\n domain_name =\n if config.default_prefix.nil?\n env[:root_path].basename.to_s.dup.concat('_')\n elsif config.default_prefix.empty?\n # don't have any prefix, not even \"_\"\n String.new\n else\n config.default_prefix.to_s.dup\n end\n domain_name << env[:machine].name.to_s\n domain_name.gsub!(/[^-a-z0-9_\\.]/i, '')\n domain_name << \"_#{Time.now.utc.to_i}_#{SecureRandom.hex(10)}\" if config.random_hostname\n domain_name\n end", "def default_external_name\n @default_external_name || DEFAULT_EXTERNAL_NAME\n end", "def default_name\n [long_name, short_name].reject(&:empty?).first\n end", "def domain_name=(value)\n @domain_name = value\n end", "def domain_name=(value)\n @domain_name = value\n end", "def domain_name\n Faker::Internet.domain_name\n end", "def domain_name\n Faker::Internet.domain_name\n end", "def new_domain\n domain || tag('Domain') || local_domain_name\n end", "def determine_default_account_subdomain\n Cadenero.default_account_subdomain = options[\"default-account-subdomain\"].presence ||\n ask(\"What will be the subdomain for the default account? 
[www]\").presence ||\n 'www'\n end", "def set_server_name; self.name = domain; end", "def __name__\n { 'Hostname' => @name, 'Domainname' => domain }\n end", "def set_default_domain\n if GlobalPreference.get(:domain).blank?\n GlobalPreference.set!(:domain, request.host_with_port)\n end\n end", "def default_graphql_name\n to_s.split(\"::\").last.sub(/\\Z/, \"\")\n end", "def domain\n server_name || http_host\n end", "def new_email_default_domain\n system_domains = domains\n {\n default: system_domains[:default],\n domains: system_domains[:names].map{ |domain| domain[:domain_name] }\n }\n end", "def default_account_subdomain\n\t\t'www'\n\tend", "def get_default_base_dn\n\t\treturn self.root_dse[:namingContexts].first.dn\n\tend", "def domain_name=(d)\n @domain_name = d\n end", "def dns_name\n [\"public\", fqdn].join(\".\")\n end", "def default_name\n path.dirname.basename.to_s\n end", "def short\n return '' if name == domain.name\n return '' if name.blank?\n\n File.basename(name, \".#{domain.name}\")\n end", "def default_name\n self.name ||= File.basename(data_file.filename, '.*').titleize if data_file.filename && !self.name\n end", "def fqdn(gear_name = nil)\n \"#{gear_name || canonical_name}-#{domain_namespace}.#{Rails.configuration.openshift[:domain_suffix]}\"\n end", "def clean_name\n global? ? registry.hostname : name\n end", "def clean_name\n global? ? registry.hostname : name\n end", "def cname\n self[:cname] || domain_names&.first&.canonicalize_cname\n end", "def cname\n self[:cname] || domain_names&.first&.canonicalize_cname\n end", "def name_unless_default\n name == Rack::OAuth.default_instance_name ? nil : name\n end", "def generate_default_display_name\n if self.display_name.blank?\n self.display_name = (email.blank? ? \"[no name]\" : self.email.split(\"@\")[0])\n end\n end", "def name\n @_name ||= (@config[:supervisor_name_override] || \"#{@config[:root_name]}-#{`hostname`.chomp}\").gsub(/[^a-zA-Z0-9\\-\\_]/, ' ').gsub(/\\s+/, '-').downcase\n end", "def name\n @name ||= (@config[:name] || self.class.name.split(/::/).last.titleize)\n end", "def default_name\n \tself.name ||= File.basename(self.image.filename, '.*').titleize if self.image_url\n end", "def default_base_path_name\n self.name.split('::').last.downcase\n end", "def fqdn domain_name\n Service.fqdn domain_name, dns\n end", "def determine_default_account_name\n Cadenero.default_account_name = options[\"default-account-name\"].presence ||\n ask(\"What will be the name for the default account? [Root Account]\").presence ||\n 'Root Account'\n end", "def set_domain_name(domain)\n @domain = domain.to_s\n end", "def default_orgname\n ENV['ECM_DEFAULT_ORGNAME']\n end", "def with_default(name)\n name.include?('::') ? name : \"#{name}::default\"\n end", "def with_default(name)\n name.include?('::') ? name : \"#{name}::default\"\n end", "def name\n\t\tif name_source.present?\n\t\t\tproviders = [\"twitter\",\"facebook\",\"google_oauth2\",\"lastfm\",\"vimeo\"]\n\t\t\tp,v = name_source.split(\"::\",2)\n\t\t\treturn name_source unless p.in? providers\n\t\t\tl = self.links.find_by(provider: p)\n\t\t\tif l\n\t\t\t\tnames = l.names\n\t\t\t\treturn names[v.to_sym] if names.is_a? 
Hash and v and names[v.to_sym]\n\t\t\tend\n\t\tend\n\t\t\n\t\treturn custom_name if custom_name.present?\n\t\treturn email.split('@')[0].titleize if email.present?\n\t\tUser.default_name\n\tend", "def get_default_naming_context(domain=nil)\n bind_default_ldap_server(1, domain) do |session_handle|\n print_status(\"Querying default naming context\")\n\n query_result = query_ldap(session_handle, \"\", 0, \"(objectClass=computer)\", [\"defaultNamingContext\"])\n first_entry_fields = query_result[:results].first\n # Value from First Attribute of First Entry\n default_naming_context = first_entry_fields.first[:value]\n vprint_status(\"Default naming context #{default_naming_context}\")\n return default_naming_context\n end\n end", "def base_name\n @base_name ||= if base = name.to_s.split(\"::\").first\n base.underscore\n end\n end", "def fqdn\n \"#{to_label}.example.com\"\n end", "def format_name\n @site_name ? @site_name : \"\"\n end", "def site_name\r\n site.name rescue nil\r\n end", "def canonical_hostname(domain)\n # Allow hostname overrides\n return $override_dashboard if $override_dashboard && domain == 'studio.code.org'\n return $override_pegasus if $override_pegasus && domain == 'code.org'\n\n return \"#{name}.#{domain}\" if ['console', 'hoc-levels'].include?($node_name)\n return domain if $node_env == 'production'\n\n # our HTTPS wildcard certificate only supports *.code.org\n # 'env', 'studio.code.org' over https must resolve to 'env-studio.code.org' for non-prod environments\n sep = (domain.include?('.code.org')) ? '-' : '.'\n return \"localhost#{sep}#{domain}\" if $node_env == 'development'\n return \"translate#{sep}#{domain}\" if $node_name == 'crowdin'\n \"#{$node_env}#{sep}#{domain}\"\nend", "def guess_company_domain\n if self.company_domain.blank?\n string = self.company_name.to_s.downcase.gsub(' ', '') + \".fr\"\n self.company_domain = string\n end\n end", "def base_distinguished_name\n base_name = \"\"\n AD_DOMAIN.split('.').each do |item|\n base_name+=\"dc=#{item},\"\n end\n base_name.chop\n end", "def name\n @name ||= config(\"name\", \"WorkshopDash\")\n end", "def dns_host_name\n @dns_host_name ||= ::SimpleIDN.to_ascii(@host_name)\n end", "def name_site\n \"#{Site.name}\"\n end", "def set_domain_name(opts = {})\n cmds = command_builder('ip domain-name', opts)\n configure(cmds)\n end", "def get_server_domain\n @hostname ||= Socket.gethostname\n end", "def domain\n try_opt(:domain)\n end", "def my_name\n @my_name ||= self.class.name.split(\"::\").last\n end", "def fqdn\n [name, tag, domain].compact.join('.')\n end", "def config_server_name\n return if config[:server_name]\n\n if config[:server_name_prefix]\n config[:server_name] = server_name_prefix(\n config[:server_name_prefix]\n )\n else\n config[:server_name] = default_name\n end\n end", "def get_fqdn\n return @resource[:name]\n end", "def get_default_tpl\n return self.script_name.sub(/^#{BASE_URL}\\//, '').sub(/\\..+?$/, '')\n end", "def subdomain\n self.name.gsub('_', '.')\n end", "def default_slug\n nil\n end", "def name\n @name ||= options[:name] || service_url\n end", "def name\n @name ||= options[:name] || service_url\n end", "def name\n @name ||= options[:name] || service_url\n end", "def name\n FFI::Libvirt.virDomainGetName(self)\n end", "def name\n FFI::Libvirt.virDomainGetName(self)\n end", "def name\n if ipv4?\n \"[#{ip_address}]\"\n elsif ipv6?\n \"[IPv6:#{ip_address}]\"\n elsif @config[:host_encoding] && @config[:host_encoding] == :unicode\n ::SimpleIDN.to_unicode(host_name)\n else\n dns_name\n end\n end", 
"def domain_suffix\n offset = environment_first_char.ord - 97\n ENV[\"SUFFIX_CHARACTERS\"][offset..offset + 4]\n end", "def dns_name instance\n instance.dns_name\n end", "def base_name\n name\n end", "def domain_name(subdomain: T.unsafe(nil), domain: T.unsafe(nil)); end", "def name\n @config.db_name.gsub(/@thismachinehostname@/, Socket.gethostname).\n gsub(/@prefix@/, prefix)\n end", "def default_base_title\n Rails.application.class.to_s.split('::').first\n end", "def site_name\n SITE_NAME\n end", "def current_company_name\n begin\n Client.find(current_subdomain).webname\n rescue\n \"\"\n end\n end", "def site_name\n\t\treturn get_configuration['sa_application_name']\n\tend", "def name\n @name ||= self.class.non_namespaced_name\n end", "def display_name\n override_name ||= name\n # default to name if the value of override_name is empty string\n override_name = name if override_name.strip == \"\"\n override_name\n end", "def server_name(name = nil)\n return @server_name if name.nil?\n @server_name = name.to_sym\n end", "def name\n @name ||= self.to_s.demodulize.underscore\n end", "def base_hostname\n @username.match(/.com/) ? @username : \"#{@username}.tumblr.com\"\n end", "def public_dns_name\n data[:public_dns_name]\n end", "def domain\n @domain ||= PublicSuffix.parse(@fqdn).domain\n end", "def subdomain\n ([self.short_name] + self.class.dalianshops.domain.split('.')[1..-1]).join('.')\n end" ]
[ "0.88208437", "0.87415904", "0.87415904", "0.85008746", "0.85008746", "0.79431206", "0.786928", "0.7796924", "0.767584", "0.74892884", "0.739076", "0.7323344", "0.7261558", "0.7261558", "0.7260703", "0.72138095", "0.71745044", "0.7170666", "0.7149342", "0.7145908", "0.7145908", "0.71046287", "0.71046287", "0.7104052", "0.7017104", "0.69945484", "0.69306356", "0.6924174", "0.6921184", "0.68551785", "0.68457896", "0.6839188", "0.68378913", "0.68306136", "0.6800142", "0.67866915", "0.6783812", "0.67830557", "0.67826295", "0.6754101", "0.6754101", "0.67409253", "0.67409253", "0.6706415", "0.6703021", "0.6686714", "0.6682501", "0.6675122", "0.6664765", "0.66450006", "0.66319996", "0.6631733", "0.6609724", "0.6591065", "0.6591065", "0.6590149", "0.65880734", "0.6586562", "0.65812314", "0.65603286", "0.65436137", "0.6527756", "0.64762634", "0.646876", "0.64516497", "0.6440185", "0.64258796", "0.64121395", "0.64049816", "0.64011747", "0.63941574", "0.6393263", "0.63907045", "0.63827974", "0.638105", "0.6380372", "0.63792086", "0.6371274", "0.6371274", "0.6371274", "0.6370121", "0.6370121", "0.63682175", "0.6362981", "0.6351735", "0.6348286", "0.6347885", "0.6340474", "0.63399523", "0.6338186", "0.6333347", "0.63221204", "0.6308363", "0.6306244", "0.6305431", "0.63035446", "0.6300723", "0.62986654", "0.62981015", "0.62889194" ]
0.7273699
12
Create the domain, and all its dependencies
def create_domain debug("Creating domain #{domain_name}") debug("Using options: #{domain_options}") domain = client.servers.create(domain_options) prepare_domain(domain) domain end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_agent_domain\n return false unless validate_params\n puts '########## CREATING DOMAIN ##########'\n dme.create_domain(params[:domain])\n puts '########## CREATING DEFAULT RECORDS ##########'\n create_default_records\n puts '########## CREATING ADDITIONAL RECORDS ##########'\n create_additional_records\n puts '########## RENDERING DATA TO CLIENT##########'\n show_domain\n end", "def new\n @domain = Domain.new\n end", "def new_domain(domain)\n d = Domain.new\n d.name = domain\n d.type = \"NATIVE\"\n\n d.save\n return d\n end", "def create\n # create registrant -> create order -> create domain\n @order = Order.create(user_id:current_user.id)\n @order.save\n\n if @order.save\n puts \"order created with id #{@order.id}\"\n else\n puts 'order creation failed'\n end\n\n @domain = current_user.domains.new(domain_params)\n\n @domain.user_id = current_user.id\n @domain.registrant_id = Registrant.last.id\n @domain.order_id = @order.id\n\n # create an order first\n respond_to do |format|\n if @domain.save\n format.html { redirect_to @domain, notice: 'Domain was successfully created.' }\n format.json { render :show, status: :created, location: @domain }\n else\n format.html { render :new }\n format.json { render json: @domain.errors, status: :unprocessable_entity }\n end\n end\n end", "def start\n\n\t\tdebug \"Starting domain %s\" % [resource[:name]]\n\n\t\tif exists? && status != \"running\"\n\t\t\tdom.create # Start the domain\n\t\telsif status == \"absent\"\n\t\t\tinstall\n\t\tend\n\n\tend", "def create_types\n\t[Domain]\nend", "def create_domain(domain)\n raise MogileFS::ReadOnlyError if readonly?\n res = @backend.create_domain :domain => domain\n res ? res['domain'] : nil\n end", "def start\n\n\t\tdebug \"Starting domain %s\" % [resource[:name]]\n\n\t\tif exists? 
&& status == \"stopped\"\n\t\t\tdom.create # Start the domain\n\t\telse\n\t\t\tinstall\n\t\tend\n\n\tend", "def build(domain_name)\n domain = Domain.new(:name => domain_name,\n :ttl => self.ttl,\n :authority_type => Domain::MASTER)\n\n record_templates.dup.each do |template|\n record = template.build(domain_name)\n\n domain.records << record\n domain.soa_record = record if record.is_a?(SOA)\n end\n\n domain\n end", "def create\n @domain = Domain.new(domain_params)\n\n if @domain.save\n render json: @domain, status: :created, location: @domain\n else\n render json: @domain.errors, status: :unprocessable_entity\n end\n end", "def domain_create(args)\n response = send_request(domain_create_xml(args))\n\n get_result(:xml => response, :callback => :domain_create_process)\n end", "def setup\n # Settings, common_codes, entities, menus, terminologies\n return if self.exist_initial_data\n debug_print \"Progressing setup initial data ...\"\n sys_dom = Domain.system_domain\n\n debug_print \"Finding initial data from System Domain ...\"\n ori_codes = CommonCode.where(\"domain_id = #{sys_dom.id} and parent_id is null\")\n ori_entities = Entity.where(\"domain_id = #{sys_dom.id}\")\n ori_settings = Setting.where(\"domain_id = #{sys_dom.id}\")\n\n debug_print \"Copying initial data to Domain (#{self.name})...\"\n self.clone_code_to_domain(ori_codes)\n self.clone_entity_to_domain(ori_entities)\n self.clone_to_domain(ori_settings, Setting)\n\n debug_print \"Completed setup initial data ...\"\n end", "def create_domain(domain_name)\n Adapter.create_domain(domain_name)\n self[domain_name]\n end", "def domain_create(domain, fields)\n unless ([ :period, :registrant, :admin, :tech, :billing, :nservers ] - fields.keys).empty?\n raise ArgumentError, \"Required fields not found\"\n end\n query :domain_register, {\n domain: domain,\n period: (fields[:period] * 12),\n owner_c: fields[:registrant],\n admin_c: fields[:admin],\n tech_c: fields[:tech],\n billing_c: fields[:billing],\n ns_list: fields[:nservers].join(':')\n }\n end", "def create\n @domain = DOMAIN.new(params[:domain])\n @domain.current_user = current_user\n\n respond_to do |format|\n if @domain.save\n flash[:notice] = 'Domain was successfully created.'\n format.html { redirect_to(domain_url(@domain.id)) }\n format.xml { render :xml => @domain, :status => :created, :location => domain_url(@domain.id) + \".xml\" }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @domain.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create_dependancies\n create_course_plan()\n end", "def run\n\tsuper\n \n ## Read the TLD List and pre-process it to remove crud\n f = File.open Rails.root + \"data/tld.list\"\n tld_list_with_comments = f.read\n tld_list = []\n tld_list_with_comments.each_line {|line| tld_list << line unless line =~ /^\\/\\// }\n tld_list.each {|tld| tld.chomp!}\n \n tld_list = [\"com\",\"net\", \"org\"]\n \n ###\n # Organization\n ###\n \n\tif @object.kind_of? 
Organization\n\t\tif @object.name\n\t\t tld_list.each do |tld|\n \n\t\t\tbegin\n\t\t\t\t domain = \"#{@object.name}.#{tld}\"\t\t\n\t\t\t\t\tresolved_address = Resolv.new.getaddress(domain)\n\t\t\t\n\t\t\t\t\tif resolved_address\n puts \"Resolved Address: #{resolved_address}\"\n create_object Device, :ip_address => resolved_address, \n :name => domain, \n :organization => @object\n create_object Domain, :name => domain, \n :organization => @object\n end\n \n \t rescue Exception => e\n\t\t\t puts e\n\t\t\t end\n\t\t end\n \n\t\telse\n\t\t\tputs \"Error, object has no name to look up!\"\n\t\tend\n\tend\n\n ###\n # Domain!\n ### \n\n\tif @object.kind_of? Domain\n\t\tif @object.name\n\t\t tld_list.each do |tld|\n\n\t\t\tbegin\n\t\t\t\t domain = \"#{@object.name}.#{tld}\"\t\t\n\t\t\t\t\tresolved_address = Resolv.new.getaddress(domain)\n\n\t\t\t\t\tif resolved_address\n create_object Device, :ip_address => resolved_address, \n :name => domain, \n :domain_id => @object.id,\n :organization_id => @object.organization.id\n end\n\n \t rescue Exception => e\n\t\t\t puts e\n\t\t\t end\n\t\t end\n\n\t\telse\n\t\t\traise \"Error, object has no name to look up!\"\n\t\tend\n\t\n\t\tnil\n\tend\nend", "def create\n Puppet.debug \"starting create #{self.class.to_s}\"\n dns_service = get_dns_service(get_fqdn)\n dns_service.create_record(get_fqdn, get_type, get_ip) if dns_service != nil\n Puppet.debug \"done with create #{self.class.to_s}\"\n end", "def initialize(domain); @domain = domain; end", "def create\n authorize! :create, BudgetDomain\n service = BudgetDomainCreator.call(create_params)\n @budget_domain = service.budget_domain\n if service.status == :ok\n redirect_to @budget_domain, notice: 'Budget domain was successfully created.'\n else\n render :new\n end\n end", "def create_domain(opts = {})\n data, _status_code, _headers = create_domain_with_http_info(opts)\n data\n end", "def create\n create_checkpoints\n create_config_base\n generate_deploy_files\n generate_hiera_template\n end", "def initialize(domain)\n\t\t\t@domain = domain\n\t\t\t@sdb = AwsSdb::Service.new(:logger=>LogDuck.new)\n\t\t\tcreate_domain unless domain_exist?\n\t\tend", "def create\n\t\t@domain = Domain.new(:hostname => params[:hostname])\n\n\t\t# Attempt to save the domain, and return the appropriate JSON or Error\n\t\trespond_to do |format|\n\t\t\tif @domain.save\n\t\t\t\tformat.json { render json: @domain, status: :created }\n\t\t\telse\n\t\t\t\tformat.json { render json: @domain.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\t\t\n\t\t# Fetch the hostname IP address and update the record in a new thread\n\t\tt1=Thread.new{fetch_origin_ip()}\n\t\tt1.join\n\tend", "def create_domain_with_http_info(create_domain_options, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DomainControllerApi.create_domain ...'\n end\n # verify the required parameter 'create_domain_options' is set\n if @api_client.config.client_side_validation && create_domain_options.nil?\n fail ArgumentError, \"Missing the required parameter 'create_domain_options' when calling DomainControllerApi.create_domain\"\n end\n # resource path\n local_var_path = '/domains'\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['*/*'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = 
@api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:body] || @api_client.object_to_http_body(create_domain_options) \n\n # return_type\n return_type = opts[:return_type] || 'DomainDto' \n\n # auth_names\n auth_names = opts[:auth_names] || ['API_KEY']\n\n new_options = opts.merge(\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DomainControllerApi#create_domain\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def new_spec\n Libvirt::Spec::Domain.new\n end", "def new_spec\n Libvirt::Spec::Domain.new\n end", "def create\n sys_bds_array = BridgeDomain.bd_ids_to_array(system_bridge_domain)\n inp_bds_array = BridgeDomain.bd_ids_to_array(@bd_ids)\n if (inp_bds_array - sys_bds_array).any?\n add_bds = BridgeDomain.bd_list_to_string(inp_bds_array - sys_bds_array)\n config_set('bridge_domain', 'system_bridge_domain', oper: 'add',\n bd: add_bds)\n end\n config_set('bridge_domain', 'create', bd: @bd_ids)\n end", "def create_domain\n unless domain_exists?\n require 'chef/win32/version'\n version = Chef::ReservedNames::Win32::Version.new\n\n Chef::Log.info(\"Configuring network interface settings and creating domain\")\n if version.windows_server_2012?\n code =<<-EOH\n#{network_interface_code}\n\t\t\t\t$DCPromoFile = @\"\n\t\t\t\t[DCINSTALL]\n\t\t\t\tInstallDNS=yes\n\t\t\t\tNewDomain=forest\n\t\t\t\tNewDomainDNSName=#{new_resource.dns_name}\n\t\t\t\tDomainNetBiosName=#{new_resource.netbios_name}\n\t\t\t\tSiteName=#{new_resource.site_name}\n\t\t\t\tReplicaorNewDomain=domain\n\t\t\t\tForestLevel=5\n\t\t\t\tDomainLevel=5\n\t\t\t\tConfirmGC=Yes\n\t\t\t\tSafeModeAdminPassword=\"#{new_resource.restore_mode_password}\"\n\t\t\t\tRebootonCompletion=Yes\n\t\t\t\t\"@\n\t\t\t\t$DCPromoFile | out-file c:/dcpromoanswerfile.txt -Force\n\t\t\t\tdcpromo.exe /unattend:c:/dcpromoanswerfile.txt\n EOH\n elsif version.windows_server_2012_r2?\n code =<<-EOH\n#{network_interface_code}\n\t\t\t\tInstall-ADDSForest -DomainName #{new_resource.dns_name} -SafeModeAdministratorPassword (convertto-securestring '#{new_resource.restore_mode_password}' -asplaintext -force) -DomainMode Win2012R2 -DomainNetbiosName #{new_resource.netbios_name} -ForestMode Win2012R2 -Confirm:$false -Force\n\t\t\t\tStop-Process -ProcessName sshd -force -ErrorAction SilentlyContinue\n EOH\n # cmd = powershell_out(\"Install-ADDSForest -DomainName #{new_resource.dns_name} -SafeModeAdministratorPassword (convertto-securestring '#{new_resource.restore_mode_password}' -asplaintext -force) -DomainMode Win2012R2 -DomainNetbiosName #{new_resource.netbios_name} -ForestMode Win2012R2 -Confirm:$false -Force\")\n end\n Chef::Log.info(\"Creating Active Directory Domain #{new_resource.dns_name}\")\n cmd = powershell_out(code)\n kill_ssh\n Chef::Application.fatal!(\"Failed to create Active Directory Domain #{new_resource.dns_name}\") if cmd.exitstatus != 0\n reboot \"Active Directory Domain #{new_resource.dns_name} created\" do\n action :reboot_now\n reason \"Active Directory Domain #{new_resource.dns_name} created\"\n end\n end\nend", "def build\n raise 
FedoraMigrate::Errors::MigrationError, \"No qualified targets found in #{source.pid}\" if target.nil?\n\n # create target, and apply depositor metadata\n obj = target.new\n\n obj.apply_depositor_metadata @depositor_utln\n obj.visibility = Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC\n\n user = User.find_by_user_key(@depositor_utln)\n# CurationConcerns::Workflow::ActivateObject.call(target: obj, comment: 'activate object', user: user)\n\n create_and_add_payload(obj, @payload_primary, @depositor_utln)\n\n #deal with 2 primary datastream objects, storing second object in a new file set\n create_and_add_payload(obj, @payload_secondary, @depositor_utln) unless @payload_secondary.nil?\n\n #handle a case of bad hand created data on old records\n create_and_add_payload(obj, \"ARCHIVAL_SOUND\", @depositor_utln) if @payload_primary == \"ARCHIVAL_WAV\"\n\n # back up old data\n #create_and_add_fcrepo3_set obj\n\n process_desc_metadata obj\n process_admin_metadata obj\n process_technical_metadata obj\n process_relsext_metadata obj\n\n# obj.save\n\n process_collection_metadata obj\n\n active_workflow = Sipity::Workflow.find(2)\n Sipity::Entity.create!(proxy_for_global_id: obj.to_global_id.to_s,\n workflow: active_workflow,\n workflow_state: nil)\n\n obj\n end", "def test_service_creation\n JavaBuildpack::Container::Payara::ServiceBindingsHandler.create_service_definitions_from_file_set(\n @payara_complete_domain_configs_yml,\n @config_cache_root,\n @payara_complete_domain_configs_props)\n JavaBuildpack::Container::Payara::ServiceBindingsHandler.create_service_definitions_from_bindings(\n @app_services_config,\n @payara_complete_domain_configs_props)\n\n log('Done generating Domain Configuration Property file for WLST: '\\\n \"#{@payara_complete_domain_configs_props}\")\n log('--------------------------------------')\n end", "def create\n @domain = Domain.new(domain_params.merge({:user_id => current_user.id}))\n respond_to do |format|\n if @domain.save\n format.html { redirect_to @domain, notice: 'Domain was successfully created.' 
}\n format.json { render action: 'show', status: :created, location: @domain }\n else\n format.html { render action: 'new' }\n format.json { render json: @domain.errors, status: :unprocessable_entity }\n end\n end\n end", "def create(name, org_guid, domain_wildcard, space_guid = nil)\n domain_exist = @client.domains.find { |domain|\n domain.name == name }\n\n org = @client.organization(org_guid)\n\n # if domain doesn't exist will create it and put the organization as owning org\n if (domain_exist == nil)\n domain = @client.domain\n domain.owning_organization = org\n domain.name = name\n domain.wildcard = domain_wildcard\n domain.create!\n else\n domain = domain_exist\n existing_org_domains = org.domains\n\n # if domain exist will check that the org and domain exists, if not will add it\n if (domain.owning_organization != org)\n raise CFoundry::DomainInvalid, \"Domain already exists\"\n elsif (!existing_org_domains.include?(domain))\n existing_org_domains << domain\n org.domains = existing_org_domains\n\n org.update!\n end\n end\n\n # if a space guid is provided will add a connection between domain and space\n if space_guid != nil\n space = @client.space(space_guid)\n existing_space_domains = space.domains\n if (!existing_space_domains.include?(domain))\n existing_space_domains << domain\n space.domains = existing_space_domains\n\n space.update!\n end\n end\n end", "def create_dependencies\n create_course_bin()\n\n #create 8 default semesters\n Semester.create_semesters(start_sem, start_year.to_i, 8) {|semester| semesters.concat semester} \n end", "def create_dependencies(con)\n @resolving_dependencies = true\n dependencies.each do |_, d|\n fail CircularDependencyError.new(name, d.name) if d.resolving_dependencies\n d.create_dependencies(con)\n d.create_or_update!(con)\n end\n @resolving_dependencies = false\n end", "def create\n @crm_domain = Crm::Domain.new(crm_domain_params)\n\n respond_to do |format|\n if @crm_domain.save\n format.html { redirect_to @crm_domain, notice: 'Domain was successfully created.' 
}\n format.json { render :show, status: :created, location: @crm_domain }\n else\n format.html { render :new }\n format.json { render json: @crm_domain.errors, status: :unprocessable_entity }\n end\n end\n end", "def setup_dns(domain)\n# TODO should we just use the ID instead of the full href?\n [email protected]\n @dns = SharedDns.new(domain)\n raise \"Unable to reserve DNS\" unless @dns.reserve_dns(owner)\n @dns.set_dns_inputs(@deployment)\n end", "def setup\n @subject = Fog::Compute[:google].servers\n @factory = ServersFactory.new(namespaced_name)\n @servers = ServersFactory.new(namespaced_name)\n @disks = DisksFactory.new(namespaced_name)\n end", "def create(domain)\n \n r = whoisgem(domain) # returns the ruby whois response aka the Whois::Record object\n \n # preparing the contact objects\n registrant = Contact.new(r.registrant_contact)\n admin = Contact.new(r.admin_contact)\n tech = Contact.new(r.technical_contact)\n \n # preparing the nameservers\n dns = Nameservers.new(r.nameservers)\n \n # building the record that will be inserted in couch\n #@_id = domain\n @domain_id = r.domain_id\n @domain_name = r.domain\n @status = r.status\n @available = r.available?\n @registered = r.registered?\n @created_on = r.created_on\n @updated_on = r.updated_on\n @expires_on = r.expires_on\n @last_update = r.last_update\n @registrar = Registrar.new(r.registrar.id,\n r.registrar.name,\n r.registrar.organization)\n @registrant = registrant\n @admin = admin\n @technical = tech\n @nameservers = dns\n \n # not implemented yet\n @watchlist = nil\n \n CouchPotato.database.save_document! self\n \n # the boolean is an indicator wether the insertion succeeded or not\n # return boolean\n end", "def standard_domain_setup(domain)\n set_appserver_domain_template(domain)\n set_user_prefs_dir(domain)\n set_tmpdir(domain)\n disable_update_tool(domain)\n enable_implicit_cdi(domain)\n setup_default_admin(domain)\n add_default_file_realm(domain, 'file')\n set_default_auth_realm(domain, 'file')\n disable_classloading_delegation(domain)\n disable_autodeploy(domain)\n disable_dynamic_reload(domain)\n disable_non_portable_jndi_names(domain)\n domain.ports << 8080\n end", "def create\n in_directory do\n raise(\"Please use a specific Condom class, not Base.\")\n end\n end", "def setup_dns(domain)\n # TODO should we just use the ID instead of the full href?\n [email protected]\n @dns = SharedDns.new(domain)\n raise \"Unable to reserve DNS\" unless @dns.reserve_dns(owner)\n @dns.set_dns_inputs(@deployment)\n end", "def setup_dns(domain)\n # TODO should we just use the ID instead of the full href?\n [email protected]\n @dns = SharedDns.new(domain)\n raise \"Unable to reserve DNS\" unless @dns.reserve_dns(owner)\n @dns.set_dns_inputs(@deployment)\n end", "def check_and_create_third_domain(metadata)\n if (metadata.length == 13)\n @domain_z = DataDomain.new(metadata[9], metadata[10], \\\n metadata[11], metadata[12])\n else\n @domain_z = nil\n end\n end", "def register domain_model_instnace\n\t\t@domain_model_instnaces.push domain_model_instnace\n\tend", "def create\n # TODO: refactor models so that externaluserdomain is in portal namespace? 
\n # @external_user_domain = Portal::ExternalUserDomain.new(params[:external_user_domain])\n @external_user_domain = ExternalUserDomain.new(params[:external_user_domain])\n respond_to do |format|\n if @external_user_domain.save\n flash[:notice] = 'Portal::ExternalUserDomain was successfully created.'\n format.html { redirect_to(@external_user_domain) }\n format.xml { render :xml => @external_user_domain, :status => :created, :location => @external_user_domain }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @external_user_domain.errors, :status => :unprocessable_entity }\n end\n end\n end", "def run\n super\n\n entity_name = _get_entity_name\n check_and_create entity_name\n\n # trello strips out periods, so handle dns records differently\n if _get_entity_type_string == \"Domain\"\n check_and_create entity_name.split(\".\").first\n check_and_create entity_name.gsub(\".\",\"\")\n end\n\n end", "def domain_create(args)\n raise ArgumentError, \"You can't create a domain with ns records, you must do an update afterwards\" if args.key?(:ns)\n raise ArgumentError, \"You can't create a domain with ds or key records, you must do an update afterwards\" if args.key?(:dsData) || args.key?(:keyData)\n super\n end", "def create\n sys_bds_array = BridgeDomainVNI.string_to_array(system_bridge_domain)\n if (@bd_ids_list - sys_bds_array).any?\n add_bds = Utils\n .array_to_str((@bd_ids_list - sys_bds_array), false)\n config_set('bridge_domain_vni', 'system_bridge_domain', oper: 'add',\n bd: add_bds)\n end\n config_set('bridge_domain_vni', 'create', bd: @bd_ids)\n end", "def initialize(name, domain)\n raise Exception.new(\"#{name} is not a valid service name\") unless valid_directory_name(name.to_s)\n \n @name = name\n @booted = false\n @domain = domain\n @full_name = \"#{domain.name}::#{@name}\"\n @path = \"#{SERVICES_PATH}/#{domain.name}/#{@name}\"\n @port_in = $port_start+=1\n @port_out = $port_start+=1\n @status = \"stopped\"\n @runtime = JJRuby.newInstance()\n \n # Create the domain directory if not present\n FileUtils.mkdir_p(@path, :mode => 0755)\n \n # And finally set the meta-data for the service\n @meta_data = ServiceMetaData.new(self)\n ContainerLogger.debug \"Service added #{domain.name}::#{name}\" \n end", "def create_domain_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DomainApi.create_domain ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling DomainApi.create_domain\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling DomainApi.create_domain\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/domain'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = 
@api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['comment'] = opts[:'comment'] if !opts[:'comment'].nil?\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'DomainResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"DomainApi.create_domain\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DomainApi#create_domain\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def build_domain(user_storage)\n Complexy::Domain::User.new(user_storage.id, user_storage.first_name, user_storage.last_name)\n end", "def initialize\n @top_level = Domains.new\n end", "def initialize\n @domains = []\n end", "def run\n super\n\n # Set the dns_record to the appropriate suffix\n dns_record = @entity.name\n\n # Handle cases of *.test.com (pretty common when grabbing\n # DNSRecords from SSLCertificates)\n if dns_record[0..1] == \"*.\"\n dns_record = dns_record[2..-1]\n end\n \n if @options[:subdomain_list]\n subdomain_list = @options['subdomain_list']\n else\n # use the deepmagic list\n subdomain_list = IO.readlines(\"#{Rails.root}/data/dns_sub.list\")\n # Add a builtin domain list \n #subdomain_list = [\"mx\", \"mx1\", \"mx2\", \"www\", \"ww2\", \"ns1\", \"ns2\", \"ns3\", \"test\", \"mail\", \"owa\", \"vpn\", \"admin\",\n # \"gateway\", \"secure\", \"admin\", \"service\", \"tools\", \"doc\", \"docs\", \"network\", \"help\", \"en\", \"sharepoint\", \"portal\",\n # \"public\", \"private\", \"pub\", \"zeus\", \"mickey\", \"time\", \"web\", \"it\", \"my\", \"photos\", \"safe\", \"download\", \"dl\", \n # \"search\", \"staging\"]\n end\n\n @task_logger.good \"Using subdomain list: #{subdomain_list}\"\n\n begin\n # Check for wildcard DNS, modify behavior appropriately. (Only create entities\n # when we know there's a new host associated)\n if Resolv.new.getaddress(\"noforkingway#{rand(100000)}.#{dns_record}\")\n wildcard_domain = true \n @task_logger.error \"WARNING! Wildcard domain detected, only saving validated domains/hosts.\"\n end\n rescue Resolv::ResolvError\n @task_logger.good \"Looks like no wildcard dns. 
Moving on.\"\n end\n\n subdomain_list.each do |sub|\n sub = sub.chomp\n begin\n # Calculate the domain name\n if @options[:mashed_domains]\n # blatently stolen from HDM's webinar on password stealing, try without a dot to see\n # if this domain has been hijacked by someone - great for finding phishing attempts\n domain = \"#{sub}#{dns_record}\"\n else \n domain = \"#{sub}.#{dns_record}\"\n end\n\n # Try to resolve\n resolved_address = Resolv.new.getaddress(domain)\n @task_logger.good \"Resolved Address #{resolved_address} for #{domain}\" if resolved_address\n \n # If we resolved, create the right entities\n if resolved_address\n unless wildcard_domain\n @task_logger.good \"Creating domain and host entities...\"\n # create new host and domain entitys\n d = create_entity(Entities::DnsRecord, {:name => domain })\n h = create_entity(Entities::Host, {:name => resolved_address})\n else\n # Check to make sure we don't already have this host, if we don't \n # we probably want to save the domain as a new entity (and the host)\n if Entities::Host.where(:name => resolved_address).count == 0\n d = create_entity(Entities::DnsRecord, {:name => domain })\n h = create_entity(Entities::Host, {:name => resolved_address})\n end\n end\n end\n rescue Exception => e\n @task_logger.error \"Hit exception: #{e}\"\n end\n end\n end", "def domain; end", "def domain; end", "def domain; end", "def domain; end", "def setup\n # Create a standard project (30 days)\n @Project1 = Project.new(Date.new(2000, 1, 1), Date.new(2000, 1, 10))\n # Create a standard calendar (30 days, 1 hour per day)\n @Calendar1 = {}\n 10.times do |iIdx|\n @Calendar1[Date.new(2000, 1, iIdx+1)] = 1\n end\n # Create standard resources\n @Resource1 = Resource.new('R1', @Calendar1)\n end", "def initialize(name, container)\n raise Exception.new(\"#{name} is not a valid domain name\") unless valid_directory_name(name.to_s)\n \n @name = name\n @services = Hash.new\n @container = container\n \n # Create the domain directory if not present\n FileUtils.mkdir_p(\"#{SERVICES_PATH}/#{@name}\", :mode => 0755)\n \n ContainerLogger.debug \"Domain added #{name}\"\n end", "def create_everything\n create_users\n create_user_keys\n create_comments\n create_filters\n create_columns\n create_organizations\n create_approvals\n create_whitelists\n create_user_key_columns\n create_user_key_organizations\n end", "def process_domains\n domains.each do |domain|\n params = options\n params[:host] = configuration.host\n params[:server] = servers[domain][\"server\"]\n compiler = YMDP::Compiler::Base.new(domain, git_hash, params)\n \n compiler.process_all\n end\n end", "def create(name, attributes)\n attributes = attributes.dup\n\n # Add the objectclasses\n attributes[\"objectClass\"] = objectclasses.collect { |o| o.to_s }\n attributes[\"objectClass\"] << \"top\" unless attributes[\"objectClass\"].include?(\"top\")\n\n attributes[rdn.to_s] = [name]\n\n # Generate any new values we might need.\n generate(attributes)\n\n # And create our resource.\n connect { |conn| conn.add dn(name), attributes }\n end", "def create!\n create || raise(\"Errors in the package creation\")\n end", "def setup\n @subject = Fog::Compute[:google].servers\n @factory = ServersFactory.new(namespaced_name)\n end", "def setup\n @subject = Fog::Compute[:google].servers\n @factory = ServersFactory.new(namespaced_name)\n end", "def create\n Puppet.debug \"starting create #{self.class.to_s}\"\n dns_service = get_dns_service(get_zone)\n dns_service.create_zone(get_zone, get_email, get_ttl) if dns_service != nil\n 
Puppet.debug \"done with create #{self.class.to_s}\"\n end", "def prepare_domain(domain)\n domain.start unless domain.active\n wait_for_ip_address(domain)\n end", "def create\n @timestamp = '%10.6f' % Time.now.to_f\n @handle = @timestamp.sub('.', '')\n\n @registrant = Registrant.new(registrant_params)\n @build_detail_temp = @registrant.build_detail(registrant_params[:detail_attributes])\n\n @registrant.handle = @handle\n\n # raise @reg_temp.inspect\n @order = Order.create(user_id:current_user.id)\n @order.save\n\n # @domain = current_user.domains.new(domain_params)\n @build_domain_temp = @registrant.build_domain(registrant_params[:domain_attributes])\n\n @build_domain_temp.user_id = current_user.id\n @build_domain_temp.registrant_id = @registrant.id\n @build_domain_temp.order_id = @order.id\n\n respond_to do |format|\n if @registrant.save\n @build_detail_temp.save\n @build_domain_temp.save\n\n format.html { redirect_to domains_path, notice: 'Registrant was successfully created.' }\n format.json { render :show, status: :created, location: @registrant }\n else\n format.html { render :new }\n format.json { render json: @registrant.errors, status: :unprocessable_entity }\n end\n end\n end", "def initialize\n @domains = {}\n content = File.read \"#{Driver.config_dir}/root_registry_sea1.json\"\n store_domain(Driver.parse_json(content)['resolveDomainResponse'], 'urn:theplatform:auth:root')\n end", "def initialize(domain)\n super()\n\n @domain = domain\n\n @f = Gem::SpecFetcher.fetcher\n\n @always_install = []\n @ignore_dependencies = false\n @ignore_installed = false\n @local = {}\n @local_source = Gem::Source::Local.new\n @remote_set = Gem::Resolver::BestSet.new\n @force = false\n @specs = {}\n end", "def create(state)\n info(\"Creating instance #{instance.name}\")\n return if state[:server_id]\n\n domain = create_domain\n state[:server_id] = domain.id\n state[:hostname] = domain.public_ip_address\n\n instance.transport.connection(state).wait_until_ready\n\n info(\"Libvirt instance #{domain.name} created.\")\n end", "def create\n @domain = Domain.new(params[:domain])\n# domain_regex = /[a-z0-9]*\\.[a-z0-9]*/\n# @domain.name = @domain.name.match(domain_regex)[0] \n respond_to do |format|\n if @domain.save\n format.html { redirect_to @domain, notice: 'Domain was successfully created.' 
}\n format.json { render json: @domain, status: :created, location: @domain }\n else\n format.html { render action: \"new\" }\n format.json { render json: @domain.errors, status: :unprocessable_entity }\n end\n end\n end", "def clone_to_domain(objs, resource)\n objs.each do |obj|\n hash = obj.attributes\n hash[:id] = nil\n hash[:domain_id] = self.id\n resource.create!(hash)\n end\n end", "def create_fedora_objects(attributes)\n fc = factory_class(model)\n f = fc.new(attributes, files_directory)\n f.run\n end", "def domain_class\n self.class.const_get(:DSL)\n end", "def create_schema\n Apartment::Database.create(subdomain)\n end", "def create\n @email_domain = Email::Domain.new(params[:email_domain])\n\n respond_to do |format|\n if @email_domain.save\n format.html { redirect_to(@email_domain, :notice => 'Domain was successfully created.') }\n format.xml { render :xml => @email_domain, :status => :created, :location => @email_domain }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @email_domain.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n create_hiera_template\n create_manifests_node\n create_node_checkpoint\n end", "def set_domain\n if check_fields_google_domain? # google domain\n @domain.push(:Google)\n elsif check_fields_nas_domain? # NAS domain\n @domain.push(:NAS)\n else \n @domain.push(:Cross)\n end\n notify \"DOMAIN : #{@domain}\"\n end", "def create\n create_directories\n end", "def new\n @domain = DOMAIN.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @domain }\n end\n end", "def domains; end", "def create_database\n DATA[:accounts].each do |account|\n CalendarCoordinator::AccountService.create(data: account).save\n end\n\n account = CalendarCoordinator::Account.first\n DATA[:calendars].each do |calendar|\n account.add_owned_calendar(calendar)\n end\n end", "def create_domain(create_domain_options, opts = {})\n data, _status_code, _headers = create_domain_with_http_info(create_domain_options, opts)\n data\n end", "def build_domain \n unless self.domain\n self.domain = URI.parse(self.url).host \n self.save\n end\n end", "def add_domain\n url = args.shift\n create_dependency(url)\n puts \"Added #{url} to the monitoring list\"\n end", "def create_class(domain, klass, mindevcount)\n modify_class(domain, klass, mindevcount, :create)\n end", "def create_company\n company = Company.create!()\n company\n end", "def validate\n @domains.each do |d|\n raise 'domain definition error' unless d.class == Domain\n end\n @domains.map(&:validate)\n end", "def setup()\n create_directories\n end", "def generate!\n controllers = (Rails.application.routes.routes.map do |route|\n controller_name = \"#{route.requirements[:controller].camelize}Controller\"\n ActiveSupport::Dependencies.ref(controller_name).get\n end).insert(0, ApplicationController).uniq\n\n valid_controllers = valid_controllers()\n valid_locales = valid_locales()\n\n controllers.each do |controller|\n controller_path = controller.controller_path\n if controller.respond_to?(:managable_content_for) && valid_controllers.include?(controller_path)\n Page.transaction do\n valid_locales.each do |locale|\n # Create Page if it does not exist yet\n page = Page.where(:controller_path => controller_path, :locale => locale).first || Page.new()\n if page.new_record?\n page.controller_path = controller_path\n page.locale = locale\n page.save!\n end\n\n # Create PageContent if it does not exist yet\n contents = (controller == ApplicationController) 
? controller.managable_layout_content_for : controller.managable_content_for\n contents.each do |key|\n if page.page_contents.where(:key => key).first.nil?\n page_content = page.page_contents.build\n page_content.key = key\n page_content.save!\n end\n end\n end\n end\n end\n end\n end", "def make; end", "def test_dynamic_do_build\r\n\t\tVCR.use_cassette('dynamic_do_build') do\r\n\t\t\t# A. get digital object\r\n\t\t\tcdo = CordraRestClient::DigitalObject.find(API_URL, \"#{CORDRA_PREFIX}/B100003484\")\r\n\t\t\t# Check object id and type\r\n\t\t\tassert_equal \"#{CORDRA_PREFIX}/B100003484\", cdo.id\r\n\t\t \tassert_equal \"DigitalSpecimen\", cdo.type\r\n\t\t\t# B. get schema\r\n\t\t\t# The schema will be used to build a DO class dinamically\r\n\t\t\tdo_schema=CordraRestClient::DigitalObject.get_schema(API_URL, cdo.type.gsub(\" \",\"%20\"))\r\n\t\t\t# check that the result is saved\r\n\t\t\tassert_equal \"object\", do_schema[\"type\"]\r\n\t\t\tassert_equal \"DigitalSpecimen\", do_schema[\"title\"]\r\n\t\t\t# C. build new class using schema\r\n\t\t\tdo_properties = do_schema[\"properties\"].keys\r\n\t\t\tdo_c = CordraRestClient::DigitalObjectFactory.create_class cdo.type.gsub(\" \",\"\"), do_properties\r\n\t\t\tnew_ds = do_c.new\r\n\t\t\t# the DO contents are a hash\r\n\t\t\tassert_equal Hash, cdo.content.class\r\n\t\t\t# assing object values in content to class\r\n\t\t\tCordraRestClient::DigitalObjectFactory.assing_attributes new_ds, cdo.content\r\n\t\t\tcdo.content.each do |field, arg|\r\n\t\t\t\tinstance_var = field.gsub('/','_')\r\n\t\t\t\tinstance_var = instance_var.gsub(' ','_')\r\n\t\t\t\tassert_equal arg, new_ds.instance_variable_get(\"@#{instance_var}\")\r\n\t\t\tend\r\n\t\tend\r\n\tend", "def domain(base_name)\n Domain.new(self, base_name, @uid)\n end", "def d(*args)\n Dependency.new(*args)\n end", "def add_dependent_entities\n (NUM_DEPENDENT_FORM_ENTITIES - @resource.creators.length).times do\n @resource.creators.build\n end\n (NUM_DEPENDENT_FORM_ENTITIES - @resource.extents.length).times do\n @resource.extents.build\n end\n (NUM_DEPENDENT_FORM_ENTITIES - @resource.resource_dates.length).times do\n @resource.resource_dates.build\n end\n (NUM_DEPENDENT_FORM_ENTITIES - @resource.resource_notes.length).times do\n @resource.resource_notes.build\n end\n (NUM_DEPENDENT_FORM_ENTITIES - @resource.subjects.length).times do\n @resource.subjects.build\n end\n end", "def save\n # Get the spec, since if we undefine the domain later, we won't be\n # able to.\n definable = spec\n\n # To modify an existing domain, we actually undefine and redefine it.\n # We can't use `set_domain` here since that will clear the `domain`\n # pointer, which we need to get the proper domain spec.\n domain.undefine if domain\n\n # At this point, assuming the virtuoso settings are correct, we\n # should have a bootable VM spec, so define it and reload the VM\n # information.\n set_domain(connection.domains.define(definable))\n end", "def test_00_create\n\t\tprintTestHeader \"Test creation of Joints without JointGroups\"\n\n\t\tassert_raises(ScriptError) { ODE::Joint::new } \n\n\t\tJointClasses.each {|klass|\n\t\t\tjoint = nil\n\n\t\t\tdebugMsg \"Create: Testing the #{klass.name} class.\"\n\t\t\tassert_raises(ArgumentError) { klass.new } \n\t\t\tassert_nothing_raised { joint = klass.new(@world) }\n\t\t\tassert_instance_of( klass, joint )\n\n\t\t\tdebugMsg \"Clobbering joint\"\n\t\t\t$stderr.flush\n\t\t\tjoint = nil\n\t\t\tcollectGarbage()\n\t\t}\n\tend" ]
[ "0.68713975", "0.6569175", "0.6260142", "0.62500805", "0.6244911", "0.6242045", "0.6226356", "0.61096287", "0.6078012", "0.60581654", "0.6028687", "0.6008312", "0.59861284", "0.5936764", "0.59363437", "0.5932347", "0.5929594", "0.58943474", "0.583743", "0.5825705", "0.5784049", "0.5783703", "0.57560134", "0.5685002", "0.5681077", "0.56642824", "0.56642824", "0.5638339", "0.5585881", "0.5576818", "0.5571997", "0.5571562", "0.5571533", "0.5553333", "0.5548555", "0.55382127", "0.5521741", "0.55125636", "0.5502364", "0.54947305", "0.5491326", "0.5490887", "0.5490887", "0.54794407", "0.54772335", "0.5467384", "0.54334414", "0.54269147", "0.5402929", "0.5370773", "0.5369103", "0.5350345", "0.53421724", "0.53378403", "0.53226715", "0.5293882", "0.5293882", "0.5293882", "0.5293882", "0.5284676", "0.5273477", "0.5245612", "0.52414614", "0.5236397", "0.5219168", "0.521688", "0.521688", "0.521426", "0.5201859", "0.51857513", "0.51811093", "0.5180279", "0.5178647", "0.5170732", "0.51610994", "0.51592547", "0.5152025", "0.5145131", "0.5144411", "0.5140528", "0.5132496", "0.5126247", "0.5126238", "0.510856", "0.51055306", "0.5102413", "0.51020354", "0.509679", "0.5092677", "0.507958", "0.50761604", "0.50749135", "0.5066627", "0.5062939", "0.5061454", "0.5055788", "0.50528336", "0.5051719", "0.5046953", "0.503484" ]
0.6419313
2
Prepares the domain for SSH connections
def prepare_domain(domain) domain.start unless domain.active wait_for_ip_address(domain) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_host\n uri = Addressable::URI.parse(self.url)\n self.site = uri.site\n self.domain_tld = [uri.domain,uri.tld].join('.')\n end", "def host=(_); end", "def setup(credentials = {})\n requires :public_key, :public_ip_address, :username\n\n credentials[:password] = password unless self.password.nil?\n credentails[:key_data] = [private_key] if self.private_key\n\n commands = [\n %{mkdir .ssh},\n ]\n if public_key\n commands << %{echo \"#{public_key}\" >> ~/.ssh/authorized_keys}\n end\n\n # wait for domain to be ready\n Timeout::timeout(360) do\n begin\n Timeout::timeout(8) do\n Fog::SSH.new(public_ip_address, username, credentials.merge(:timeout => 4)).run('pwd')\n end\n rescue Errno::ECONNREFUSED\n sleep(2)\n retry\n rescue Net::SSH::AuthenticationFailed, Timeout::Error\n retry\n end\n end\n Fog::SSH.new(public_ip_address, username, credentials).run(commands)\n end", "def build_domain \n unless self.domain\n self.domain = URI.parse(self.url).host \n self.save\n end\n end", "def normalized_host; end", "def set_server_name; self.name = domain; end", "def make_host\n unless self.host.host\n self.host.update(:host => true)\n end\n end", "def initialize_ssh; end", "def domain\n server_name || http_host\n end", "def set_hostname(host, domain)\n set_hostname = <<SCRIPT\n sudo hostname #{host}.#{domain}\nSCRIPT\nend", "def host\n domain\n end", "def host=(new_host); end", "def resolve_domain\n domain = prompt(\"example.com\", \"Add domain where app will run (will be used in nginx configuration)\\n\")\n @app_config.write(\"domain\", domain)\n end", "def setup(credentials = {})\n requires :public_key, :ssh_ip_address, :username\n\n credentials[:proxy]= ssh_proxy unless ssh_proxy.nil?\n credentials[:password] = password unless self.password.nil?\n credentials[:key_data] = [private_key] if self.private_key\n\n commands = [\n %{mkdir .ssh},\n # %{passwd -l #{username}}, #Not sure if we need this here\n # %{echo \"#{Fog::JSON.encode(attributes)}\" >> ~/attributes.json}\n ]\n if public_key\n commands << %{echo \"#{public_key}\" >> ~/.ssh/authorized_keys}\n end\n\n # wait for domain to be ready\n Timeout::timeout(360) do\n begin\n Timeout::timeout(8) do\n Fog::SSH.new(ssh_ip_address, username, credentials.merge(:timeout => 4)).run('pwd')\n end\n rescue Errno::ECONNREFUSED\n sleep(2)\n retry\n rescue Net::SSH::AuthenticationFailed, Timeout::Error\n retry\n end\n end\n Fog::SSH.new(ssh_ip_address, username, credentials).run(commands)\n end", "def establish_hostname_and_domain\n ShellSpinner \"# checking whether hostname and domain have been set\", false do\n\n hostname, domain = nil\n\n @config[:route53][:hostname] = find_with_context(:hostname, :user_data_template_variables) if find_with_context(:hostname, :user_data_template_variables)\n @config[:route53][:domain] = find_with_context(:domain, :user_data_template_variables) if find_with_context(:domain, :user_data_template_variables)\n @config[:route53][:hostname] = find_with_context(:hostname, :route53) if find_with_context(:hostname, :route53)\n @config[:route53][:domain] = find_with_context(:domain, :route53) if find_with_context(:domain, :route53)\n\n help = <<-HEREDOC.strip_heredoc\n # \n # checked:\n # 'common', 'user_data_template_variables',\n # and 'route53' sections of config\n # --common-variables, --route53-variables,\n # and --user-data-template-variables\n #\n # route53 dynamic DNS will not be updated!\n HEREDOC\n\n domain = @config[:route53][:domain]\n hostname = @config[:route53][:hostname]\n\n if domain.nil? 
and hostname.nil?\n debug <<-HEREDOC.strip_heredoc\n # WARNING: hostname and domain not found\"\n #{help}\n\n HEREDOC\n\n elsif domain and hostname.nil?\n debug <<-HEREDOC.strip_heredoc\n # WARNING: hostname not found\n #{help}\n\n HEREDOC\n\n elsif domain.nil? and hostname\n debug <<-HEREDOC.strip_heredoc\n # WARNING: domain not found\n #{help}\n\n HEREDOC\n\n else\n debug <<-HEREDOC.strip_heredoc\n # found hostname and domain:\n hostname: #{hostname}\n domain: #{domain}\n\n HEREDOC\n\n @config[:route53][:new_dns_records] = {\n :public => {\n :alias => \"#{hostname}.#{domain}.\",\n :target => nil\n },\n :private => {\n :alias => \"#{hostname}-private.#{domain}.\",\n :target => nil\n }\n }\n end\n end\n\n puts\n end", "def remember_host!; end", "def host\n @host = self.hostuser\n end", "def setup_dns(domain)\n# TODO should we just use the ID instead of the full href?\n [email protected]\n @dns = SharedDns.new(domain)\n raise \"Unable to reserve DNS\" unless @dns.reserve_dns(owner)\n @dns.set_dns_inputs(@deployment)\n end", "def set_domain\n if check_fields_google_domain? # google domain\n @domain.push(:Google)\n elsif check_fields_nas_domain? # NAS domain\n @domain.push(:NAS)\n else \n @domain.push(:Cross)\n end\n notify \"DOMAIN : #{@domain}\"\n end", "def initialize()\n @remote_host = \"192.168.1.1\"\n @rsa_key = \"~/.ssh/router_rsa\"\n @dns_source = \"http://www.netflixdnscodes.com\"\n end", "def setup_dns(domain)\n # TODO should we just use the ID instead of the full href?\n [email protected]\n @dns = SharedDns.new(domain)\n raise \"Unable to reserve DNS\" unless @dns.reserve_dns(owner)\n @dns.set_dns_inputs(@deployment)\n end", "def setup_dns(domain)\n # TODO should we just use the ID instead of the full href?\n [email protected]\n @dns = SharedDns.new(domain)\n raise \"Unable to reserve DNS\" unless @dns.reserve_dns(owner)\n @dns.set_dns_inputs(@deployment)\n end", "def run\n super\n\n # Set the dns_record to the appropriate suffix\n dns_record = @entity.name\n\n # Handle cases of *.test.com (pretty common when grabbing\n # DNSRecords from SSLCertificates)\n if dns_record[0..1] == \"*.\"\n dns_record = dns_record[2..-1]\n end\n \n if @options[:subdomain_list]\n subdomain_list = @options['subdomain_list']\n else\n # use the deepmagic list\n subdomain_list = IO.readlines(\"#{Rails.root}/data/dns_sub.list\")\n # Add a builtin domain list \n #subdomain_list = [\"mx\", \"mx1\", \"mx2\", \"www\", \"ww2\", \"ns1\", \"ns2\", \"ns3\", \"test\", \"mail\", \"owa\", \"vpn\", \"admin\",\n # \"gateway\", \"secure\", \"admin\", \"service\", \"tools\", \"doc\", \"docs\", \"network\", \"help\", \"en\", \"sharepoint\", \"portal\",\n # \"public\", \"private\", \"pub\", \"zeus\", \"mickey\", \"time\", \"web\", \"it\", \"my\", \"photos\", \"safe\", \"download\", \"dl\", \n # \"search\", \"staging\"]\n end\n\n @task_logger.good \"Using subdomain list: #{subdomain_list}\"\n\n begin\n # Check for wildcard DNS, modify behavior appropriately. (Only create entities\n # when we know there's a new host associated)\n if Resolv.new.getaddress(\"noforkingway#{rand(100000)}.#{dns_record}\")\n wildcard_domain = true \n @task_logger.error \"WARNING! Wildcard domain detected, only saving validated domains/hosts.\"\n end\n rescue Resolv::ResolvError\n @task_logger.good \"Looks like no wildcard dns. 
Moving on.\"\n end\n\n subdomain_list.each do |sub|\n sub = sub.chomp\n begin\n # Calculate the domain name\n if @options[:mashed_domains]\n # blatently stolen from HDM's webinar on password stealing, try without a dot to see\n # if this domain has been hijacked by someone - great for finding phishing attempts\n domain = \"#{sub}#{dns_record}\"\n else \n domain = \"#{sub}.#{dns_record}\"\n end\n\n # Try to resolve\n resolved_address = Resolv.new.getaddress(domain)\n @task_logger.good \"Resolved Address #{resolved_address} for #{domain}\" if resolved_address\n \n # If we resolved, create the right entities\n if resolved_address\n unless wildcard_domain\n @task_logger.good \"Creating domain and host entities...\"\n # create new host and domain entitys\n d = create_entity(Entities::DnsRecord, {:name => domain })\n h = create_entity(Entities::Host, {:name => resolved_address})\n else\n # Check to make sure we don't already have this host, if we don't \n # we probably want to save the domain as a new entity (and the host)\n if Entities::Host.where(:name => resolved_address).count == 0\n d = create_entity(Entities::DnsRecord, {:name => domain })\n h = create_entity(Entities::Host, {:name => resolved_address})\n end\n end\n end\n rescue Exception => e\n @task_logger.error \"Hit exception: #{e}\"\n end\n end\n end", "def set_site_domain\n begin\n @enable_games = false\n @enable_topics = false\n @enable_wiki = false\n @site_domain = 'lefiores.com'\n @facebook_url = 'https://www.facebook.com/www.gamerz.wiki'\n @twitter_url = 'https://twitter.com/gamerzwiki'\n @google_plus_url = 'https://plus.google.com/116093313158973278682/about'\n\n if Rails.env.production? \n @site_domain = 'lefiores.com' \n \n else\n @site_domain = 'localhost:3000' \n end\n rescue \n @site_domain = 'lefiores.com' \n end\n end", "def merge_host\n\t\t\t\"#{client.account}.#{client.hostname}\"\n\t\tend", "def change_host\n @parameters[:hosts] = @parameters[:hosts].sort_by { rand } if @parameters[:randomize]\n\n # Set first as master and send it to the end of array\n current_host = @parameters[:hosts].shift\n @parameters[:hosts] << current_host\n\n @ssl = current_host[:ssl]\n @host = current_host[:host]\n @port = current_host[:port] || Connection::default_port(@ssl)\n @login = current_host[:login] || \"\"\n @passcode = current_host[:passcode] || \"\"\n end", "def domain=(nd)\n self['SERVER_NAME'] = self['HTTP_HOST'] = nd\n end", "def set_host host\n @host = host\n end", "def get_server_domain\n @hostname ||= Socket.gethostname\n end", "def parse_domain_name\n if @options[:domain].blank? 
&& !@options[:username].blank?\n if @options[:username].include?('\\\\')\n @options[:domain], @options[:username] = username.split('\\\\')\n elsif @options[:username].include?('/')\n @options[:domain], @options[:username] = username.split('/')\n end\n end\n end", "def initialize_host\n self.host = (Host.find(:name => settings['host']) || Host.new(:name => settings['host']))\n\n current_host_group_names = (host.host_groups || []).map(&:name)\n current_template_names = (host.templates || []).map(&:name)\n\n host_groups_to_add, templates_to_add = [], []\n\n (self.host_groups || []).each do |hg|\n host_groups_to_add << hg unless current_host_group_names.include?(hg.name)\n end\n\n (self.templates || []).each do |t|\n templates_to_add << t unless current_template_names.include?(t.name)\n end\n\n host.host_groups = ((host.host_groups || []) + host_groups_to_add).flatten.compact.uniq\n host.templates = ((host.templates || []) + templates_to_add).flatten.compact.uniq\n host.save\n host\n end", "def determine_hostname\n @info[:hostname] = @shell.query('HOST', 'hostname')\n end", "def initialize host, options={}\n super $stdout, options\n\n @host, @user = host.split(\"@\").reverse\n\n @user ||= options[:user]\n\n @rsync_flags = [\"-azrP\"]\n @rsync_flags.concat [*options[:rsync_flags]] if options[:rsync_flags]\n\n @ssh_flags = [\n \"-o ControlMaster=auto\",\n \"-o ControlPath=~/.ssh/sunshine-%r@%h:%p\"\n ]\n @ssh_flags.concat [\"-l\", @user] if @user\n @ssh_flags.concat [*options[:ssh_flags]] if options[:ssh_flags]\n\n @parent_pid = nil\n\n self.class.register self\n end", "def build_hostname\n hostname\n end", "def ssh_uri\n unless @uri.host\n raise(InvalidConfig,\"URI does not have a host: #{@uri}\",caller)\n end\n\n new_uri = @uri.host\n new_uri = \"#{@uri.user}@#{new_uri}\" if @uri.user\n\n return new_uri\n end", "def homeserver\n port_s = port ? \":#{port}\" : ''\n domain ? domain + port_s : ''\n end", "def homeserver\n port_s = port ? \":#{port}\" : ''\n domain ? 
domain + port_s : ''\n end", "def host=(_arg0); end", "def host=(_arg0); end", "def host_or_domain=(value)\n @host_or_domain = value\n end", "def set_hostpath\n @hostpath = \"#{request.scheme}://#{request.host}:#{request.port}\"\n end", "def initialize_host_groups\n self.host_groups = settings['host_groups'].split(',').flatten.compact.map(&:strip).uniq.map { |group_name | HostGroup.find_or_create(:name => group_name.strip) }\n end", "def set_domain\n if params[:domain]\n session[:domain] = params[:domain]\n end\n end", "def standard_domain_setup(domain)\n set_appserver_domain_template(domain)\n set_user_prefs_dir(domain)\n set_tmpdir(domain)\n disable_update_tool(domain)\n enable_implicit_cdi(domain)\n setup_default_admin(domain)\n add_default_file_realm(domain, 'file')\n set_default_auth_realm(domain, 'file')\n disable_classloading_delegation(domain)\n disable_autodeploy(domain)\n disable_dynamic_reload(domain)\n disable_non_portable_jndi_names(domain)\n domain.ports << 8080\n end", "def process_domains\n domains.each do |domain|\n params = options\n params[:host] = configuration.host\n params[:server] = servers[domain][\"server\"]\n compiler = YMDP::Compiler::Base.new(domain, git_hash, params)\n \n compiler.process_all\n end\n end", "def setup_domains\n authorize unless @heroku\n each_heroku_app do |heroku_env, app_name, repo|\n # get the domains that we are aiming towards\n domains = @config.domains(heroku_env)\n\n # get the domains that are already on the servers\n existing_domains = (@heroku.list_domains(app_name) || []).map{|a| a[:domain]}\n\n # remove the domains that need to be removed\n existing_domains.each do |existing_domain|\n # check to see if we need to delete this domain\n unless domains.include?(existing_domain)\n # delete this domain if they arent on the approved list\n destroy_command \"heroku domains:remove #{existing_domain} --app #{app_name}\"\n end\n end\n\n # add the domains that dont exist already\n domains.each do |domain|\n # check to see if we need to add this domain\n unless existing_domains.include?(domain)\n # add this domain if they are not already added\n creation_command \"heroku domains:add #{domain} --app #{app_name}\"\n end\n end\n\n # display the destructive commands\n output_destroy_commands(app_name)\n end\n end", "def getHostName()\n ENV['domain'] = ''\n if File.file?('domain.txt')\n ENV['domain'] = File.read('domain.txt')\n end\n\n if ENV['domain'] == ''\n puts \"Please enter a domain (it should follow the sitename.alphawerk.co.uk format!): \"\n ENV['domain'] = URI.escape(STDIN.gets.chomp)\n File.open(\"domain.txt\", \"w\") {|f| f.write(ENV['domain']) }\n end\n\n if ENV['domain'] == ''\n getHostName()\n else\n if ENV['domain'] !~ /^(.+)\\.alphawerk\\.co\\.uk$/\n puts \"Unconventional hostname format. 
sitename.alphawerk.co.uk format not followed\"\n end\n\n puts \"Setting up VM with domain: \"+ENV['domain']\n end\nend", "def set_hostname(server)\n\tserver.vm.provision 'shell', inline: \"hostname #{server.vm.hostname}\"\nend", "def canonical_hostname(domain)\n # Allow hostname overrides\n return $override_dashboard if $override_dashboard && domain == 'studio.code.org'\n return $override_pegasus if $override_pegasus && domain == 'code.org'\n\n return \"#{name}.#{domain}\" if ['console', 'hoc-levels'].include?($node_name)\n return domain if $node_env == 'production'\n\n # our HTTPS wildcard certificate only supports *.code.org\n # 'env', 'studio.code.org' over https must resolve to 'env-studio.code.org' for non-prod environments\n sep = (domain.include?('.code.org')) ? '-' : '.'\n return \"localhost#{sep}#{domain}\" if $node_env == 'development'\n return \"translate#{sep}#{domain}\" if $node_name == 'crowdin'\n \"#{$node_env}#{sep}#{domain}\"\nend", "def hostname(h)\n @config[:host] = h\n end", "def sshhost\n \"#{script.cluster}.osc.edu\"\n end", "def set_hostname(server)\r\n\tserver.vm.provision 'shell', inline: \"hostname #{server.vm.hostname}\"\r\nend", "def set_domain\n\t\tfname= \"#{self.class.name}.#{__method__}\"\n\t\tLOG.debug(fname) {\">>>>\"}\n\t\tif params[:domain]\n\t\t\tsession[:domain] = params[:domain]\n\t\tend\n\t\tLOG.debug(fname) {\"<<<<session[:domain]=#{session[:domain]}\"}\n\tend", "def parse_url_host\n url = self.url.gsub(/^https?\\:\\/\\//, '')\n url = url.gsub(/www\\./, '') unless (url.match(/www\\./).blank? && url.gsub(/www\\./, '').match(/[A-Za-z]/))\n self.url = \"https://\" + url\n end", "def set_hostname(server)\n server.vm.provision 'shell', inline: \"hostname #{server.vm.hostname}\"\nend", "def set_default_domain\n if GlobalPreference.get(:domain).blank?\n GlobalPreference.set!(:domain, request.host_with_port)\n end\n end", "def copy_name_fields\n self.host_name = self.host.host_name rescue nil\n self.hostgroup_name = self.hostgroup.hostgroup_name rescue nil\n end", "def base_hostname\n @username.match(/.com/) ? 
@username : \"#{@username}.tumblr.com\"\n end", "def initialize( username, domain = nil )\n if (domain)\n @username = username\n @domain = domain\n else\n (@username, @domain) = username.split('@')\n end\n replace( \"#{@username}\\@#{@domain}\".downcase )\n end", "def initialize(username, password, adshost, domain, tld)\n @username = username\n @password = password\n @adshost = adshost\n @domain = domain\n @tld = tld\n end", "def with_hostname(hostname)\n @hostname = hostname\n self\n end", "def new_hostname\n host || incremented_hostname || local_host_name\n end", "def hostname; end", "def hostname; end", "def ssh_uri\n new_uri = @uri.host\n new_uri = \"#{@uri.user}@#{new_uri}\" if @uri.user\n\n return new_uri\n end", "def initialize(domain); @domain = domain; end", "def set_url_host \n @url_host = request.protocol() + request.host_with_port() + '/' \n end", "def set_hostname\n @hostname = Hostname.find_by_name(params[:name])\n end", "def domain\n try_opt(:domain)\n end", "def host; config[:host]; end", "def setup\r\n setup_wiki\r\n setup_host_map\r\n setup_host\r\n end", "def host=(host)\n if host != @host\n @host = host\n @net_ldap = nil\n end\n end", "def ensure_site_host_setup\n @site_host = \"localhost:3000\"\n @site_host = Rails.application.routes.default_url_options[:host] ||= @site_host\n end", "def scheme_with_host(domain=nil)\n ['http://', domain || self.name, '/'].join(\"\")\n end", "def create\n\t\t@domain = Domain.new(:hostname => params[:hostname])\n\n\t\t# Attempt to save the domain, and return the appropriate JSON or Error\n\t\trespond_to do |format|\n\t\t\tif @domain.save\n\t\t\t\tformat.json { render json: @domain, status: :created }\n\t\t\telse\n\t\t\t\tformat.json { render json: @domain.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\t\t\n\t\t# Fetch the hostname IP address and update the record in a new thread\n\t\tt1=Thread.new{fetch_origin_ip()}\n\t\tt1.join\n\tend", "def base_domain(response)\n if response.respond_to? :request\n host = response.request.host.sub /:\\d+$/, ''\n return if host =~ /^([\\d.]+|localhost)$/\n\n host =~ /([^.]*)\\.([^.]*|..\\...|...\\...|..\\....)$/\n \".#{$1}.#{$2}\"\n end\n end", "def config_ssh(username, host, options = nil)\n @ssh = SSH.new(username, host, options)\n end", "def prepare(server); end", "def ssh_string\n uri = ssh_url\n user = \"#{uri.userinfo}@\" if uri.userinfo\n port = \":#{uri.port}\" if uri.port\n [user,uri.host,port].join\n end", "def ssh_string\n uri = ssh_url\n user = \"#{uri.userinfo}@\" if uri.userinfo\n port = \":#{uri.port}\" if uri.port\n [user,uri.host,port].join\n end", "def ssh_string\n uri = ssh_url\n user = \"#{uri.userinfo}@\" if uri.userinfo\n port = \":#{uri.port}\" if uri.port\n [user,uri.host,port].join\n end", "def fqdn\n ssh.exec!(\"hostname --fqdn\").chomp\n end", "def tls_hostname=(_arg0); end", "def canonical_instance_host(opennebula_instance)\n fail 'Instance object not provided!' 
unless opennebula_instance\n hosts = []\n\n opennebula_instance.each('HISTORY_RECORDS/HISTORY') { |history| hosts << history['HOSTNAME'] }\n hosts.compact!\n\n Egi::Fedcloud::Vmhound::Log.debug \"[#{self.class}] Assigning hosts #{hosts.inspect} \" \\\n \"to #{opennebula_instance['ID'].inspect}\"\n hosts.last\n end", "def parse_user_domain(hostname)\n return hostname.split('.').first if Rails.configuration.url_host.empty?\n Rails.configuration.url_host.split(',').each do |url_host|\n return hostname.chomp(url_host).chomp('.') if hostname.include?(url_host)\n end\n ''\n end", "def hosts=(_arg0); end", "def hosts=(_arg0); end", "def initialize(domain)\n\t\t\t@domain = domain\n\t\t\t@sdb = AwsSdb::Service.new(:logger=>LogDuck.new)\n\t\t\tcreate_domain unless domain_exist?\n\t\tend", "def prepare_env(load_stage = stage)\n \n load_stage = load_stage.to_s\n \n if !db_config \n Wpcap::Utility.error(\"No Database Configurations Found\")\n abort \n end\n \n if remote_config(:db_priv_pass).nil?\n Wpcap::Utility.error \"This no privileged user for this server found in servers ssh environment profile (did you set it up with wpcap?)\" \n abort\n end\n \n set(:local_dump) { \"/tmp/#{application}.sql.bz2\" }\n \n if db_config[load_stage]\n \n set(:db_priv_user) { remote_config(:db_priv_user).nil? ? db_config[load_stage][\"username\"] : remote_config(:db_priv_user) }\n set(:db_priv_pass) { remote_config(:db_priv_pass).nil? ? db_config[load_stage][\"password\"] : remote_config(:db_priv_pass) }\n set(:db_host) { db_config[load_stage][\"host\"] }\n set(:db_database) { db_config[load_stage][\"database\"] }\n set(:db_username) { db_config[load_stage][\"username\"] }\n set(:db_password) { db_config[load_stage][\"password\"] }\n set(:db_encoding) { db_config[load_stage][\"encoding\"] }\n set(:db_prefix) { db_config[load_stage][\"prefix\"] } \n \n else\n\n set :db_priv_user , remote_config(:db_priv_user) \n set :db_priv_pass , remote_config(:db_priv_pass) \n set :db_username , \"#{application.split(\".\").first}_#{stage}\"\n set :db_database , \"#{application.split(\".\").first}_#{stage}\"\n set :db_password , random_password(16)\n set :db_prefix , \"wp_\" || db_config[\"development\"][\"prefix\"]\n \n run \"mkdir -p #{shared_path}/config\"\n template \"mysql.yml.erb\", \"#{shared_path}/config/database.yml\"\n server_yaml = capture \"cat #{shared_path}/config/database.yml\"\n server_mysql_config_yaml = YAML.load(server_yaml)\n update_db_config(server_mysql_config_yaml)\n db_config(true)\n end\n \n end", "def signup_domain\n 'signup.' 
+ base_domain\n end", "def ssh_host_name( host )\n # This is included here for expected Space-wide policy settings.\n host[ :internet_name ] || host[ :internet_ip ] || host.name\n end", "def setup\n # Settings, common_codes, entities, menus, terminologies\n return if self.exist_initial_data\n debug_print \"Progressing setup initial data ...\"\n sys_dom = Domain.system_domain\n\n debug_print \"Finding initial data from System Domain ...\"\n ori_codes = CommonCode.where(\"domain_id = #{sys_dom.id} and parent_id is null\")\n ori_entities = Entity.where(\"domain_id = #{sys_dom.id}\")\n ori_settings = Setting.where(\"domain_id = #{sys_dom.id}\")\n\n debug_print \"Copying initial data to Domain (#{self.name})...\"\n self.clone_code_to_domain(ori_codes)\n self.clone_entity_to_domain(ori_entities)\n self.clone_to_domain(ori_settings, Setting)\n\n debug_print \"Completed setup initial data ...\"\n end", "def domain\n domain = request.host\n domain << \":#{request.port}\" unless [80, 443].include?(request.port)\n domain\n end", "def create_domain\n unless domain_exists?\n require 'chef/win32/version'\n version = Chef::ReservedNames::Win32::Version.new\n\n Chef::Log.info(\"Configuring network interface settings and creating domain\")\n if version.windows_server_2012?\n code =<<-EOH\n#{network_interface_code}\n\t\t\t\t$DCPromoFile = @\"\n\t\t\t\t[DCINSTALL]\n\t\t\t\tInstallDNS=yes\n\t\t\t\tNewDomain=forest\n\t\t\t\tNewDomainDNSName=#{new_resource.dns_name}\n\t\t\t\tDomainNetBiosName=#{new_resource.netbios_name}\n\t\t\t\tSiteName=#{new_resource.site_name}\n\t\t\t\tReplicaorNewDomain=domain\n\t\t\t\tForestLevel=5\n\t\t\t\tDomainLevel=5\n\t\t\t\tConfirmGC=Yes\n\t\t\t\tSafeModeAdminPassword=\"#{new_resource.restore_mode_password}\"\n\t\t\t\tRebootonCompletion=Yes\n\t\t\t\t\"@\n\t\t\t\t$DCPromoFile | out-file c:/dcpromoanswerfile.txt -Force\n\t\t\t\tdcpromo.exe /unattend:c:/dcpromoanswerfile.txt\n EOH\n elsif version.windows_server_2012_r2?\n code =<<-EOH\n#{network_interface_code}\n\t\t\t\tInstall-ADDSForest -DomainName #{new_resource.dns_name} -SafeModeAdministratorPassword (convertto-securestring '#{new_resource.restore_mode_password}' -asplaintext -force) -DomainMode Win2012R2 -DomainNetbiosName #{new_resource.netbios_name} -ForestMode Win2012R2 -Confirm:$false -Force\n\t\t\t\tStop-Process -ProcessName sshd -force -ErrorAction SilentlyContinue\n EOH\n # cmd = powershell_out(\"Install-ADDSForest -DomainName #{new_resource.dns_name} -SafeModeAdministratorPassword (convertto-securestring '#{new_resource.restore_mode_password}' -asplaintext -force) -DomainMode Win2012R2 -DomainNetbiosName #{new_resource.netbios_name} -ForestMode Win2012R2 -Confirm:$false -Force\")\n end\n Chef::Log.info(\"Creating Active Directory Domain #{new_resource.dns_name}\")\n cmd = powershell_out(code)\n kill_ssh\n Chef::Application.fatal!(\"Failed to create Active Directory Domain #{new_resource.dns_name}\") if cmd.exitstatus != 0\n reboot \"Active Directory Domain #{new_resource.dns_name} created\" do\n action :reboot_now\n reason \"Active Directory Domain #{new_resource.dns_name} created\"\n end\n end\nend", "def setup_dns(node)\n # Set up /etc/hosts\n node.vm.provision \"setup-hosts\", :type => \"shell\", :path => \"ubuntu/vagrant/setup-hosts.sh\" do |s|\n s.args = [\"enp0s8\", node.vm.hostname]\n end\n # Set up DNS resolution\n node.vm.provision \"setup-dns\", type: \"shell\", :path => \"ubuntu/update-dns.sh\"\nend", "def swap_session_domain\n session_options[:original_session_domain] = session_options[:session_domain] || 
session_options[:domain]\n session_options[:session_domain] = session_options[:domain] = parse_session_domain unless request.forwarded_hosts.empty?\n begin\n yield\n ensure\n session_options[:session_domain] = session_options[:domain] = session_options[:original_session_domain]\n end\n end", "def host; end", "def host; end", "def host; end", "def host; end" ]
[ "0.60419273", "0.59539175", "0.5933824", "0.59235036", "0.592104", "0.5908033", "0.5894757", "0.5872454", "0.5823063", "0.5801983", "0.578535", "0.57543963", "0.5749146", "0.57369584", "0.56678265", "0.56541187", "0.5648907", "0.5644008", "0.5643464", "0.5642489", "0.5607054", "0.5607054", "0.56065613", "0.55919194", "0.5591422", "0.55882794", "0.5587124", "0.55685705", "0.5568232", "0.556764", "0.55676234", "0.5551647", "0.5547221", "0.5531058", "0.5530908", "0.54965144", "0.54965144", "0.5487653", "0.5487653", "0.54817617", "0.5480481", "0.547722", "0.54727286", "0.546718", "0.5462257", "0.5458794", "0.5448389", "0.54373556", "0.54362303", "0.54356885", "0.5434249", "0.5414958", "0.5413656", "0.54066074", "0.5396929", "0.5387787", "0.5381359", "0.53752595", "0.5375113", "0.5362163", "0.5347137", "0.533838", "0.53139853", "0.53139853", "0.5305277", "0.53033984", "0.5302001", "0.52929217", "0.5288545", "0.5280278", "0.52755916", "0.526782", "0.5265915", "0.5256672", "0.5252107", "0.5243415", "0.5233909", "0.52315915", "0.5224869", "0.5224869", "0.5224869", "0.52201366", "0.5218539", "0.5210681", "0.5196698", "0.518521", "0.518521", "0.51824504", "0.5179569", "0.51756394", "0.5167596", "0.516048", "0.51600873", "0.51590407", "0.5146971", "0.5146518", "0.5142175", "0.5142175", "0.5142175", "0.5142175" ]
0.64611936
0
Create the domain, and all its dependencies
def destroy_domain(domain) debug("Destroying domain #{domain.id}") domain.halt if domain.active debug("Removing volumes for domain #{domain.id}") volume_cleanup(domain) domain.destroy end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_agent_domain\n return false unless validate_params\n puts '########## CREATING DOMAIN ##########'\n dme.create_domain(params[:domain])\n puts '########## CREATING DEFAULT RECORDS ##########'\n create_default_records\n puts '########## CREATING ADDITIONAL RECORDS ##########'\n create_additional_records\n puts '########## RENDERING DATA TO CLIENT##########'\n show_domain\n end", "def new\n @domain = Domain.new\n end", "def create_domain\n debug(\"Creating domain #{domain_name}\")\n debug(\"Using options: #{domain_options}\")\n domain = client.servers.create(domain_options)\n prepare_domain(domain)\n domain\n end", "def new_domain(domain)\n d = Domain.new\n d.name = domain\n d.type = \"NATIVE\"\n\n d.save\n return d\n end", "def create\n # create registrant -> create order -> create domain\n @order = Order.create(user_id:current_user.id)\n @order.save\n\n if @order.save\n puts \"order created with id #{@order.id}\"\n else\n puts 'order creation failed'\n end\n\n @domain = current_user.domains.new(domain_params)\n\n @domain.user_id = current_user.id\n @domain.registrant_id = Registrant.last.id\n @domain.order_id = @order.id\n\n # create an order first\n respond_to do |format|\n if @domain.save\n format.html { redirect_to @domain, notice: 'Domain was successfully created.' }\n format.json { render :show, status: :created, location: @domain }\n else\n format.html { render :new }\n format.json { render json: @domain.errors, status: :unprocessable_entity }\n end\n end\n end", "def start\n\n\t\tdebug \"Starting domain %s\" % [resource[:name]]\n\n\t\tif exists? && status != \"running\"\n\t\t\tdom.create # Start the domain\n\t\telsif status == \"absent\"\n\t\t\tinstall\n\t\tend\n\n\tend", "def create_types\n\t[Domain]\nend", "def create_domain(domain)\n raise MogileFS::ReadOnlyError if readonly?\n res = @backend.create_domain :domain => domain\n res ? res['domain'] : nil\n end", "def start\n\n\t\tdebug \"Starting domain %s\" % [resource[:name]]\n\n\t\tif exists? 
&& status == \"stopped\"\n\t\t\tdom.create # Start the domain\n\t\telse\n\t\t\tinstall\n\t\tend\n\n\tend", "def build(domain_name)\n domain = Domain.new(:name => domain_name,\n :ttl => self.ttl,\n :authority_type => Domain::MASTER)\n\n record_templates.dup.each do |template|\n record = template.build(domain_name)\n\n domain.records << record\n domain.soa_record = record if record.is_a?(SOA)\n end\n\n domain\n end", "def create\n @domain = Domain.new(domain_params)\n\n if @domain.save\n render json: @domain, status: :created, location: @domain\n else\n render json: @domain.errors, status: :unprocessable_entity\n end\n end", "def domain_create(args)\n response = send_request(domain_create_xml(args))\n\n get_result(:xml => response, :callback => :domain_create_process)\n end", "def setup\n # Settings, common_codes, entities, menus, terminologies\n return if self.exist_initial_data\n debug_print \"Progressing setup initial data ...\"\n sys_dom = Domain.system_domain\n\n debug_print \"Finding initial data from System Domain ...\"\n ori_codes = CommonCode.where(\"domain_id = #{sys_dom.id} and parent_id is null\")\n ori_entities = Entity.where(\"domain_id = #{sys_dom.id}\")\n ori_settings = Setting.where(\"domain_id = #{sys_dom.id}\")\n\n debug_print \"Copying initial data to Domain (#{self.name})...\"\n self.clone_code_to_domain(ori_codes)\n self.clone_entity_to_domain(ori_entities)\n self.clone_to_domain(ori_settings, Setting)\n\n debug_print \"Completed setup initial data ...\"\n end", "def create_domain(domain_name)\n Adapter.create_domain(domain_name)\n self[domain_name]\n end", "def create\n @domain = DOMAIN.new(params[:domain])\n @domain.current_user = current_user\n\n respond_to do |format|\n if @domain.save\n flash[:notice] = 'Domain was successfully created.'\n format.html { redirect_to(domain_url(@domain.id)) }\n format.xml { render :xml => @domain, :status => :created, :location => domain_url(@domain.id) + \".xml\" }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @domain.errors, :status => :unprocessable_entity }\n end\n end\n end", "def domain_create(domain, fields)\n unless ([ :period, :registrant, :admin, :tech, :billing, :nservers ] - fields.keys).empty?\n raise ArgumentError, \"Required fields not found\"\n end\n query :domain_register, {\n domain: domain,\n period: (fields[:period] * 12),\n owner_c: fields[:registrant],\n admin_c: fields[:admin],\n tech_c: fields[:tech],\n billing_c: fields[:billing],\n ns_list: fields[:nservers].join(':')\n }\n end", "def create_dependancies\n create_course_plan()\n end", "def run\n\tsuper\n \n ## Read the TLD List and pre-process it to remove crud\n f = File.open Rails.root + \"data/tld.list\"\n tld_list_with_comments = f.read\n tld_list = []\n tld_list_with_comments.each_line {|line| tld_list << line unless line =~ /^\\/\\// }\n tld_list.each {|tld| tld.chomp!}\n \n tld_list = [\"com\",\"net\", \"org\"]\n \n ###\n # Organization\n ###\n \n\tif @object.kind_of? 
Organization\n\t\tif @object.name\n\t\t tld_list.each do |tld|\n \n\t\t\tbegin\n\t\t\t\t domain = \"#{@object.name}.#{tld}\"\t\t\n\t\t\t\t\tresolved_address = Resolv.new.getaddress(domain)\n\t\t\t\n\t\t\t\t\tif resolved_address\n puts \"Resolved Address: #{resolved_address}\"\n create_object Device, :ip_address => resolved_address, \n :name => domain, \n :organization => @object\n create_object Domain, :name => domain, \n :organization => @object\n end\n \n \t rescue Exception => e\n\t\t\t puts e\n\t\t\t end\n\t\t end\n \n\t\telse\n\t\t\tputs \"Error, object has no name to look up!\"\n\t\tend\n\tend\n\n ###\n # Domain!\n ### \n\n\tif @object.kind_of? Domain\n\t\tif @object.name\n\t\t tld_list.each do |tld|\n\n\t\t\tbegin\n\t\t\t\t domain = \"#{@object.name}.#{tld}\"\t\t\n\t\t\t\t\tresolved_address = Resolv.new.getaddress(domain)\n\n\t\t\t\t\tif resolved_address\n create_object Device, :ip_address => resolved_address, \n :name => domain, \n :domain_id => @object.id,\n :organization_id => @object.organization.id\n end\n\n \t rescue Exception => e\n\t\t\t puts e\n\t\t\t end\n\t\t end\n\n\t\telse\n\t\t\traise \"Error, object has no name to look up!\"\n\t\tend\n\t\n\t\tnil\n\tend\nend", "def create\n Puppet.debug \"starting create #{self.class.to_s}\"\n dns_service = get_dns_service(get_fqdn)\n dns_service.create_record(get_fqdn, get_type, get_ip) if dns_service != nil\n Puppet.debug \"done with create #{self.class.to_s}\"\n end", "def initialize(domain); @domain = domain; end", "def create\n authorize! :create, BudgetDomain\n service = BudgetDomainCreator.call(create_params)\n @budget_domain = service.budget_domain\n if service.status == :ok\n redirect_to @budget_domain, notice: 'Budget domain was successfully created.'\n else\n render :new\n end\n end", "def create_domain(opts = {})\n data, _status_code, _headers = create_domain_with_http_info(opts)\n data\n end", "def create\n create_checkpoints\n create_config_base\n generate_deploy_files\n generate_hiera_template\n end", "def initialize(domain)\n\t\t\t@domain = domain\n\t\t\t@sdb = AwsSdb::Service.new(:logger=>LogDuck.new)\n\t\t\tcreate_domain unless domain_exist?\n\t\tend", "def create\n\t\t@domain = Domain.new(:hostname => params[:hostname])\n\n\t\t# Attempt to save the domain, and return the appropriate JSON or Error\n\t\trespond_to do |format|\n\t\t\tif @domain.save\n\t\t\t\tformat.json { render json: @domain, status: :created }\n\t\t\telse\n\t\t\t\tformat.json { render json: @domain.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\t\t\n\t\t# Fetch the hostname IP address and update the record in a new thread\n\t\tt1=Thread.new{fetch_origin_ip()}\n\t\tt1.join\n\tend", "def create_domain_with_http_info(create_domain_options, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DomainControllerApi.create_domain ...'\n end\n # verify the required parameter 'create_domain_options' is set\n if @api_client.config.client_side_validation && create_domain_options.nil?\n fail ArgumentError, \"Missing the required parameter 'create_domain_options' when calling DomainControllerApi.create_domain\"\n end\n # resource path\n local_var_path = '/domains'\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['*/*'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = 
@api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:body] || @api_client.object_to_http_body(create_domain_options) \n\n # return_type\n return_type = opts[:return_type] || 'DomainDto' \n\n # auth_names\n auth_names = opts[:auth_names] || ['API_KEY']\n\n new_options = opts.merge(\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DomainControllerApi#create_domain\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def new_spec\n Libvirt::Spec::Domain.new\n end", "def new_spec\n Libvirt::Spec::Domain.new\n end", "def create\n sys_bds_array = BridgeDomain.bd_ids_to_array(system_bridge_domain)\n inp_bds_array = BridgeDomain.bd_ids_to_array(@bd_ids)\n if (inp_bds_array - sys_bds_array).any?\n add_bds = BridgeDomain.bd_list_to_string(inp_bds_array - sys_bds_array)\n config_set('bridge_domain', 'system_bridge_domain', oper: 'add',\n bd: add_bds)\n end\n config_set('bridge_domain', 'create', bd: @bd_ids)\n end", "def create_domain\n unless domain_exists?\n require 'chef/win32/version'\n version = Chef::ReservedNames::Win32::Version.new\n\n Chef::Log.info(\"Configuring network interface settings and creating domain\")\n if version.windows_server_2012?\n code =<<-EOH\n#{network_interface_code}\n\t\t\t\t$DCPromoFile = @\"\n\t\t\t\t[DCINSTALL]\n\t\t\t\tInstallDNS=yes\n\t\t\t\tNewDomain=forest\n\t\t\t\tNewDomainDNSName=#{new_resource.dns_name}\n\t\t\t\tDomainNetBiosName=#{new_resource.netbios_name}\n\t\t\t\tSiteName=#{new_resource.site_name}\n\t\t\t\tReplicaorNewDomain=domain\n\t\t\t\tForestLevel=5\n\t\t\t\tDomainLevel=5\n\t\t\t\tConfirmGC=Yes\n\t\t\t\tSafeModeAdminPassword=\"#{new_resource.restore_mode_password}\"\n\t\t\t\tRebootonCompletion=Yes\n\t\t\t\t\"@\n\t\t\t\t$DCPromoFile | out-file c:/dcpromoanswerfile.txt -Force\n\t\t\t\tdcpromo.exe /unattend:c:/dcpromoanswerfile.txt\n EOH\n elsif version.windows_server_2012_r2?\n code =<<-EOH\n#{network_interface_code}\n\t\t\t\tInstall-ADDSForest -DomainName #{new_resource.dns_name} -SafeModeAdministratorPassword (convertto-securestring '#{new_resource.restore_mode_password}' -asplaintext -force) -DomainMode Win2012R2 -DomainNetbiosName #{new_resource.netbios_name} -ForestMode Win2012R2 -Confirm:$false -Force\n\t\t\t\tStop-Process -ProcessName sshd -force -ErrorAction SilentlyContinue\n EOH\n # cmd = powershell_out(\"Install-ADDSForest -DomainName #{new_resource.dns_name} -SafeModeAdministratorPassword (convertto-securestring '#{new_resource.restore_mode_password}' -asplaintext -force) -DomainMode Win2012R2 -DomainNetbiosName #{new_resource.netbios_name} -ForestMode Win2012R2 -Confirm:$false -Force\")\n end\n Chef::Log.info(\"Creating Active Directory Domain #{new_resource.dns_name}\")\n cmd = powershell_out(code)\n kill_ssh\n Chef::Application.fatal!(\"Failed to create Active Directory Domain #{new_resource.dns_name}\") if cmd.exitstatus != 0\n reboot \"Active Directory Domain #{new_resource.dns_name} created\" do\n action :reboot_now\n reason \"Active Directory Domain #{new_resource.dns_name} created\"\n end\n end\nend", "def build\n raise 
FedoraMigrate::Errors::MigrationError, \"No qualified targets found in #{source.pid}\" if target.nil?\n\n # create target, and apply depositor metadata\n obj = target.new\n\n obj.apply_depositor_metadata @depositor_utln\n obj.visibility = Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC\n\n user = User.find_by_user_key(@depositor_utln)\n# CurationConcerns::Workflow::ActivateObject.call(target: obj, comment: 'activate object', user: user)\n\n create_and_add_payload(obj, @payload_primary, @depositor_utln)\n\n #deal with 2 primary datastream objects, storing second object in a new file set\n create_and_add_payload(obj, @payload_secondary, @depositor_utln) unless @payload_secondary.nil?\n\n #handle a case of bad hand created data on old records\n create_and_add_payload(obj, \"ARCHIVAL_SOUND\", @depositor_utln) if @payload_primary == \"ARCHIVAL_WAV\"\n\n # back up old data\n #create_and_add_fcrepo3_set obj\n\n process_desc_metadata obj\n process_admin_metadata obj\n process_technical_metadata obj\n process_relsext_metadata obj\n\n# obj.save\n\n process_collection_metadata obj\n\n active_workflow = Sipity::Workflow.find(2)\n Sipity::Entity.create!(proxy_for_global_id: obj.to_global_id.to_s,\n workflow: active_workflow,\n workflow_state: nil)\n\n obj\n end", "def test_service_creation\n JavaBuildpack::Container::Payara::ServiceBindingsHandler.create_service_definitions_from_file_set(\n @payara_complete_domain_configs_yml,\n @config_cache_root,\n @payara_complete_domain_configs_props)\n JavaBuildpack::Container::Payara::ServiceBindingsHandler.create_service_definitions_from_bindings(\n @app_services_config,\n @payara_complete_domain_configs_props)\n\n log('Done generating Domain Configuration Property file for WLST: '\\\n \"#{@payara_complete_domain_configs_props}\")\n log('--------------------------------------')\n end", "def create\n @domain = Domain.new(domain_params.merge({:user_id => current_user.id}))\n respond_to do |format|\n if @domain.save\n format.html { redirect_to @domain, notice: 'Domain was successfully created.' 
}\n format.json { render action: 'show', status: :created, location: @domain }\n else\n format.html { render action: 'new' }\n format.json { render json: @domain.errors, status: :unprocessable_entity }\n end\n end\n end", "def create(name, org_guid, domain_wildcard, space_guid = nil)\n domain_exist = @client.domains.find { |domain|\n domain.name == name }\n\n org = @client.organization(org_guid)\n\n # if domain doesn't exist will create it and put the organization as owning org\n if (domain_exist == nil)\n domain = @client.domain\n domain.owning_organization = org\n domain.name = name\n domain.wildcard = domain_wildcard\n domain.create!\n else\n domain = domain_exist\n existing_org_domains = org.domains\n\n # if domain exist will check that the org and domain exists, if not will add it\n if (domain.owning_organization != org)\n raise CFoundry::DomainInvalid, \"Domain already exists\"\n elsif (!existing_org_domains.include?(domain))\n existing_org_domains << domain\n org.domains = existing_org_domains\n\n org.update!\n end\n end\n\n # if a space guid is provided will add a connection between domain and space\n if space_guid != nil\n space = @client.space(space_guid)\n existing_space_domains = space.domains\n if (!existing_space_domains.include?(domain))\n existing_space_domains << domain\n space.domains = existing_space_domains\n\n space.update!\n end\n end\n end", "def create_dependencies\n create_course_bin()\n\n #create 8 default semesters\n Semester.create_semesters(start_sem, start_year.to_i, 8) {|semester| semesters.concat semester} \n end", "def create_dependencies(con)\n @resolving_dependencies = true\n dependencies.each do |_, d|\n fail CircularDependencyError.new(name, d.name) if d.resolving_dependencies\n d.create_dependencies(con)\n d.create_or_update!(con)\n end\n @resolving_dependencies = false\n end", "def create\n @crm_domain = Crm::Domain.new(crm_domain_params)\n\n respond_to do |format|\n if @crm_domain.save\n format.html { redirect_to @crm_domain, notice: 'Domain was successfully created.' 
}\n format.json { render :show, status: :created, location: @crm_domain }\n else\n format.html { render :new }\n format.json { render json: @crm_domain.errors, status: :unprocessable_entity }\n end\n end\n end", "def setup_dns(domain)\n# TODO should we just use the ID instead of the full href?\n [email protected]\n @dns = SharedDns.new(domain)\n raise \"Unable to reserve DNS\" unless @dns.reserve_dns(owner)\n @dns.set_dns_inputs(@deployment)\n end", "def setup\n @subject = Fog::Compute[:google].servers\n @factory = ServersFactory.new(namespaced_name)\n @servers = ServersFactory.new(namespaced_name)\n @disks = DisksFactory.new(namespaced_name)\n end", "def create(domain)\n \n r = whoisgem(domain) # returns the ruby whois response aka the Whois::Record object\n \n # preparing the contact objects\n registrant = Contact.new(r.registrant_contact)\n admin = Contact.new(r.admin_contact)\n tech = Contact.new(r.technical_contact)\n \n # preparing the nameservers\n dns = Nameservers.new(r.nameservers)\n \n # building the record that will be inserted in couch\n #@_id = domain\n @domain_id = r.domain_id\n @domain_name = r.domain\n @status = r.status\n @available = r.available?\n @registered = r.registered?\n @created_on = r.created_on\n @updated_on = r.updated_on\n @expires_on = r.expires_on\n @last_update = r.last_update\n @registrar = Registrar.new(r.registrar.id,\n r.registrar.name,\n r.registrar.organization)\n @registrant = registrant\n @admin = admin\n @technical = tech\n @nameservers = dns\n \n # not implemented yet\n @watchlist = nil\n \n CouchPotato.database.save_document! self\n \n # the boolean is an indicator wether the insertion succeeded or not\n # return boolean\n end", "def standard_domain_setup(domain)\n set_appserver_domain_template(domain)\n set_user_prefs_dir(domain)\n set_tmpdir(domain)\n disable_update_tool(domain)\n enable_implicit_cdi(domain)\n setup_default_admin(domain)\n add_default_file_realm(domain, 'file')\n set_default_auth_realm(domain, 'file')\n disable_classloading_delegation(domain)\n disable_autodeploy(domain)\n disable_dynamic_reload(domain)\n disable_non_portable_jndi_names(domain)\n domain.ports << 8080\n end", "def create\n in_directory do\n raise(\"Please use a specific Condom class, not Base.\")\n end\n end", "def setup_dns(domain)\n # TODO should we just use the ID instead of the full href?\n [email protected]\n @dns = SharedDns.new(domain)\n raise \"Unable to reserve DNS\" unless @dns.reserve_dns(owner)\n @dns.set_dns_inputs(@deployment)\n end", "def setup_dns(domain)\n # TODO should we just use the ID instead of the full href?\n [email protected]\n @dns = SharedDns.new(domain)\n raise \"Unable to reserve DNS\" unless @dns.reserve_dns(owner)\n @dns.set_dns_inputs(@deployment)\n end", "def check_and_create_third_domain(metadata)\n if (metadata.length == 13)\n @domain_z = DataDomain.new(metadata[9], metadata[10], \\\n metadata[11], metadata[12])\n else\n @domain_z = nil\n end\n end", "def register domain_model_instnace\n\t\t@domain_model_instnaces.push domain_model_instnace\n\tend", "def create\n # TODO: refactor models so that externaluserdomain is in portal namespace? 
\n # @external_user_domain = Portal::ExternalUserDomain.new(params[:external_user_domain])\n @external_user_domain = ExternalUserDomain.new(params[:external_user_domain])\n respond_to do |format|\n if @external_user_domain.save\n flash[:notice] = 'Portal::ExternalUserDomain was successfully created.'\n format.html { redirect_to(@external_user_domain) }\n format.xml { render :xml => @external_user_domain, :status => :created, :location => @external_user_domain }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @external_user_domain.errors, :status => :unprocessable_entity }\n end\n end\n end", "def run\n super\n\n entity_name = _get_entity_name\n check_and_create entity_name\n\n # trello strips out periods, so handle dns records differently\n if _get_entity_type_string == \"Domain\"\n check_and_create entity_name.split(\".\").first\n check_and_create entity_name.gsub(\".\",\"\")\n end\n\n end", "def domain_create(args)\n raise ArgumentError, \"You can't create a domain with ns records, you must do an update afterwards\" if args.key?(:ns)\n raise ArgumentError, \"You can't create a domain with ds or key records, you must do an update afterwards\" if args.key?(:dsData) || args.key?(:keyData)\n super\n end", "def create\n sys_bds_array = BridgeDomainVNI.string_to_array(system_bridge_domain)\n if (@bd_ids_list - sys_bds_array).any?\n add_bds = Utils\n .array_to_str((@bd_ids_list - sys_bds_array), false)\n config_set('bridge_domain_vni', 'system_bridge_domain', oper: 'add',\n bd: add_bds)\n end\n config_set('bridge_domain_vni', 'create', bd: @bd_ids)\n end", "def initialize(name, domain)\n raise Exception.new(\"#{name} is not a valid service name\") unless valid_directory_name(name.to_s)\n \n @name = name\n @booted = false\n @domain = domain\n @full_name = \"#{domain.name}::#{@name}\"\n @path = \"#{SERVICES_PATH}/#{domain.name}/#{@name}\"\n @port_in = $port_start+=1\n @port_out = $port_start+=1\n @status = \"stopped\"\n @runtime = JJRuby.newInstance()\n \n # Create the domain directory if not present\n FileUtils.mkdir_p(@path, :mode => 0755)\n \n # And finally set the meta-data for the service\n @meta_data = ServiceMetaData.new(self)\n ContainerLogger.debug \"Service added #{domain.name}::#{name}\" \n end", "def create_domain_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DomainApi.create_domain ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling DomainApi.create_domain\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling DomainApi.create_domain\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/domain'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = 
@api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['comment'] = opts[:'comment'] if !opts[:'comment'].nil?\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'DomainResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"DomainApi.create_domain\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DomainApi#create_domain\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def build_domain(user_storage)\n Complexy::Domain::User.new(user_storage.id, user_storage.first_name, user_storage.last_name)\n end", "def initialize\n @top_level = Domains.new\n end", "def initialize\n @domains = []\n end", "def run\n super\n\n # Set the dns_record to the appropriate suffix\n dns_record = @entity.name\n\n # Handle cases of *.test.com (pretty common when grabbing\n # DNSRecords from SSLCertificates)\n if dns_record[0..1] == \"*.\"\n dns_record = dns_record[2..-1]\n end\n \n if @options[:subdomain_list]\n subdomain_list = @options['subdomain_list']\n else\n # use the deepmagic list\n subdomain_list = IO.readlines(\"#{Rails.root}/data/dns_sub.list\")\n # Add a builtin domain list \n #subdomain_list = [\"mx\", \"mx1\", \"mx2\", \"www\", \"ww2\", \"ns1\", \"ns2\", \"ns3\", \"test\", \"mail\", \"owa\", \"vpn\", \"admin\",\n # \"gateway\", \"secure\", \"admin\", \"service\", \"tools\", \"doc\", \"docs\", \"network\", \"help\", \"en\", \"sharepoint\", \"portal\",\n # \"public\", \"private\", \"pub\", \"zeus\", \"mickey\", \"time\", \"web\", \"it\", \"my\", \"photos\", \"safe\", \"download\", \"dl\", \n # \"search\", \"staging\"]\n end\n\n @task_logger.good \"Using subdomain list: #{subdomain_list}\"\n\n begin\n # Check for wildcard DNS, modify behavior appropriately. (Only create entities\n # when we know there's a new host associated)\n if Resolv.new.getaddress(\"noforkingway#{rand(100000)}.#{dns_record}\")\n wildcard_domain = true \n @task_logger.error \"WARNING! Wildcard domain detected, only saving validated domains/hosts.\"\n end\n rescue Resolv::ResolvError\n @task_logger.good \"Looks like no wildcard dns. 
Moving on.\"\n end\n\n subdomain_list.each do |sub|\n sub = sub.chomp\n begin\n # Calculate the domain name\n if @options[:mashed_domains]\n # blatently stolen from HDM's webinar on password stealing, try without a dot to see\n # if this domain has been hijacked by someone - great for finding phishing attempts\n domain = \"#{sub}#{dns_record}\"\n else \n domain = \"#{sub}.#{dns_record}\"\n end\n\n # Try to resolve\n resolved_address = Resolv.new.getaddress(domain)\n @task_logger.good \"Resolved Address #{resolved_address} for #{domain}\" if resolved_address\n \n # If we resolved, create the right entities\n if resolved_address\n unless wildcard_domain\n @task_logger.good \"Creating domain and host entities...\"\n # create new host and domain entitys\n d = create_entity(Entities::DnsRecord, {:name => domain })\n h = create_entity(Entities::Host, {:name => resolved_address})\n else\n # Check to make sure we don't already have this host, if we don't \n # we probably want to save the domain as a new entity (and the host)\n if Entities::Host.where(:name => resolved_address).count == 0\n d = create_entity(Entities::DnsRecord, {:name => domain })\n h = create_entity(Entities::Host, {:name => resolved_address})\n end\n end\n end\n rescue Exception => e\n @task_logger.error \"Hit exception: #{e}\"\n end\n end\n end", "def domain; end", "def domain; end", "def domain; end", "def domain; end", "def setup\n # Create a standard project (30 days)\n @Project1 = Project.new(Date.new(2000, 1, 1), Date.new(2000, 1, 10))\n # Create a standard calendar (30 days, 1 hour per day)\n @Calendar1 = {}\n 10.times do |iIdx|\n @Calendar1[Date.new(2000, 1, iIdx+1)] = 1\n end\n # Create standard resources\n @Resource1 = Resource.new('R1', @Calendar1)\n end", "def initialize(name, container)\n raise Exception.new(\"#{name} is not a valid domain name\") unless valid_directory_name(name.to_s)\n \n @name = name\n @services = Hash.new\n @container = container\n \n # Create the domain directory if not present\n FileUtils.mkdir_p(\"#{SERVICES_PATH}/#{@name}\", :mode => 0755)\n \n ContainerLogger.debug \"Domain added #{name}\"\n end", "def create_everything\n create_users\n create_user_keys\n create_comments\n create_filters\n create_columns\n create_organizations\n create_approvals\n create_whitelists\n create_user_key_columns\n create_user_key_organizations\n end", "def process_domains\n domains.each do |domain|\n params = options\n params[:host] = configuration.host\n params[:server] = servers[domain][\"server\"]\n compiler = YMDP::Compiler::Base.new(domain, git_hash, params)\n \n compiler.process_all\n end\n end", "def create(name, attributes)\n attributes = attributes.dup\n\n # Add the objectclasses\n attributes[\"objectClass\"] = objectclasses.collect { |o| o.to_s }\n attributes[\"objectClass\"] << \"top\" unless attributes[\"objectClass\"].include?(\"top\")\n\n attributes[rdn.to_s] = [name]\n\n # Generate any new values we might need.\n generate(attributes)\n\n # And create our resource.\n connect { |conn| conn.add dn(name), attributes }\n end", "def create!\n create || raise(\"Errors in the package creation\")\n end", "def setup\n @subject = Fog::Compute[:google].servers\n @factory = ServersFactory.new(namespaced_name)\n end", "def setup\n @subject = Fog::Compute[:google].servers\n @factory = ServersFactory.new(namespaced_name)\n end", "def create\n Puppet.debug \"starting create #{self.class.to_s}\"\n dns_service = get_dns_service(get_zone)\n dns_service.create_zone(get_zone, get_email, get_ttl) if dns_service != nil\n 
Puppet.debug \"done with create #{self.class.to_s}\"\n end", "def prepare_domain(domain)\n domain.start unless domain.active\n wait_for_ip_address(domain)\n end", "def create\n @timestamp = '%10.6f' % Time.now.to_f\n @handle = @timestamp.sub('.', '')\n\n @registrant = Registrant.new(registrant_params)\n @build_detail_temp = @registrant.build_detail(registrant_params[:detail_attributes])\n\n @registrant.handle = @handle\n\n # raise @reg_temp.inspect\n @order = Order.create(user_id:current_user.id)\n @order.save\n\n # @domain = current_user.domains.new(domain_params)\n @build_domain_temp = @registrant.build_domain(registrant_params[:domain_attributes])\n\n @build_domain_temp.user_id = current_user.id\n @build_domain_temp.registrant_id = @registrant.id\n @build_domain_temp.order_id = @order.id\n\n respond_to do |format|\n if @registrant.save\n @build_detail_temp.save\n @build_domain_temp.save\n\n format.html { redirect_to domains_path, notice: 'Registrant was successfully created.' }\n format.json { render :show, status: :created, location: @registrant }\n else\n format.html { render :new }\n format.json { render json: @registrant.errors, status: :unprocessable_entity }\n end\n end\n end", "def initialize\n @domains = {}\n content = File.read \"#{Driver.config_dir}/root_registry_sea1.json\"\n store_domain(Driver.parse_json(content)['resolveDomainResponse'], 'urn:theplatform:auth:root')\n end", "def initialize(domain)\n super()\n\n @domain = domain\n\n @f = Gem::SpecFetcher.fetcher\n\n @always_install = []\n @ignore_dependencies = false\n @ignore_installed = false\n @local = {}\n @local_source = Gem::Source::Local.new\n @remote_set = Gem::Resolver::BestSet.new\n @force = false\n @specs = {}\n end", "def create(state)\n info(\"Creating instance #{instance.name}\")\n return if state[:server_id]\n\n domain = create_domain\n state[:server_id] = domain.id\n state[:hostname] = domain.public_ip_address\n\n instance.transport.connection(state).wait_until_ready\n\n info(\"Libvirt instance #{domain.name} created.\")\n end", "def create\n @domain = Domain.new(params[:domain])\n# domain_regex = /[a-z0-9]*\\.[a-z0-9]*/\n# @domain.name = @domain.name.match(domain_regex)[0] \n respond_to do |format|\n if @domain.save\n format.html { redirect_to @domain, notice: 'Domain was successfully created.' 
}\n format.json { render json: @domain, status: :created, location: @domain }\n else\n format.html { render action: \"new\" }\n format.json { render json: @domain.errors, status: :unprocessable_entity }\n end\n end\n end", "def clone_to_domain(objs, resource)\n objs.each do |obj|\n hash = obj.attributes\n hash[:id] = nil\n hash[:domain_id] = self.id\n resource.create!(hash)\n end\n end", "def create_fedora_objects(attributes)\n fc = factory_class(model)\n f = fc.new(attributes, files_directory)\n f.run\n end", "def domain_class\n self.class.const_get(:DSL)\n end", "def create\n @email_domain = Email::Domain.new(params[:email_domain])\n\n respond_to do |format|\n if @email_domain.save\n format.html { redirect_to(@email_domain, :notice => 'Domain was successfully created.') }\n format.xml { render :xml => @email_domain, :status => :created, :location => @email_domain }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @email_domain.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create_schema\n Apartment::Database.create(subdomain)\n end", "def create\n create_hiera_template\n create_manifests_node\n create_node_checkpoint\n end", "def set_domain\n if check_fields_google_domain? # google domain\n @domain.push(:Google)\n elsif check_fields_nas_domain? # NAS domain\n @domain.push(:NAS)\n else \n @domain.push(:Cross)\n end\n notify \"DOMAIN : #{@domain}\"\n end", "def new\n @domain = DOMAIN.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @domain }\n end\n end", "def create\n create_directories\n end", "def domains; end", "def create_database\n DATA[:accounts].each do |account|\n CalendarCoordinator::AccountService.create(data: account).save\n end\n\n account = CalendarCoordinator::Account.first\n DATA[:calendars].each do |calendar|\n account.add_owned_calendar(calendar)\n end\n end", "def build_domain \n unless self.domain\n self.domain = URI.parse(self.url).host \n self.save\n end\n end", "def create_domain(create_domain_options, opts = {})\n data, _status_code, _headers = create_domain_with_http_info(create_domain_options, opts)\n data\n end", "def add_domain\n url = args.shift\n create_dependency(url)\n puts \"Added #{url} to the monitoring list\"\n end", "def create_class(domain, klass, mindevcount)\n modify_class(domain, klass, mindevcount, :create)\n end", "def create_company\n company = Company.create!()\n company\n end", "def validate\n @domains.each do |d|\n raise 'domain definition error' unless d.class == Domain\n end\n @domains.map(&:validate)\n end", "def setup()\n create_directories\n end", "def generate!\n controllers = (Rails.application.routes.routes.map do |route|\n controller_name = \"#{route.requirements[:controller].camelize}Controller\"\n ActiveSupport::Dependencies.ref(controller_name).get\n end).insert(0, ApplicationController).uniq\n\n valid_controllers = valid_controllers()\n valid_locales = valid_locales()\n\n controllers.each do |controller|\n controller_path = controller.controller_path\n if controller.respond_to?(:managable_content_for) && valid_controllers.include?(controller_path)\n Page.transaction do\n valid_locales.each do |locale|\n # Create Page if it does not exist yet\n page = Page.where(:controller_path => controller_path, :locale => locale).first || Page.new()\n if page.new_record?\n page.controller_path = controller_path\n page.locale = locale\n page.save!\n end\n\n # Create PageContent if it does not exist yet\n contents = (controller == ApplicationController) 
? controller.managable_layout_content_for : controller.managable_content_for\n contents.each do |key|\n if page.page_contents.where(:key => key).first.nil?\n page_content = page.page_contents.build\n page_content.key = key\n page_content.save!\n end\n end\n end\n end\n end\n end\n end", "def make; end", "def test_dynamic_do_build\r\n\t\tVCR.use_cassette('dynamic_do_build') do\r\n\t\t\t# A. get digital object\r\n\t\t\tcdo = CordraRestClient::DigitalObject.find(API_URL, \"#{CORDRA_PREFIX}/B100003484\")\r\n\t\t\t# Check object id and type\r\n\t\t\tassert_equal \"#{CORDRA_PREFIX}/B100003484\", cdo.id\r\n\t\t \tassert_equal \"DigitalSpecimen\", cdo.type\r\n\t\t\t# B. get schema\r\n\t\t\t# The schema will be used to build a DO class dinamically\r\n\t\t\tdo_schema=CordraRestClient::DigitalObject.get_schema(API_URL, cdo.type.gsub(\" \",\"%20\"))\r\n\t\t\t# check that the result is saved\r\n\t\t\tassert_equal \"object\", do_schema[\"type\"]\r\n\t\t\tassert_equal \"DigitalSpecimen\", do_schema[\"title\"]\r\n\t\t\t# C. build new class using schema\r\n\t\t\tdo_properties = do_schema[\"properties\"].keys\r\n\t\t\tdo_c = CordraRestClient::DigitalObjectFactory.create_class cdo.type.gsub(\" \",\"\"), do_properties\r\n\t\t\tnew_ds = do_c.new\r\n\t\t\t# the DO contents are a hash\r\n\t\t\tassert_equal Hash, cdo.content.class\r\n\t\t\t# assing object values in content to class\r\n\t\t\tCordraRestClient::DigitalObjectFactory.assing_attributes new_ds, cdo.content\r\n\t\t\tcdo.content.each do |field, arg|\r\n\t\t\t\tinstance_var = field.gsub('/','_')\r\n\t\t\t\tinstance_var = instance_var.gsub(' ','_')\r\n\t\t\t\tassert_equal arg, new_ds.instance_variable_get(\"@#{instance_var}\")\r\n\t\t\tend\r\n\t\tend\r\n\tend", "def domain(base_name)\n Domain.new(self, base_name, @uid)\n end", "def d(*args)\n Dependency.new(*args)\n end", "def add_dependent_entities\n (NUM_DEPENDENT_FORM_ENTITIES - @resource.creators.length).times do\n @resource.creators.build\n end\n (NUM_DEPENDENT_FORM_ENTITIES - @resource.extents.length).times do\n @resource.extents.build\n end\n (NUM_DEPENDENT_FORM_ENTITIES - @resource.resource_dates.length).times do\n @resource.resource_dates.build\n end\n (NUM_DEPENDENT_FORM_ENTITIES - @resource.resource_notes.length).times do\n @resource.resource_notes.build\n end\n (NUM_DEPENDENT_FORM_ENTITIES - @resource.subjects.length).times do\n @resource.subjects.build\n end\n end", "def save\n # Get the spec, since if we undefine the domain later, we won't be\n # able to.\n definable = spec\n\n # To modify an existing domain, we actually undefine and redefine it.\n # We can't use `set_domain` here since that will clear the `domain`\n # pointer, which we need to get the proper domain spec.\n domain.undefine if domain\n\n # At this point, assuming the virtuoso settings are correct, we\n # should have a bootable VM spec, so define it and reload the VM\n # information.\n set_domain(connection.domains.define(definable))\n end", "def test_00_create\n\t\tprintTestHeader \"Test creation of Joints without JointGroups\"\n\n\t\tassert_raises(ScriptError) { ODE::Joint::new } \n\n\t\tJointClasses.each {|klass|\n\t\t\tjoint = nil\n\n\t\t\tdebugMsg \"Create: Testing the #{klass.name} class.\"\n\t\t\tassert_raises(ArgumentError) { klass.new } \n\t\t\tassert_nothing_raised { joint = klass.new(@world) }\n\t\t\tassert_instance_of( klass, joint )\n\n\t\t\tdebugMsg \"Clobbering joint\"\n\t\t\t$stderr.flush\n\t\t\tjoint = nil\n\t\t\tcollectGarbage()\n\t\t}\n\tend" ]
[ "0.6871099", "0.6570468", "0.6418222", "0.62601084", "0.624928", "0.62451583", "0.62402195", "0.62262994", "0.6110158", "0.6078027", "0.6057667", "0.6026131", "0.60088444", "0.5985398", "0.5936049", "0.59339637", "0.5931633", "0.5931287", "0.5892692", "0.5837641", "0.5825237", "0.5782683", "0.5782335", "0.57552916", "0.56846577", "0.56810343", "0.56644255", "0.56644255", "0.5636273", "0.5585748", "0.5578443", "0.5571598", "0.5571251", "0.5569867", "0.5551853", "0.55457175", "0.5537533", "0.5521153", "0.55126756", "0.5501548", "0.5494642", "0.54908764", "0.54902315", "0.54902315", "0.5478583", "0.54762137", "0.5466387", "0.5433439", "0.5424796", "0.540056", "0.53711736", "0.5369527", "0.5350711", "0.53426504", "0.5338416", "0.53234804", "0.5295796", "0.5295796", "0.5295796", "0.5295796", "0.52846247", "0.5272446", "0.5242987", "0.52422696", "0.52347904", "0.5218785", "0.521702", "0.521702", "0.52134675", "0.5201567", "0.51853114", "0.5182635", "0.5180484", "0.5177938", "0.5170737", "0.5161191", "0.515855", "0.5154863", "0.5144222", "0.51440996", "0.513878", "0.51328915", "0.5127268", "0.51242316", "0.5109783", "0.51038635", "0.51030844", "0.5100058", "0.5096278", "0.50909066", "0.5078904", "0.5078199", "0.507498", "0.50671065", "0.5064312", "0.5060981", "0.5057235", "0.5053551", "0.5049664", "0.50490147", "0.5035157" ]
0.0
-1
Create a new volume from the source image
def clone_volume(source, target) debug("Creating Libvirt volume #{target}") debug("Cloning volume from #{source}") # Attempt to locate the target or source volume source_image = client.volumes.get(source) if source_image.name =~ /^fog-\d+/ error("Could not find target image: #{source}.") end # Clone the source volume source_image.clone_volume(target) client.volumes.all.find { |vol| vol.name == target } end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def volume_create(name)\n @log.info \"Creating volume #{name} from offering id #{DISK_OFFERING}...\"\n ret = @cloud_stack.create_volume(name, ZONE, DISK_OFFERING)\n id = ret[\"createvolumeresponse\"][\"jobid\"]\n wait_for_job id\n vol_id = ret[\"createvolumeresponse\"][\"id\"]\n @log.info \"Created volume id: #{vol_id}\"\n vol_id\n end", "def volume_create_from_snap(source, name, snapshot_id)\n retries = 3\n begin \n @log.info \"Creating volume #{name} from snapshot id #{snapshot_id}...\"\n ret = @cloud_stack.create_volume(name, ZONE, nil, snapshot_id)\n id = ret[\"createvolumeresponse\"][\"jobid\"]\n wait_for_job id\n rescue Exception => e\n retries -= 1\n if retries > 0\n @log.error \"Failed. #{e.message}. Retrying...\"\n retry\n end\n raise e\n end\n vol_id = ret[\"createvolumeresponse\"][\"id\"]\n @log.info \"Created volume id: #{vol_id}\"\n vol_id\n end", "def create_volume(options)\n # Creating the volume is part of the server creation\n end", "def create_volume(options)\n # Creating the volume is part of the server creation\n end", "def create_volume(options = {})\n options[:capacity] = options[:capacity] * GB if options[:capacity] < 100000\n vol = pool.create_volume_xml(Volume.to_xml(options))\n Volume.new vol, self\n end", "def volume_from(name)\n volumes << VolumeFrom.new(name)\n end", "def create_volume(size_or_snapshot_id, options = {})\n options = {:device => '/dev/sdh'}.merge(options)\n if size_or_snapshot_id.is_a?(Numeric)\n volume = @ec2.create_volume availability_zone, :size => size_or_snapshot_id\n else\n volume = @ec2.create_volume availability_zone, :snapshot_id => size_or_snapshot_id\n end\n if options[:tags]\n @ec2.create_tags(volume.id, options[:tags])\n end\n while volume.status != 'available'\n volume.reload\n end\n if options[:device]\n attach volume, options[:device]\n end\n volume\n end", "def create_volume(availability_zone, options = {})\n raise ArgumentError.new('You must specify a size if not creating a volume from a snapshot') if options[:snapshot_id].blank? && options[:size].blank?\n\n action = 'CreateVolume'\n params = {\n 'Action' => action,\n 'AvailabilityZone' => availability_zone\n }\n params['Size'] = options[:size] unless options[:size].blank?\n params['SnapshotId'] = options[:snapshot_id] unless options[:snapshot_id].blank?\n\n response = send_query_request(params)\n parser = Awsum::Ec2::VolumeParser.new(self)\n volume = parser.parse(response.body)[0]\n if options[:tags] && options[:tags].size > 0\n create_tags volume.id, options[:tags]\n end\n volume\n end", "def create_volume( options = {} )\n options = { :availability_zone => '' }.merge(options)\n raise ArgumentError, \"No :availability_zone provided\" if options[:availability_zone].nil? || options[:availability_zone].empty?\n options = { :size => '' }.merge(options)\n options = { :snapshot_id => '' }.merge(options)\n params = {\n \"AvailabilityZone\" => options[:availability_zone],\n \"Size\" => options[:size],\n \"SnapshotId\" => options[:snapshot_id]\n }\n return response_generator(:action => \"CreateVolume\", :params => params)\n end", "def create_volume(snapshot_id, size, availability_zone, timeout, volume_type, piops)\n availability_zone ||= instance_availability_zone\n\n # Sanity checks so we don't shoot ourselves.\n raise \"Invalid volume type: #{volume_type}\" unless ['standard', 'gp2', 'io1'].include?(volume_type)\n\n # PIOPs requested. Must specify an iops param and probably won't be \"low\".\n if volume_type == 'io1'\n raise 'IOPS value not specified.' 
unless piops >= 100\n end\n\n # Shouldn't see non-zero piops param without appropriate type.\n if piops > 0\n raise 'IOPS param without piops volume type.' unless volume_type == 'io1'\n end\n\n create_volume_opts = { :volume_type => volume_type }\n # TODO: this may have to be casted to a string. rightaws vs aws doc discrepancy.\n create_volume_opts[:iops] = piops if volume_type == 'io1'\n\n nv = ec2.create_volume(snapshot_id, size, availability_zone, create_volume_opts)\n Chef::Log.debug(\"Created new volume #{nv[:aws_id]}#{snapshot_id ? \" based on #{snapshot_id}\" : \"\"}\")\n\n # block until created\n begin\n Timeout::timeout(timeout) do\n while true\n vol = volume_by_id(nv[:aws_id])\n if vol && vol[:aws_status] != \"deleting\"\n if [\"in-use\", \"available\"].include?(vol[:aws_status])\n Chef::Log.info(\"Volume #{nv[:aws_id]} is available\")\n break\n else\n Chef::Log.debug(\"Volume is #{vol[:aws_status]}\")\n end\n sleep 3\n else\n raise \"Volume #{nv[:aws_id]} no longer exists\"\n end\n end\n end\n rescue Timeout::Error\n raise \"Timed out waiting for volume creation after #{timeout} seconds\"\n end\n\n nv[:aws_id]\n end", "def create_volume(volume_name, config:, **kwargs)\n object = { name: volume_name }.merge(config).merge(kwargs)\n log.info \"Creating volume: #{object}\"\n volumes << object\n end", "def create_storage_volume(create_opts={})\n create_resource :storage_volume, create_opts\n end", "def create\n properties = [ resource[:name],\n resource[:user],\n resource[:group],\n resource[:config],\n resource[:mode],\n ]\n\n qmgmt(['volume', 'create'] + properties)\n end", "def volume_client_create\n options = {\n :provider => :openstack,\n :openstack_api_key => @options[:openstack_api_key],\n :openstack_username => @options[:openstack_username],\n :openstack_auth_url => @options[:openstack_auth_url],\n :openstack_tenant => @options[:openstack_tenant],\n :openstack_region => @options[:openstack_region],\n }\n @volume_client ||= Fog::Volume.new(options)\n unless @volume_client\n raise \"Unable to create OpenStack Volume instance\"\\\n \" (api_key: #{@options[:openstack_api_key]},\"\\\n \" username: #{@options[:openstack_username]},\"\\\n \" auth_url: #{@options[:openstack_auth_url]},\"\\\n \" tenant: #{@options[:openstack_tenant]})\"\n end\n end", "def volume(volume_name, attrs={}, &block)\n volumes[volume_name] ||= Ironfan::Volume.new(:parent => self, :name => volume_name)\n volumes[volume_name].configure(attrs, &block)\n volumes[volume_name]\n end", "def new_from_image value\n pixel = (Vips::Image.black(1, 1) + value).cast(format)\n image = pixel.embed 0, 0, width, height, extend: :copy\n image.copy interpretation: interpretation, xres: xres, yres: yres,\n xoffset: xoffset, yoffset: yoffset\n end", "def create_default_volume()\n # Create a default application_volume using the volume attributes from the cookbook\n create_node_volume(:application_volume)\n end", "def createvolume\n if not checkRequirements([\"thezone\",\"thevolume\"])\n return false\n end\n checkToken(@thezone)\n req = {}\n req[\"name\"] = \"oe-#{@thevolume.name}\"\n req[\"description\"] = @thevolume.description\n req[\"sizeGb\"] = @thevolume.size\n submit = queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@thevolume.azone.name}/disks', :method => 'post', :options => '', :data => req.to_json, :access_token => @thezone.toekn )\n d = checkQuery(:type => 'zone', :token => @thezone.token, :projectname => @thezone.name, :zonename => @thevolume.azone.name, :operationname => submit[\"name\"])\n data 
= queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@thevolume.azone.name}/disks/#{req[\"name\"]}', :method => 'get', :options => '', :access_token => @thezone.token) if d\n data ? data[\"name\"] : false\n end", "def volume(options)\n options[:width] * options[:height] * options[:depth]\nend", "def create\n\t\tregion = resource[:availability_zone].to_s.gsub(/.$/,'') \n\t\tcompute = Fog::Compute.new(:provider => 'aws', :region => \"#{region}\")\n\t\tprint \"ebsvol[aws]->create: Region is #{region}\\n\" if $debug\n\t\tprint \"ebsvol[aws]->create: Availability_zone is #{resource[:availability_zone]}\\n\" if $debug\n\t\t# create the requested volume\n\t\tresponse = compute.create_volume(resource[:availability_zone],resource[:size],resource[:snapshot])\t\n\t\tif (response.status == 200)\n\t\t\tvolumeid = response.body['volumeId']\n\t\t\tprint \"ebsvol[aws]->create: I created volume #{volumeid}.\\n\" if $debug\n\t\t\t# now tag the volume with volumename so we can identify it by name\n\t\t\t# and not the volumeid\n\t\t\tresponse = compute.create_tags(volumeid,{ :Name => resource[:volume_name] })\n\t\t\tif (response.status == 200)\n\t\t\t\tprint \"ebsvol[aws]->create: I tagged #{volumeid} with Name = #{resource[:volume_name]}\\n\" if $debug\n\t\t\tend\n\t\t\t# Check if I need to attach it to an ec2 instance.\n\t\t\tattachto = resource[:attached_to].to_s\n\t\t\tprint \"attachto is #{attachto}\\n\" if $debug\n\t\t\tif ( attachto != '' )\n\t\t\t\tif ( attachto == 'me')\n\t\t\t\t\tinstance = instanceinfo(compute,myname(compute))\n\t\t\t\telse\n\t\t\t\t\tinstance = instanceinfo(compute,attachto)\n\t\t\t\tend\n\t\t\t\tif ( resource[:device] != nil )\n\t\t\t\t\t# try to attach the volume to requested instance\n\t\t\t\t\tprint \"attach the volume\\n\" if $debug\n\t\t\t\t\tvolume = volinfo(compute,resource[:volume_name])\n\t\t\t\t\tattachvol(compute,volume,instance,resource[:device])\n\t\t\t\telse\n\t\t\t\t\traise \"ebsvol[aws]->create: Sorry, I can't attach a volume with out a device to attach to!\"\n\t\t\t\tend\n\t\t\tend\n\t\telse\n\t\t\traise \"ebsvol[aws]->create: I couldn't create the ebs volume, sorry!\"\n\t\tend\n\tend", "def volume (l, w, h)\n\tl * w * h\nend", "def addVolume(dev, size, type: \"gp2\")\n if @cloud_id.nil? or @cloud_id.empty?\n MU.log \"#{self} didn't have a cloud id, couldn't determine 'active?' status\", MU::ERR\n return true\n end\n az = nil\n MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_instances(\n instance_ids: [@cloud_id]\n ).reservations.each { |resp|\n if !resp.nil? 
and !resp.instances.nil?\n resp.instances.each { |instance|\n az = instance.placement.availability_zone\n instance.block_device_mappings.each { |vol|\n if vol.device_name == dev\n MU.log \"A volume #{dev} already attached to #{self}, skipping\", MU::NOTICE\n return\n end\n }\n }\n end\n }\n MU.log \"Creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n creation = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).create_volume(\n availability_zone: az,\n size: size,\n volume_type: type\n )\n begin\n sleep 3\n creation = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_volumes(volume_ids: [creation.volume_id]).volumes.first\n if ![\"creating\", \"available\"].include?(creation.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end while creation.state != \"available\"\n\n if @deploy\n MU::MommaCat.listStandardTags.each_pair { |key, value|\n MU::MommaCat.createTag(creation.volume_id, key, value, region: @config['region'], credentials: @config['credentials'])\n }\n MU::MommaCat.createTag(creation.volume_id, \"Name\", \"#{MU.deploy_id}-#{@config[\"name\"].upcase}-#{dev.upcase}\", region: @config['region'], credentials: @config['credentials'])\n end\n\n attachment = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).attach_volume(\n device: dev,\n instance_id: @cloud_id,\n volume_id: creation.volume_id\n )\n\n begin\n sleep 3\n attachment = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_volumes(volume_ids: [attachment.volume_id]).volumes.first.attachments.first\n if ![\"attaching\", \"attached\"].include?(attachment.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end while attachment.state != \"attached\"\n end", "def set_volume\n @volume = Volume.find(params[:id])\n end", "def set_volume\n @volume = Volume.find(params[:id])\n end", "def determine_volume\n vol = currently_attached_volume(instance_id, new_resource.device)\n vol_id = new_resource.volume_id || volume_id_in_node_data || ( vol ? vol[:aws_id] : nil )\n raise \"volume_id attribute not set and no volume id is set in the node data for this resource (which is populated by action :create) and no volume is attached at the device\" unless vol_id\n\n # check that volume exists\n vol = volume_by_id(vol_id)\n raise \"No volume with id #{vol_id} exists\" unless vol\n\n vol\n end", "def volume_calc(length,width,height)\n volume = length * width * height\n end", "def create body = {}\n @connection.request(method: :post, path: \"/volumes/create\", headers: {\"Content-Type\": \"application/json\"}, body: body.to_json)\n end", "def set_volume\n @volume = services.block_storage.get_volume(params[:id])\n end", "def create(name, image_name, client = Client.new)\n image = client.snapshots.all.find { |x| x.name == image_name }\n\n raise \"Found a droplet with name #{name} WTF\" if exist?(name, client)\n\n new(client.droplets.create(new_droplet(name, image, client)), client)\n end", "def create\n @volume = Volume.new(volume_params)\n\n respond_to do |format|\n if @volume.save\n format.html { redirect_to @volume, notice: 'Volume was successfully created.' 
}\n format.json { render :show, status: :created, location: @volume }\n else\n format.html { render :new }\n format.json { render json: @volume.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_lvm_volumes( opts = {} )\n opts = deep_merge_hashes( @aws_default_instance_options, opts )\n unless exist?( opts[ :lvm_volumes ].first[1] )\n create_lvm_volumes!( opts )\n end\n end", "def volume\n @length * @width * @depth\n end", "def create\n @volume = Volume.new(volume_params)\n\n respond_to do |format|\n if @volume.save\n format.html { redirect_to @volume, notice: 'Volume was successfully created.' }\n format.json { render action: 'show', status: :created, location: @volume }\n else\n format.html { render action: 'new' }\n format.json { render json: @volume.errors, status: :unprocessable_entity }\n end\n end\n end", "def create!\n set_id = generate_set_id\n\n @volume_ids.each do |id|\n snapshot = @fog.snapshots.new\n\n snapshot.description = \"#{@hostname.split(\".\")[0]} #{@mount} (#{self.needed_types.join(\", \")}) (#{set_id})\"\n snapshot.volume_id = id\n\n # Actually do the snapshot\n snapshot.save\n\n # Reload to get snapshot.id so we can add tags\n snapshot.reload\n\n @fog.tags.create(:resource_id => snapshot.id, :key => \"Host\", :value => @hostname)\n @fog.tags.create(:resource_id => snapshot.id, :key => \"Mount\", :value => @mount)\n @fog.tags.create(:resource_id => snapshot.id, :key => \"SetID\", :value => set_id)\n @fog.tags.create(:resource_id => snapshot.id, :key => \"Type\", :value => self.needed_types.join(\",\"))\n end\n end", "def createVolume\n require 'rest_client'\n require 'uri'\n\n if @role.nil? and !current_actor.superadmin\n json_response({ message: \"You don't have permission to view the clusters in this project\" }, :unauthorized)\n return\n end\n\n # Service name in the query\n volumeName = params[\"volume_name\"]\n\n # Env variables for Manager host and port\n serviceManagerHost = Settings.service_manager_host\n serviceManagerPort = Settings.service_manager_port.to_s\n serviceManagerURI = 'http://' + serviceManagerHost + ':' + serviceManagerPort + '/v1/volume'\n\n # Create request for Service Manager\n stack = {\n 'name' => volumeName,\n 'engine-url' => @cluster.endpoint,\n 'ca-cert' => @cluster.ca,\n 'cert' => @cluster.cert,\n 'cert-key' => @cluster.key\n }.to_json\n\n begin\n response = RestClient.post(\n serviceManagerURI,\n stack,\n 'Accept' => 'application/json',\n 'Content-Type' => 'application/json'\n )\n puts \"Deploy Response: \" + response\n json_response(response, :created)\n rescue Exception => e\n # If error, respond with it\n puts e\n json_response({message: e}, :unprocessable_entity)\n end\n end", "def create\n @actual_volume = ActualVolume.new(actual_volume_params)\n\n respond_to do |format|\n if @actual_volume.save\n format.html { redirect_to @actual_volume, notice: 'Actual volume was successfully created.' 
}\n format.json { render :show, status: :created, location: @actual_volume }\n else\n format.html { render :new }\n format.json { render json: @actual_volume.errors, status: :unprocessable_entity }\n end\n end\n end", "def find_or_create_level_source_image(level_image, level_source_id, upgrade=false)\n level_source_image = nil\n # Store the image only if the image is set, and either the image has not been\n # saved or the saved image is smaller than the provided image\n if level_image && level_source_id\n level_source_image = LevelSourceImage.find_by(level_source_id: level_source_id)\n upgradable = false\n if upgrade && level_source_image\n old_image_size = ImageSize.path(level_source_image.s3_url)\n new_image_size = ImageSize.new(Base64.decode64(level_image))\n upgradable = new_image_size.width > old_image_size.width &&\n new_image_size.height > old_image_size.height\n end\n if !level_source_image || upgradable\n level_source_image = LevelSourceImage.new(level_source_id: level_source_id)\n unless level_source_image.save_to_s3(Base64.decode64(level_image))\n level_source_image = nil\n end\n end\n end\n level_source_image\n end", "def create_image(vdu)\n\t\tname = get_resource_name\n\t\t\n\t\traise CustomException::NoExtensionError, \"#{vdu['vm_image']} does not have a file extension\" if vdu['vm_image_format'].empty?\n\t\traise CustomException::InvalidExtensionError, \"#{vdu['vm_image']} has an invalid extension. Allowed extensions: ami, ari, aki, vhd, vmdk, raw, qcow2, vdi and iso\" unless ['ami', 'ari', 'aki', 'vhd', 'vmdk', 'raw', 'qcow2', 'vdi', 'iso'].include? vdu['vm_image_format']\n\n\t\[email protected]_list << Image.new(name, vdu['vm_image_format'], vdu['vm_image'])\n\t\tname\n\tend", "def create\n @volume = Volume.new(params[:volume])\n\n respond_to do |format|\n if @volume.save\n format.html { redirect_to(@volume, :notice => 'Volume was successfully created.') }\n format.xml { render :xml => @volume, :status => :created, :location => @volume }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @volume.errors, :status => :unprocessable_entity }\n end\n end\n end", "def createEBSSnapshot(client=nil,description='',volume_id=nil)\n return false if volume_id.nil? || client.nil?\n # Fetch the Volume Name. 
This will be used in the description of the snapshot\n resp = client.describe_volumes({dry_run: false, volume_ids: [volume_id] })\n resp.volumes[0].tags.each do |t|\n if t.key=='Name'\n description = t.value unless t.value.empty?\n break\n end\n end\n # puts \"Taking snapshot of volume #{volume_id}...\"\n return client.create_snapshot({\n dry_run: false,\n volume_id: volume_id,\n description: description\n })\nend", "def create_volume(options={}) \n raise \"Volume nickname required\" unless options[:nickname]\n params = {:nickname => options[:nickname],:size => options[:size], :api_version => 1.0}\n params[:description] = options[:description] if options[:description]\n #STDERR.puts \"HERE IS THE URL: #{@api_url}/create_ebs_volume.js (PARAMS: #{params.inspect})\"\n body = RestClient.post @api_url+\"/create_ebs_volume.js\",params\n json = JSON.load(body)\n STDERR.puts \"CREATED_VOLUME: #{json}\"\n json\n rescue => e\n display_exception(e, \"create_volume: #{options.inspect}\")\n end", "def set_actual_volume\n @actual_volume = ActualVolume.find(params[:id])\n end", "def create_snapshot_bundle\n # we shouldn't specify -k $EC2_PRIVATE_KEY since we assume private keys are already appended to /root/.ssh/authorized_keys\n # but it's a required parameter -- doh!\n run \"#{ec2_cmd('ec2-bundle-vol')} -v #{volume_to_bundle} -d #{bundling_directory} -k $EC2_PRIVATE_KEY -u #{@ec2_user_id} -s #{volume_size}\"\n end", "def create\n tmp = Puppet::FileSystem::Uniquefile.new('quobyte_volume_config')\n tmp.write(resource[:content])\n tmp.flush()\n\n qmgmt(['volume', 'config', 'import', [resource[:name]], tmp.path])\n end", "def volume_create\n help = [\n '',\n \"Use: #{me} volume create --name=VOLUME --git=URL\",\n \"Use: #{me} volume create --name=VOLUME --dir=PATH\",\n '',\n 'Creates a volume named VOLUME from a git clone of URL',\n 'Creates a volume named VOLUME from a copy of PATH'\n ]\n # asked for help?\n if [nil,'help','--help'].include? ARGV[2]\n show help\n exit failed\n end\n # unknown arguments?\n knowns = ['name','git','dir']\n unknown = ARGV[2..-1].select do |argv|\n knowns.none? { |known| argv.start_with?('--' + known + '=') }\n end\n if unknown != []\n show help\n unknown.each { |arg| puts \"FAILED: unknown argument [#{arg.split('=')[0]}]\" }\n exit failed\n end\n # required known arguments\n args = ARGV[2..-1]\n vol = get_arg('--name', args)\n url = get_arg('--git', args)\n dir = get_arg('--dir', args)\n if vol.nil? || (url.nil? && dir.nil?)\n show help\n exit failed\n end\n if vol.length == 1\n msg = 'volume names must be at least two characters long. See https://github.com/docker/docker/issues/20122'\n puts \"FAILED: [volume create --name=#{vol}] #{msg}\"\n exit failed\n end\n if volume_exists? 
vol\n msg = \"#{vol} already exists\"\n puts \"FAILED: [volume create --name=#{vol}] #{msg}\"\n exit failed\n end\n # cyber-dojo.sh does actual [volume create]\nend", "def create_image(params)\n instance_id = params['InstanceId']\n instance_id = instance_id.split('-')[1]\n\n vm = VirtualMachine.new(\n VirtualMachine.build_xml(instance_id),\n @client)\n\n rc = vm.info\n if OpenNebula::is_error?(rc)\n rc.ec2_code = \"InvalidInstanceID.NotFound\"\n return rc\n end\n\n image_id = vm.disk_saveas(1,\n params[\"Name\"],\n OpenNebula::Image::IMAGE_TYPES[0])\n\n # TODO Add AMI Tags\n # TODO A new persistent image should be created for each instance\n\n if OpenNebula::is_error?(image_id)\n return image_id\n end\n\n erb_version = params['Version']\n\n response = ERB.new(File.read(@config[:views]+\"/create_image.erb\"))\n return response.result(binding), 200\n end", "def addVolume(dev, size, type: \"gp2\", delete_on_termination: false)\n\n if setDeleteOntermination(dev, delete_on_termination)\n MU.log \"A volume #{dev} already attached to #{self}, skipping\", MU::NOTICE\n return\n end\n\n MU.log \"Creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).create_volume(\n availability_zone: cloud_desc.placement.availability_zone,\n size: size,\n volume_type: type\n )\n\n MU.retrier(wait: 3, loop_if: Proc.new {\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(volume_ids: [creation.volume_id]).volumes.first\n if ![\"creating\", \"available\"].include?(creation.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n creation.state != \"available\"\n })\n\n\n if @deploy\n MU::Cloud::AWS.createStandardTags(\n creation.volume_id,\n region: @region,\n credentials: @credentials,\n optional: @config['optional_tags'],\n nametag: @mu_name+\"-\"+dev.upcase,\n othertags: @config['tags']\n )\n end\n\n MU.log \"Attaching #{creation.volume_id} as #{dev} to #{@cloud_id} in #{@region} (credentials #{@credentials})\"\n attachment = nil\n MU.retrier([Aws::EC2::Errors::IncorrectState], wait: 15, max: 4) {\n attachment = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).attach_volume(\n device: dev,\n instance_id: @cloud_id,\n volume_id: creation.volume_id\n )\n }\n\n begin\n att_resp = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(volume_ids: [attachment.volume_id])\n if att_resp and att_resp.volumes and !att_resp.volumes.empty? and\n att_resp.volumes.first.attachments and\n !att_resp.volumes.first.attachments.empty?\n attachment = att_resp.volumes.first.attachments.first\n if !attachment.nil? and ![\"attaching\", \"attached\"].include?(attachment.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end\n end while attachment.nil? 
or attachment.state != \"attached\"\n\n # Set delete_on_termination, which for some reason is an instance\n # attribute and not on the attachment\n setDeleteOntermination(dev, delete_on_termination)\n end", "def get_volume\n @volume\n end", "def convert_image(catalog_vapp_template, account_name)\n name = catalog_vapp_template['name']\n #much fudging ensues\n #arch = name.scan(/(36|24).bit/).first\n #k enuf o'that now!\n arch = \"n/a\" #Leaving out entirely as we don't get one from terremark (could parse but its a fudge)\n Image.new( {\n :id => catalog_vapp_template['href'].split('/').last,\n :name => catalog_vapp_template['name'],\n :architecture => arch,\n :owner_id => account_name,\n :description => catalog_vapp_template['name']\n })\n end", "def create\n @admissive_volume = AdmissiveVolume.new(params[:admissive_volume])\n\n respond_to do |format|\n if @admissive_volume.save\n format.html { redirect_to @admissive_volume, notice: 'Admissive volume was successfully created.' }\n format.json { render json: @admissive_volume, status: :created, location: @admissive_volume }\n else\n format.html { render action: \"new\" }\n format.json { render json: @admissive_volume.errors, status: :unprocessable_entity }\n end\n end\n end", "def load_volume cluster_node_id, volume_cfg\n cluster_vol_id = cluster_node_id + '-' + volume_cfg[:device]\n cluster_vol_params = {\n :cluster => self,\n :cluster_vol_id => cluster_vol_id, :cluster_node_id => cluster_node_id,\n }.merge(\n volume_cfg.slice(:mount_point, :size, :from_snapshot_id, :availability_zone, :device))\n @all_volumes[cluster_vol_id] = Volume.new(cluster_vol_params)\n end", "def restore_from_snap(last_snapshot, options = {})\n options[:device] = \"/dev/sdk\" unless options[:device]\n options[:vol_nickname] = last_snapshot[\"nickname\"] unless options[:vol_nickname]\n \n # 5 - Unmount and detach the current EBS volume (forcing to detach the device we're gonna need later for attching ours...)\n umount_and_detach_device({:device => options[:device]})\n # 6- Create the volume from the latest snapshot, attach it to the instance and then mount it\n STDERR.puts \"Creating new DB volume from snapshot #{last_snapshot['aws_id']}\"\n vol = ( options[:new_size_gb] ? create_volume_from_snap_size_gb(last_snapshot[\"aws_id\"],options[:vol_nickname],options[:new_size_gb] ) : create_volume_from_snap(last_snapshot[\"aws_id\"],options[:vol_nickname] ) )\n unless vol.nil?\n \tSTDERR.puts \"Attaching new DB volume: #{vol['aws_id']}\"\n \tatt = attach_volume(vol['aws_id'], options[:device])\n \twait_for_attachment(options[:device])\n \tFileUtils.mkdir_p self.MountPoint\n \tres = `mount -t xfs -o noatime #{options[:device]} #{self.MountPoint}`\n \traise EBSRemoteExecException.new(nil,$?,\"Error mounting newly created volume (#{vol['aws_id']}) on #{options[:device]}:\\n\"+res) if $? 
!= 0 \n else\n\t raise \"create volume failed from snapshot\"\n end\n end", "def create options = {}\n resp = case\n when options[:instance_id]\n client.create_image(options)\n when options[:image_location] || options[:root_device_name]\n if kernel = options.delete(:kernel)\n options[:kernel_id] = kernel.id\n end\n if ramdisk = options.delete(:ramdisk)\n options[:ramdisk_id] = ramdisk.id\n end\n options[:block_device_mappings] =\n translate_block_device_mappings(options[:block_device_mappings]) if\n options[:block_device_mappings]\n client.register_image(options)\n else\n raise(ArgumentError,\n \"expected instance_id, image_location, \" +\n \"or root_device_name\")\n end\n Image.new(resp.image_id, :config => config)\n end", "def volume\n return @width * @height / @depth\n end", "def mount_kvm_volume(name)\n dev = available_dev\n enable_netblockdev(name, dev)\n vol_grp = lvm_volume_group(\n lvm_partition(dev)\n )\n root = lvm_root(vol_grp)\n lvm_enable(vol_grp) unless lvm_enabled?(root)\n mount(name, root)\n dev\nend", "def adopt_existing_volumes!\n Volume.all.each do |ec2_vol|\n next if ec2_vol.deleted? || ec2_vol.deleting?\n instance = Instance.find(ec2_vol.attached_instance_id) ; p instance ; next unless instance\n cluster_node_id = instance.get_cluster_node_id(self.name) ; next unless cluster_node_id\n cluster_vol_id = cluster_node_id + '-' + ec2_vol.device\n volume_in_cluster = @all_volumes[cluster_vol_id] ; next unless volume_in_cluster\n ec2_vol.update! volume_in_cluster.logical_attributes\n @all_volumes[cluster_vol_id] = ec2_vol\n end\n end", "def new \n @image = Image.new\n end", "def vm_build_image (image_name, template)\n output_box = template[:box]\n unless File.directory?(Packer::OUTPUT_DIR)\n mkdir(Packer::OUTPUT_DIR)\n end\n if !$dry_run then\n build_image(template)\n else\n touch output_box\n end\n\n version_entry = {\n \"version\" => \"#{template[:vars]['image_version']}\",\n \"providers\" => [{\n \"name\" => \"virtualbox\",\n \"url\" => File.absolute_path(output_box),\n \"checksum_type\" => \"sha1\",\n \"checksum\" => Digest::SHA1.file(output_box).hexdigest\n }]\n }\n\n # Insert the new version entry into its correspondent vagrant JSON file\n vagrant_json_update(image_name, version_entry)\nend", "def create_ec2_ebs_volume opts\n Ec2EbsVolume.create opts.merge :account => self\n end", "def new\n\t\t@image = Image.new\n\tend", "def new\n\t\t@image = Image.new\n\tend", "def new\n @image = Image.new\n end", "def image_copy(another_image)\n\n java_import Java::edu.stanford.cfuller.imageanalysistools.image.ImageFactory\n\n ImageFactory.create(another_image)\n\n end", "def add_image \n image = CARMD.image(self.vin)\n if image == \"\"\n image = \"http://clipart-library.com/img/2050778.png\"\n end\n self.auto_image = image \n self.save \n end", "def provision_storage host, vm\n if host['volumes']\n # Lazily create the volume client if needed\n volume_client_create\n host['volumes'].keys.each_with_index do |volume, index|\n @logger.debug \"Creating volume #{volume} for OpenStack host #{host.name}\"\n\n # The node defintion file defines volume sizes in MB (due to precedent\n # with the vagrant virtualbox implementation) however OpenStack requires\n # this translating into GB\n openstack_size = host['volumes'][volume]['size'].to_i / 1000\n\n # Create the volume and wait for it to become available\n vol = @volume_client.volumes.create(\n :size => openstack_size,\n :display_name => volume,\n :description => \"Beaker volume: host=#{host.name} volume=#{volume}\",\n )\n vol.wait_for { 
ready? }\n\n # Fog needs a device name to attach as, so invent one. The guest\n # doesn't pay any attention to this\n device = \"/dev/vd#{('b'.ord + index).chr}\"\n vm.attach_volume(vol.id, device)\n end\n end\n end", "def set_volume(volume)\n puts \"Setting volume to #{volume}\" if $verbose\n v='AG'+(('000'+volume.to_s)[-3..-1])+';'\n puts v if $verbose\n ret=send_cmd(v,'AG;',v,0.5,1.5,3)\n if(ret)\n return(ret.gsub(/^AG/,'').gsub(/;$/,'').to_i)\n else\n return(nil)\n end\nend", "def volume \n puts \"The volume is #{@length * @width * @height}\"\n end", "def create_volume_with_http_info(id, volume, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: VolumesApi.create_volume ...'\n end\n # verify the required parameter 'id' is set\n if @api_client.config.client_side_validation && id.nil?\n fail ArgumentError, \"Missing the required parameter 'id' when calling VolumesApi.create_volume\"\n end\n # verify the required parameter 'volume' is set\n if @api_client.config.client_side_validation && volume.nil?\n fail ArgumentError, \"Missing the required parameter 'volume' when calling VolumesApi.create_volume\"\n end\n # resource path\n local_var_path = '/projects/{id}/storage'.sub('{' + 'id' + '}', CGI.escape(id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body] || @api_client.object_to_http_body(volume)\n\n # return_type\n return_type = opts[:debug_return_type] || 'Volume'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['x_auth_token']\n\n new_options = opts.merge(\n :operation => :\"VolumesApi.create_volume\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VolumesApi#create_volume\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def volume_up_action\n VolumeUp.new(:component_id => component_id, :call_id => call_id).tap do |s|\n s.original_component = self\n end\n end", "def volume(volume, server)\n new_volume = { server => { 'volume' => volume }}\n unless STREAM_DATA.stream_data[server].nil?\n m = STREAM_DATA.stream_data.merge!(new_volume) { |_key, left, right| left.merge!(right) }\n STREAM_DATA.add(m)\n else\n m = STREAM_DATA.stream_data.merge!(new_volume) { |_key, left, right| left.merge!(right) }\n STREAM_DATA.add(m)\n end\n end", "def volume_compatible_with_resource_definition?(volume)\n if new_resource.snapshot_id =~ /vol/\n new_resource.snapshot_id(find_snapshot_id(new_resource.snapshot_id, new_resource.most_recent_snapshot))\n end\n (new_resource.size.nil? || new_resource.size == volume[:aws_size]) &&\n (new_resource.availability_zone.nil? || new_resource.availability_zone == volume[:zone]) &&\n (new_resource.snapshot_id.nil? 
|| new_resource.snapshot_id == volume[:snapshot_id])\n end", "def generate\n if !self.exists?\n if !self.authenticated\n raise ManagedImage::AuthenticationError, \"The ManagedImage::Variant has not been properly authenticated\"\n end\n magick_image = parent_image.magick_image\n magick_image.crop!(*magick_crop_rect)\n # Only resize if the dimensions are incorrect\n if self.width != magick_image.columns || self.height != magick_image.rows\n magick_image.resize!(self.width, self.height)\n end\n storage.create(path, magick_image.to_blob)\n end\n self\n end", "def volume(volume_id)\n volumes(volume_id)[0]\n end", "def image_creation(image_name)\n image_path = Rails.root.join('public', 'images', image_name).to_s\n uploader = Dragonfly[:images]\n uploaded_image = uploader.fetch_file(image_path)\n image = Image.create image: uploaded_image\n\n rescue Error => e\n print \"Error creating image: \" + e\nend", "def volume_up_action\n VolumeUp.new(:component_id => component_id, :target_call_id => target_call_id).tap do |s|\n s.original_component = self\n end\n end", "def add(src_image_path, image_svc_path, extra)\n set_image_svc_path(image_svc_path)\n\n begin\n # Get full path\n fullpath = File.expand_path(src_image_path)\n # Get filename\n @filename = File.basename(fullpath)\n\n logger.debug \"fullpath: #{fullpath}\"\n logger.debug \"filename: #@filename\"\n logger.debug \"mount path: #{mount_path}\"\n\n # Make sure file exists\n return cleanup([false,\"File does not exist\"]) unless File.exist?(fullpath)\n\n # Make sure it has an .iso extension\n return cleanup([false,\"File is not an ISO\"]) if @filename[-4..-1] != \".iso\"\n\n File.size(src_image_path)\n\n # Confirm a mount doesn't already exist\n unless is_mounted?(fullpath)\n unless mount(fullpath)\n logger.error \"Could not mount #{fullpath} on #{mount_path}\"\n return cleanup([false,\"Could not mount\"])\n end\n end\n\n # Determine if there is an existing image path for iso\n if is_image_path?\n ## Remove if there is\n remove_dir_completely(image_path)\n end\n\n ## Create image path\n unless create_image_path\n logger.error \"Cannot create image path: #{image_path}\"\n return cleanup([false, \"Cannot create image path: #{image_path}\"])\n end\n\n # Attempt to copy from mount path to image path\n copy_to_image_path\n\n # Verify diff between mount / image paths\n # For speed/flexibility reasons we just verify all files exists and not their contents\n @verification_hash = get_dir_hash(image_path)\n mount_hash = get_dir_hash(mount_path)\n unless mount_hash == @verification_hash\n logger.error \"Image copy failed verification: #{@verification_hash} <> #{mount_hash}\"\n return cleanup([false, \"Image copy failed verification: #{@verification_hash} <> #{mount_hash}\"])\n end\n\n rescue => e\n logger.error e.message\n return cleanup([false,e.message])\n end\n\n cleanup([true ,\"\"])\n end", "def create_volumes(volume_definitions)\n volume_definitions.each { |volume| client.volumes.create(volume) }\n end", "def add_mk(new_image, iso_path, image_path)\n new_image.add(iso_path, image_path)\n end", "def create_thumbnails()\n # {{{\n begin\n @@logger.log('IMAGE UP | Importing image')\n image_renderer = @@image_renderer.new(@media_asset)\n \n id = @media_asset.media_asset_id\n ext = @media_asset.extension.dup.downcase\n path = Aurita.project_path(:public, :assets, \"asset_#{id}.#{ext}\")\n \n @@logger.log(\"IMAGE UP | Path is #{path}\")\n # Every image needs a jpeg base image (esp. 
needed for PDF): \n STDERR.puts \"Importing #{path} using #{image_renderer.class.inspect}\"\n image_renderer.import(path)\n image_renderer.create_image_variants(@@variants)\n\n if ext == 'pdf' then\n image_renderer.create_pdf_preview()\n elsif @media_asset.is_video? then\n dest = Aurita.project_path(:public, :assets, \"asset_#{id}.jpg\")\n # File.open(source, 'w')\n # system \"ffmpeg -i #{path} -ar 22050 -ab 32 -acodec mp3\n # -s 480x360 -vcodec flv -r 25 -qscale 8 -f flv -y #{ dest }\"\n system(\"ffmpeg -i '#{path}' -ss 00:00:10 -vframes 1 -f image2 -vcodec mjpeg '#{dest}'\")\n ext = 'jpg'\n end\n rescue ::Exception => e\n STDERR.puts('Error when trying to create image versions: ' << e.message)\n e.backtrace.each { |m| \n STDERR.puts(m)\n }\n end\n end", "def attach_blank_volume opts\n device = opts.delete :device\n opts = {:ec2_availability_zone => ec2_availability_zone }.merge opts\n volume = account.create_ec2_ebs_volume opts\n attach_volume volume, device\n end", "def get_volume(volume_id)\n get(\"cloud-instances/#{guid}/volumes/#{volume_id}\")\n end", "def add_image\n image_url = @row[:image]\n return if image_url.blank?\n\n variant = Spree::Variant.find_by(is_master: true, product_id: @product.id)\n raise 'Product variant not found' unless variant\n\n image = Spree::Image.find_or_initialize_by viewable_id: variant.id\n return if image.id\n\n local_image = get_local_image image_url\n image.attachment = File.open(local_image, 'r')\n image.viewable_type = 'Spree::Variant'\n image.save\n end", "def create_image_with_local(name, type, container_uuid, path)\n @logger.debug(\"Creating image from file path #{path}\")\n container_id = JSON.parse(\n @client.get('v2.0', \"storage_containers/#{container_uuid}\")\n )['id'].split('::').last\n @logger.debug(\"Container ID is #{container_id}\")\n spec = { name: name, image_type: type, annotation: name }\n @logger.debug(\"Image creation specs => #{spec}\")\n task = JSON.parse(@client.post('v2.0', 'images', spec.to_json))\n task_uuid = task['task_uuid']\n image_uuid = TaskManager.wait_on_task(task_uuid, @client, @logger)\n task = JSON.parse(\n @client.put('v0.8', \"images/#{image_uuid}/upload\",\n File.open(path), nil,\n { 'X-Nutanix-Destination-Container' => container_id })\n )\n task_uuid = task['task_uuid']\n # Timeout is set to 60 minutes as image upload may take time\n TaskManager.wait_on_task(task_uuid, @client, @logger, 60 * 60)\n image_uuid\n rescue => e\n raise e\n end", "def attach_node_volume (volume_label)\n # XXX should check whether this device name is already allocated,\n # and if so throw an exception\n # Helper method, attach an arbitrary volume using an arbitrary label that must be preconfigured in nodes\n Chef::Log.info(\"In attach_node_volume with volume_label #{volume_label}\")\n mount_device = node.application_attributes[volume_label].mount_device\n volume_id = node.application_attributes[volume_label].volume_id\n\n if mount_device.nil?\n Chef::Log.fatal(\"No mount device for volume label #{volume_label}.\tMust supply a volume label configured in nodes\")\n raise\n end\n\n attach_volume(volume_label, volume_id, mount_device)\n end", "def set_bunny_volume vol\n @exch.publish(vol.to_i) \n end", "def attach_volume( options = {} )\n options = { :volume_id => '' }.merge(options)\n options = { :instance_id => '' }.merge(options)\n options = { :device => '' }.merge(options)\n raise ArgumentError, \"No :volume_id provided\" if options[:volume_id].nil? 
|| options[:volume_id].empty?\n raise ArgumentError, \"No :instance_id provided\" if options[:instance_id].nil? || options[:instance_id].empty?\n raise ArgumentError, \"No :device provided\" if options[:device].nil? || options[:device].empty?\n\n params = {\n \"VolumeId\" => options[:volume_id],\n \"InstanceId\" => options[:instance_id],\n \"Device\" => options[:device]\n }\n return response_generator(:action => \"AttachVolume\", :params => params)\n end", "def volume_by_id(ec2,volid)\n ec2.describe_volumes.body['volumeSet'].each do |volume|\n if volume['volumeId'] == volid\n x= {\n :type => volume['volumeType'],\n :status => volume['status'],\n :az => volume['availabilityZone'],\n :size => volume['size']\n }\n #puts \"VOLINFO #{volid}\\n\"+volume.to_yaml\n x[:instance_id] = volume['attachmentSet'].first['instanceId'] if volume['attachmentSet'].length>0 && volume['attachmentSet'].first.has_key?('instanceId')\n return x\n end\n end\n return {}\nend", "def volume_mount(local_dir, container_dir)\n local_dir = File.expand_path(local_dir, reference_dir)\n volumes << VolumeMount.new(local_dir, container_dir)\n end", "def create_ebs_stripe(nickname, new_volume_size_in_gb, options = {})\n self.execute_terminate_volumes if options[:force]\n devices = @disk.generate_physical_device_names(options[:stripe])\n each_volume_size = (new_volume_size_in_gb / options[:stripe].to_f).ceil\n devices.each do |d| \n vol = self.create_volume(:nickname => \"#{nickname}-#{d}\", \n :description => \"Created by RS tools to initialize new EBS stripe volume\",\n :size => each_volume_size)\n raise vol unless vol['aws_id']\n \"Attaching new EBS volume: #{vol['aws_id']}\"\n att = self.attach_volume(vol['aws_id'], d)\n end\n devices.each {|d| self.wait_for_attachment(d) }\n\n @disk.initialize_stripe(devices)\n end", "def find_volume(name)\n Volume.new pool.lookup_volume_by_name(name), self\n rescue Libvirt::RetrieveError\n nil\n end", "def item_to_item_vol_transfer(volume:, key:, to_item:, from_item:)\n from_obj_to_obj_provenance(to_item: to_item, from_item: from_item,\n additional_relation_data: { key => volume })\n end", "def update_volume_info\n\t\t#\t\t@pool = Pool.find(self.pool_id)\n\t\t#\t\t@host = Host.find(@pool.host_id)\n\t\t#\n\t\t#\t\tconnection = ConnectionsManager.instance\n\t\t#\t\tconnection_hash = connection.get(@host.name)\n\t\t#\t\tconn = connection_hash[:conn]\n\n\t\t# get pool reference in order to get a reference to the volume\n\t\t@pool = @conn.lookup_storage_pool_by_name(@pool.name)\n\t\tvolume = @pool.lookup_volume_by_name(self.name)\n\t\tvolume_info = volume.info\n\t\t\n\t\t# add some stats to pool object\n\t\tdivide_to_gigabytes = (1024 * 1024 * 1024).to_f\n\t\tself.allocation = (volume_info.allocation.to_f / divide_to_gigabytes).to_f\n\tend", "def volume_by_id(volume_id)\n ec2.describe_volumes.find{|v| v[:aws_id] == volume_id}\n end", "def create_volume_dir(volume,olddirname)\n\t\tfname= \"#{self.class.name}.#{__method__}\"\n\t\tif !olddirname.blank? 
&& volume.dir_name != olddirname\n\t\t\tret=nil\n\t\t\tself.errors.add :base, \"The directory of database volume can't be moved\"\n\t\telse\n\t\t\tret = vol_table_name(volume)\n\t\t\tVolumeTablesBinary.init_table(ret)\n\t\tend\n\t\tLOG.debug(fname) {\"ret=#{ret}\"}\n\t\tret\n\tend", "def create_size src,dest,params\n return nil if !File.exists?(src)\n i = Image.new(src)\n params[:to] = dest\n FileUtils.mkdir_p File.dirname(dest)\n i.out params\n @hooks.create(:original => src,\n :sized => dest,\n :not_found => params[:not_found],\n :autogenerated => params[:autogenerated],\n :width => params[:width],\n :height => params[:height]) if @hooks && !params[:skiphook]\n end", "def create_version\n v_stream= [60, \"Microsoft.Container.DataSpaces\".bytes.to_a, 1, 1, 1].flatten!.pack VERSION_PACKING\n Storage.new('Version', :data=>v_stream, :size=>v_stream.size)\n end", "def setup_lvm_on_partition(part)\n return unless part.lvm\n\n pvol = \"/dev/disk/by-partlabel/#{part.label}\"\n execute!(\"pvcreate -y #{pvol}\")\n execute!(\"vgcreate -y #{part.lvm.vg_name} #{pvol}\")\n\n # any \"open ended\" volumes (no size specified), we deal with last\n unspec_vol = nil\n\n notice(\"Creating LVM partitions\")\n part.lvm.volumes.each do |vol|\n if not vol.size_mb.is_a?(Integer)\n unspec_vol = vol\n next\n end\n\n info(\"Creating #{vol.label} volume\")\n execute!(\"lvcreate -y --name #{vol.label} --size #{vol.size_mb}MiB #{part.lvm.vg_name}\")\n next if not vol.fs\n\n create_filesystem(vol.fs, \"/dev/#{part.lvm.vg_name}/#{vol.label}\", vol.label)\n end\n\n if unspec_vol\n vol = unspec_vol\n info(\"Creating #{vol.label} volume\")\n execute!(\"lvcreate -y --name #{vol.label} -l 100%FREE #{part.lvm.vg_name}\")\n if vol.fs\n create_filesystem(vol.fs, \"/dev/#{part.lvm.vg_name}/#{vol.label}\", vol.label)\n end\n end\n end", "def test_should_create_image\n lightsabers_image = substruct_fixture_file(\"lightsabers.jpg\")\n\n an_image = Image.new\n an_image.upload = lightsabers_image\n assert an_image.save\n \n # We must erase the record and its files by hand, just calling destroy.\n assert an_image.destroy\n end", "def set_volume(volume)\n %x{#{echo()} volume #{volume} 1 > #{fifo()}} if running?\n end", "def initialize(source, opts={})\n if source.is_a?(IO)\n image = MiniMagick::Image.read(source)\n elsif source.is_a?(ActiveFedora::Datastream)\n if Image.derivable?(source.mimeType)\n image = MiniMagick::Image.read(source.read)\n else\n raise DulHydra::Error, \"Datastream content is not an image.\"\n end\n elsif source.is_a?(String)\n if source.size > 1024\n raise DulHydra::Error, \"String too large! Do not pass binary content to this method.\"\n end\n if File.exists?(source)\n content_type = MIME::Types.type_for(source).first.to_s rescue nil\n if Image.derivable?(content_type)\n image = MiniMagick::Image.open(source)\n else\n raise DulHydra::Error, \"The file is not an image or unable to determine content type.\"\n end\n else\n raise DulHydra::Error, \"File not found at path specified.\"\n end\n else\n raise DulHydra::Error, \"Not a valid source for image derivative generation.\"\n end\n if opts.has_key?(:height) || opts.has_key?(:width)\n height = opts[:height] || opts[:width]\n width = opts[:width] || opts[:height]\n size = \"#{height}x#{width}\"\n # Do not preserve aspect ratio if :height and :width are explicitly set options\n size << \"!\" if opts.has_key?(:height) && opts.has_key?(:width)\n image.resize size\n end\n image.format opts[:format] if opts[:format]\n @derivative = image\n end" ]
[ "0.6528484", "0.64485526", "0.6294666", "0.6294666", "0.6188785", "0.60870904", "0.6076734", "0.6000879", "0.5948535", "0.5922403", "0.5920687", "0.5887592", "0.58557296", "0.5841752", "0.5839586", "0.58380365", "0.5810351", "0.5785783", "0.5745057", "0.5722244", "0.56802994", "0.56772435", "0.56734234", "0.56734234", "0.5644871", "0.56350476", "0.5634535", "0.5595585", "0.5588403", "0.5584051", "0.55689126", "0.5555521", "0.5526453", "0.5496639", "0.5471502", "0.5471089", "0.54673076", "0.54665285", "0.5458927", "0.5449287", "0.54084986", "0.53963107", "0.5391784", "0.5385878", "0.5384154", "0.53723115", "0.53669614", "0.5366747", "0.5352324", "0.53506804", "0.5336198", "0.5321071", "0.53190416", "0.53097034", "0.5296959", "0.529155", "0.52659595", "0.5239108", "0.5234354", "0.5228223", "0.5228223", "0.52194035", "0.5207198", "0.52059835", "0.5197528", "0.5189183", "0.5176671", "0.51664734", "0.5157423", "0.51571685", "0.51564056", "0.5155873", "0.5144376", "0.5137243", "0.5135905", "0.51327544", "0.5128678", "0.51273847", "0.51165795", "0.5111839", "0.51042044", "0.50990087", "0.5098822", "0.5093828", "0.5084326", "0.5079235", "0.5057959", "0.5052821", "0.5049025", "0.50391614", "0.5036429", "0.50362116", "0.50198716", "0.5009863", "0.5008209", "0.5005467", "0.5002292", "0.5000306", "0.49991298", "0.4998927" ]
0.71551806
0
Create the extra disks
def create_volumes(volume_definitions) volume_definitions.each { |volume| client.volumes.create(volume) } end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def attach_disks_specs\n attach_disk_array = []\n extraconfig = []\n attach_spod_array = []\n attach_spod_disk_info = {}\n\n pos = { :ide => 0, :scsi => 0 }\n disks_each(:no_exists?) do |disk|\n disk.one_item['TYPE'] == 'CDROM' ? k = :ide : k = :scsi\n\n if disk.storpod?\n spec = calculate_add_disk_spec(disk.one_item, pos[k])\n attach_spod_array << spec\n\n controller_key = spec[:device].controllerKey\n unit_number = spec[:device].unitNumber\n\n unit_ctrl = \"#{controller_key}-#{unit_number}\"\n attach_spod_disk_info[unit_ctrl] = disk.id\n else\n aspec = calculate_add_disk_spec(disk.one_item, pos[k])\n extra_key = \"opennebula.mdisk.#{disk.one_item['DISK_ID']}\"\n extra_value = aspec[:device].key.to_s\n\n attach_disk_array << aspec\n extraconfig << { :key => extra_key, :value => extra_value }\n end\n\n pos[k]+=1\n end\n\n { :disks => attach_disk_array,\n :spods => attach_spod_array,\n :spod_info => attach_spod_disk_info,\n :extraconfig => extraconfig }\n end", "def create_iscsi_disks(vbox)\n unless controller_exists(ISCSI_NAME, 'SATA Controller')\n vbox.customize ['storagectl', :id,\n '--name', 'SATA Controller',\n '--add', 'sata']\n end\n\n (1..10).each do |i|\n id = i.to_s.rjust(2, '0')\n disk = \"./tmp/disk#{i}.vdi\"\n\n unless File.exist?(disk)\n vbox.customize ['createmedium', 'disk',\n '--filename', disk,\n '--size', '100',\n '--format', 'VDI',\n '--variant', 'fixed']\n end\n\n vbox.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', i,\n '--type', 'hdd',\n '--medium', disk\n ]\n vbox.customize [\n 'setextradata', :id,\n \"VBoxInternal/Devices/ahci/0/Config/Port#{i}/SerialNumber\",\n \"081118FC1221NCJ6G8#{id}\"\n ]\n end\nend", "def add_disk(server, size)\n host = server.to_s\n\n # Increment disk id\n if !DISKS.key?(host) then\n DISKS[host] = 0\n else\n DISKS[host] += 1\n end\n disk_id = DISKS[host]\n disk_filename = \".vagrant/disks/\" + host + \"_\" + disk_id.to_s + \".vdi\"\n\n server.vm.provider \"virtualbox\" do |v|\n # Create disk if it not exist\n unless File.exist?(disk)\n v.customize [\"createhd\", \"--filename\", disk_filename, \"--size\", size * 1024 * 1024]\n end\n v.customize ['storageattach', :id, '--storagectl', 'SATA Controller', '--port', disk_id, '--device', 0, '--type', 'hdd', '--medium', disk]\n end\nend", "def configure_disks(vb, server, hostname, name)\n vminfo = vm_info(name)\n disks = server['disks'] || {}\n unless vminfo =~ /Storage Controller Name \\(1\\): *SATA Controller/\n # puts \"Attaching SATA Controller\"\n vb.customize [\n 'storagectl', :id,\n '--name', 'SATA Controller',\n '--add', 'sata',\n '--portcount', disks.size\n ]\n # else\n # puts 'SATA Controller already attached'\n end\n\n disks.each_with_index do |disk, i|\n disk_name = disk.first\n disk_size = disk.last['size']\n disk_uuid = disk.last['uuid']\n real_uuid = \"00000000-0000-0000-0000-#{disk_uuid.rjust(12,'0')}\"\n if server['cluster']\n disk_filename = File.join(VAGRANT_ROOT, \"#{disk_name}_#{server['cluster']}.vdi\")\n else\n disk_filename = File.join(VAGRANT_ROOT, \"#{disk_name}.vdi\")\n end\n\n if File.file?(disk_filename)\n # puts \"Disk #{disk_filename} already created\"\n disk_hash = `VBoxManage showmediuminfo \"#{disk_filename}\"`.scan(/(.*): *(.*)/).to_h\n current_uuid = disk_hash['UUID']\n else\n # puts \"Creating disk #{disk_filename}\"\n current_uuid = '0'\n if server['cluster']\n vb.customize [\n 'createhd',\n '--filename', disk_filename,\n '--size', disk_size.to_s,\n '--variant', 'Fixed'\n ]\n vb.customize [\n 'modifyhd', disk_filename,\n 
'--type', 'shareable'\n ]\n else\n vb.customize [\n 'createhd',\n '--filename', disk_filename,\n '--size', disk_size.to_s,\n '--variant', 'Standard'\n ]\n end\n end\n\n # Conditional for adding disk_uuid\n if server['cluster'] && current_uuid == real_uuid\n # puts \"Attaching shareable disk #{disk_filename}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--mtype', 'shareable'\n ]\n elsif server['cluster']\n # puts \"Attaching shareable disk #{disk_filename}, adding UUID #{real_uuid}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--mtype', 'shareable',\n '--setuuid', real_uuid\n ]\n elsif current_uuid == real_uuid\n # puts \"Attaching normal disk #{disk_filename}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename\n ]\n else\n # puts \"Attaching normal disk #{disk_filename}, adding UUID #{real_uuid}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--setuuid', real_uuid\n ]\n end\n end\nend", "def create_iscsi_disks(vbox, name)\n unless controller_exists(name, 'SATA Controller')\n vbox.customize ['storagectl', :id,\n '--name', 'SATA Controller',\n '--add', 'sata']\n end\n\n dir = \"#{ENV['HOME']}/VirtualBox\\ VMs/vdisks\"\n Dir.mkdir dir unless File.directory?(dir)\n\n osts = (1..20).map { |x| [\"OST#{x}\", '5120'] }\n\n [\n %w[mgt 512],\n %w[mdt0 5120]\n ].concat(osts).each_with_index do |(name, size), i|\n file_to_disk = \"#{dir}/#{name}.vdi\"\n port = (i + 1).to_s\n\n unless File.exist?(file_to_disk)\n vbox.customize ['createmedium',\n 'disk',\n '--filename',\n file_to_disk,\n '--size',\n size,\n '--format',\n 'VDI',\n '--variant',\n 'fixed']\n end\n\n vbox.customize ['storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', port,\n '--type', 'hdd',\n '--medium', file_to_disk,\n '--device', '0']\n\n vbox.customize ['setextradata', :id,\n \"VBoxInternal/Devices/ahci/0/Config/Port#{port}/SerialNumber\",\n name.ljust(20, '0')]\n end\nend", "def add_physical_disks\n add_collection(physical_infra, :physical_disks) do |builder|\n builder.add_properties(\n :manager_ref => %i(physical_storage ems_ref),\n :manager_ref_allowed_nil => %i(ems_ref)\n )\n end\n end", "def create_delta_disks\n begin\n disks =\n @item['config.hardware.device']\n .grep(RbVmomi::VIM::VirtualDisk)\n disk_without_snapshots = disks.select do |x|\n x.backing.parent.nil?\n end\n rescue StandardError\n error = 'Cannot extract existing disks on template.'\n use_linked_clones = false\n return error, use_linked_clones\n end\n\n if !disk_without_snapshots.empty?\n\n begin\n if self['config.template']\n @item.MarkAsVirtualMachine(\n :pool => resource_pool,\n :host => self['runtime.host']\n )\n end\n rescue StandardError => e\n @item.MarkAsTemplate()\n error = 'Cannot mark the template as a VirtualMachine. '\\\n 'Not using linked clones. 
'\\\n \"Reason: #{e.message}/#{e.backtrace}\"\n use_linked_clones = false\n return error, use_linked_clones\n end\n\n begin\n spec = {}\n spec[:deviceChange] = []\n\n disk_without_snapshots.each do |disk|\n remove_disk_spec =\n {\n :operation => :remove,\n :device => disk\n }\n spec[:deviceChange] << remove_disk_spec\n\n add_disk_spec =\n {\n :operation => :add,\n :fileOperation => :create,\n :device => disk.dup.tap do |x|\n x.backing =\n x.backing.dup\n x.backing.fileName =\n \"[#{disk.backing.datastore.name}]\"\n x.backing.parent =\n disk.backing\n end\n }\n spec[:deviceChange] << add_disk_spec\n end\n\n @item\n .ReconfigVM_Task(\n :spec => spec\n ).wait_for_completion unless spec[:deviceChange].empty?\n rescue StandardError => e\n error = 'Cannot create the delta disks on top '\\\n \"of the template. Reason: #{e.message}.\"\n\n if VCenterDriver::CONFIG[:debug_information]\n error += \"\\n\\n#{e.backtrace}\"\n end\n\n use_linked_clones = false\n return error, use_linked_clones\n end\n\n begin\n @item.MarkAsTemplate()\n rescue StandardError => e\n error = 'Cannot mark the VirtualMachine as '\\\n 'a template. Not using linked clones.' \\\n \" Reason: #{e.message}.\"\n\n if VCenterDriver::CONFIG[:debug_information]\n error += \"\\n\\n#{e.backtrace}\"\n end\n\n use_linked_clones = false\n return error, use_linked_clones\n end\n end\n\n error = nil\n use_linked_clones = true\n\n [error, use_linked_clones]\n end", "def add_instance_disk(array, instance, disk)\n if instance.managed_disk?\n disk_type = 'managed'\n disk_location = disk.managed_disk.id\n managed_disk = @managed_disks.find { |d| d.id.casecmp(disk_location).zero? }\n\n if managed_disk\n disk_size = managed_disk.properties.disk_size_gb.gigabytes\n mode = managed_disk.try(:sku).try(:name)\n else\n _log.warn(\"Unable to find disk information for #{instance.name}/#{instance.resource_group}\")\n disk_size = nil\n mode = nil\n end\n else\n disk_type = 'unmanaged'\n disk_location = disk.try(:vhd).try(:uri)\n disk_size = disk.try(:disk_size_gb).try(:gigabytes)\n\n if disk_location\n uri = Addressable::URI.parse(disk_location)\n storage_name = uri.host.split('.').first\n container_name = File.dirname(uri.path)\n blob_name = uri.basename\n\n storage_acct = @storage_accounts.find { |s| s.name.casecmp(storage_name).zero? 
}\n mode = storage_acct.try(:sku).try(:name)\n\n if @options.get_unmanaged_disk_space && disk_size.nil?\n storage_keys = @sas.list_account_keys(storage_acct.name, storage_acct.resource_group)\n storage_key = storage_keys['key1'] || storage_keys['key2']\n blob_props = storage_acct.blob_properties(container_name, blob_name, storage_key)\n disk_size = blob_props.content_length.to_i\n end\n end\n end\n\n disk_record = {\n :device_type => 'disk',\n :controller_type => 'azure',\n :device_name => disk.name,\n :location => disk_location,\n :size => disk_size,\n :disk_type => disk_type,\n :mode => mode\n }\n\n array << disk_record\n end", "def attach_volumes(node, disk_sizes)\n if $provider == :virtualbox\n node.vm.provider :virtualbox do |v, override|\n disk_num = 0\n disk_sizes.each do |disk_size|\n disk_num += 1\n diskname = File.join(File.dirname(File.expand_path(__FILE__)), \".virtualbox\", \"#{node.vm.hostname}-#{disk_num}.vdi\")\n unless File.exist?(diskname)\n v.customize ['createhd', '--filename', diskname, '--size', disk_size * 1024]\n end\n v.customize ['storageattach', :id, '--storagectl', 'SATA Controller', '--port', disk_num, '--device', 0, '--type', 'hdd', '--medium', diskname]\n end\n end\n end\n\n if $provider == :vmware_fusion\n node.vm.provider :vmware_fusion do |v, override|\n vdiskmanager = '/Applications/VMware\\ Fusion.app/Contents/Library/vmware-vdiskmanager'\n unless File.exist?(vdiskmanager)\n dir = File.join(File.dirname(File.expand_path(__FILE__)), \".vmware\")\n unless File.directory?( dir )\n Dir.mkdir dir\n end\n\n disk_num = 0\n disk_sizes.each do |disk_size|\n disk_num += 1\n diskname = File.join(dir, \"#{node.vm.hostname}-#{disk_num}.vmdk\")\n unless File.exist?(diskname)\n `#{vdiskmanager} -c -s #{disk_size}GB -a lsilogic -t 1 #{diskname}`\n end\n\n v.vmx[\"scsi0:#{disk_num}.filename\"] = diskname\n v.vmx[\"scsi0:#{disk_num}.present\"] = 'TRUE'\n v.vmx[\"scsi0:#{disk_num}.redo\"] = ''\n end\n end\n end\n end\n\n if $provider == :parallels\n node.vm.provider :parallels do |v, override|\n disk_sizes.each do |disk_size|\n v.customize ['set', :id, '--device-add', 'hdd', '--size', disk_size * 1024]\n end\n end\n end\n\nend", "def attach_disk(config, prefix, disk_num, size)\n filename = \"#{prefix}#{disk_num}.vdi\"\n config.vm.provider \"virtualbox\" do |vb|\n if !File.exist?(filename) \n vb.customize ['createhd', '--filename', filename, '--size', (size * 1024).floor, '--variant', 'fixed']\n vb.customize ['modifyhd', filename, '--type', 'shareable']\n end\n\n vb.customize ['storageattach', :id, '--storagectl', 'SATAController', '--port', disk_num + 2, '--device', 0, '--type', 'hdd', '--medium', filename]\n end\nend", "def create_partitions\n info(\"Creating disk with #{PARTITION_TABLE_TYPE} parition table\")\n execute!(\"parted -s #{@dev} mklabel #{PARTITION_TABLE_TYPE}\")\n\n start_size = FIRST_PARTITION_OFFSET\n end_size = FIRST_PARTITION_OFFSET\n\n unspec_part = nil\n\n # Create the partitions\n @partition_layout.each_with_index do |part, index|\n # Deal with any \"open ended\" partitions last\n if not part.size_mb.is_a?(Integer)\n unspec_part = part\n next\n end\n\n start_size = end_size\n end_size += part.size_mb\n\n info(\"Creating partition #{part.label} (#{part.fs}, #{part.size_mb}MiB)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{start_size}MiB #{end_size}MiB\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{index + 1} #{k} #{v}\")\n }\n\n label_path = 
\"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n\n if not part.fs\n warn(\"No filesystem specified for #{part.label}. Skipping FS\")\n else\n create_filesystem(part.fs, label_path, part.label)\n end\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n # Deal with any \"open ended\" partitions (that have an unspecified size_mb)\n if unspec_part\n part = unspec_part\n info(\"Creating partition #{part.label} (#{part.fs}, 100% remaining)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{end_size}MiB 100%\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{@partition_layout.length} #{k} #{v}\")\n }\n\n label_path = \"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n create_filesystem(part.fs, label_path, part.label) if part.fs\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n nil\n end", "def gen_disks_line(n)\n node_disks = []\n if n.disks.length > 0 then\n node_disks = n.node_disks.sort.map{|x| x.xen_name }\n else\n if n.only_supports_ide?\n node_disks << \"file:/xen/domains/#{n.hostname}/disk.img,hda1,w\"\n else\n node_disks << \"file:/xen/domains/#{n.hostname}/disk.img,sda1,w\"\n end\n end\n node_disks.map { |x| \" '#{x}'\" }.join(\",\\n\")\nend", "def addVolume(dev, size, type: \"pd-standard\", delete_on_termination: false)\n devname = dev.gsub(/.*?\\/([^\\/]+)$/, '\\1')\n resname = MU::Cloud::Google.nameStr(@mu_name+\"-\"+devname)\n MU.log \"Creating disk #{resname}\"\n\n description = @deploy ? @deploy.deploy_id : @mu_name+\"-\"+devname\n\n newdiskobj = MU::Cloud::Google.compute(:Disk).new(\n size_gb: size,\n description: description,\n zone: @config['availability_zone'],\n# type: \"projects/#{config['project']}/zones/#{config['availability_zone']}/diskTypes/pd-ssd\",\n type: \"projects/#{@project_id}/zones/#{@config['availability_zone']}/diskTypes/#{type}\",\n# Other values include pd-ssd and local-ssd\n name: resname\n )\n\n begin\n newdisk = MU::Cloud::Google.compute(credentials: @config['credentials']).insert_disk(\n @project_id,\n @config['availability_zone'],\n newdiskobj\n )\n rescue ::Google::Apis::ClientError => e\n if e.message.match(/^alreadyExists: /)\n MU.log \"Disk #{resname} already exists, ignoring request to create\", MU::WARN\n return\n else\n raise e\n end\n end\n\n attachobj = MU::Cloud::Google.compute(:AttachedDisk).new(\n device_name: devname,\n source: newdisk.self_link,\n type: \"PERSISTENT\",\n auto_delete: delete_on_termination\n )\n\n MU.log \"Attaching disk #{resname} to #{@cloud_id} at #{devname}\"\n MU::Cloud::Google.compute(credentials: @config['credentials']).attach_disk(\n @project_id,\n @config['availability_zone'],\n @cloud_id,\n attachobj\n )\n\n end", "def discover_new_disks\n logger.debug 'Discovering new virtual disks.'\n\n StorageNode.list_disks().each do |volume|\n disk = VirtualDisk.find_by_disk_number(volume)\n next if disk\n\n disk_size = StorageNode.volume_size(volume)\n logger.info 'autodiscovered virtual disk %d with size %d Mb' % [volume, disk_size]\n disk = VirtualDisk.new(volume, disk_size)\n disk.save('discovered')\n end\n end", "def mount_ephemerals(attrs={})\n owner.volume(:ephemeral0, attrs){ device '/dev/sdb'; volume_id 'ephemeral0' ; mount_point '/mnt' ; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 0\n owner.volume(:ephemeral1, 
attrs){ device '/dev/sdc'; volume_id 'ephemeral1' ; mount_point '/mnt2'; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 1\n owner.volume(:ephemeral2, attrs){ device '/dev/sdd'; volume_id 'ephemeral2' ; mount_point '/mnt3'; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 2\n owner.volume(:ephemeral3, attrs){ device '/dev/sde'; volume_id 'ephemeral3' ; mount_point '/mnt4'; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 3\n end", "def create_gdom_disk(options)\n client_disk = options['q_struct']['gdom_disk'].value\n disk_size = options['q_struct']['gdom_size'].value\n disk_size = disk_size.downcase\n vds_disk = options['name']+\"_vdisk0\"\n if not client_disk.match(/\\/dev/)\n if not File.exist?(client_disk)\n message = \"Information:\\tCreating guest domain disk \"+client_disk+\" for client \"+options['name']\n command = \"mkfile -n #{disk_size} #{client_disk}\"\n output = execute_command(options,message,command)\n end\n end\n message = \"Information:\\tChecking Virtual Disk Server device doesn't already exist\"\n command = \"ldm list-services |grep 'primary-vds0' |grep '#{vds_disk}'\"\n output = execute_command(options,message,command)\n if not output.match(/#{options['name']}/)\n message = \"Information:\\tAdding disk device to Virtual Disk Server\"\n command = \"ldm add-vdsdev #{client_disk} #{vds_disk}@primary-vds0\"\n output = execute_command(options,message,command)\n end\n return\nend", "def addDisk(backingFile, sizeInMB, label = nil, summary = nil, options = {})\n # Remove nil keys if any, since the next line may not work\n options.reject! { |_k, v| v.nil? }\n # Merge default values:\n # - persistent is set to true to be backward compatible\n # - thin_provisioned is set to false explicitly since we call to_s on it further, so nil will not work for us\n options = {:persistent => true, :thin_provisioned => false}.merge(options)\n ck, un = available_scsi_units.first\n raise \"addDisk: no SCSI controller found\" unless ck\n\n vmConfigSpec = VimHash.new(\"VirtualMachineConfigSpec\") do |vmcs|\n vmcs.deviceChange = VimArray.new(\"ArrayOfVirtualDeviceConfigSpec\") do |vmcs_vca|\n vmcs_vca << VimHash.new(\"VirtualDeviceConfigSpec\") do |vdcs|\n vdcs.operation = VirtualDeviceConfigSpecOperation::Add\n if sizeInMB < 0\n sizeInMB = -sizeInMB\n else\n vdcs.fileOperation = VirtualDeviceConfigSpecFileOperation::Create\n end\n vdcs.device = VimHash.new(\"VirtualDisk\") do |vDev|\n vDev.key = -100 # temp key for creation\n vDev.capacityInKB = sizeInMB * 1024\n vDev.controllerKey = ck\n vDev.unitNumber = un\n # The following doesn't seem to work.\n vDev.deviceInfo = VimHash.new(\"Description\") do |desc|\n desc.label = label\n desc.summary = summary\n end if label || summary\n vDev.connectable = VimHash.new(\"VirtualDeviceConnectInfo\") do |con|\n con.allowGuestControl = \"false\"\n con.startConnected = \"true\"\n con.connected = \"true\"\n end\n if options[:dependent]\n mode = (options[:persistent] ? VirtualDiskMode::Persistent : VirtualDiskMode::Nonpersistent)\n else\n mode = (options[:persistent] ? 
VirtualDiskMode::Independent_persistent : VirtualDiskMode::Independent_nonpersistent)\n end\n vDev.backing = VimHash.new(\"VirtualDiskFlatVer2BackingInfo\") do |bck|\n bck.diskMode = mode\n bck.split = \"false\"\n bck.thinProvisioned = options[:thin_provisioned].to_s\n bck.writeThrough = \"false\"\n bck.fileName = backingFile\n begin\n dsn = @invObj.path2dsName(@dsPath)\n bck.datastore = @invObj.dsName2mo_local(dsn)\n rescue\n bck.datastore = nil\n end\n end\n end\n end\n end\n end\n\n logger.info \"MiqVimVm(#{@invObj.server}, #{@invObj.username}).addDisk: calling reconfigVM_Task\"\n taskMor = @invObj.reconfigVM_Task(@vmMor, vmConfigSpec)\n logger.info \"MiqVimVm(#{@invObj.server}, #{@invObj.username}).addDisk: returned from reconfigVM_Task\"\n waitForTask(taskMor)\n end", "def info_disks\n @disks = {}\n\n keys = disk_keys\n vc_disks = vcenter_disks_get\n one_disks = one_disks_list\n\n one_disks.each do |one_disk|\n index = one_disk['DISK_ID']\n\n disk = query_disk(one_disk, keys, vc_disks)\n\n vc_dev = vc_disks.delete(disk) if disk\n\n if vc_dev\n @disks[index] = Disk.new(index.to_i, one_disk, vc_dev)\n else\n @disks[index] = Disk.one_disk(index.to_i, one_disk)\n end\n end\n\n vc_disks.each {|d| @disks[d[:path_wo_ds]] = Disk.vc_disk(d) }\n\n @disks\n end", "def create(size)\n disk_id = uuid\n sh \"zfs create -o reservation=1024 -o quota=1024 #{base}/#{disk_id}\"\n disk_id\n end", "def create_vbox_hdd(client_name,vbox_disk_name)\n message = \"Creating:\\tVM hard disk for \"+client_name\n command = \"VBoxManage createhd --filename \\\"#{vbox_disk_name}\\\" --size \\\"#{$default_vm_size}\\\"\"\n execute_command(message,command)\n return\nend", "def create_template_delta_disks(_pool)\n puts(\"#{self.class.name} does not implement create_template_delta_disks\")\n end", "def merge_vm_disks(host, global, vb, controller)\n # Only get the first entry from global['vm_groups'] if more than one entry present\n vb_dir=global['vm_basedir'] ? 
global['vm_basedir'] + global['vm_groups'].partition(',')[0] + \"/\" + host['vm_name'] + \"/\" : \"./.virtualbox/\"\n if global['vm_disks'] or host['vm_disks']\n merge_hash = merge_2_array_of_hashes(global['vm_disks'], host['vm_disks'])\n merge_hash.each do |key, value|\n diskname=\"#{vb_dir}#{host['vm_name']}-#{key}.vdi\"\n unless File.exist?(diskname)\n vb.customize [\"createmedium\", \"disk\", \"--filename\", diskname, \"--size\", value * 1024 , \"--format\", \"vdi\", \"--variant\", \"Standard\"]\n end\n vb.customize [\"storageattach\", :id , \"--storagectl\", controller, \"--port\", key, \"--device\", \"0\", \"--type\", \"hdd\", \"--medium\", diskname]\n end\n end\nend", "def one_disks_list\n one_item.info if one_item.instance_of?(OpenNebula::VirtualMachine)\n one_item.retrieve_xmlelements('TEMPLATE/DISK')\n end", "def create_partition_to_fill_disk(disk)\n # @disk.create_partition('primary', '100%')\n disk.create_partition_table # LinuxAdmin::Disk.create_partition has this already...\n AwesomeSpawn.run!(\"parted -s #{disk.path} mkpart primary 0% 100%\")\n\n # FIXME: Refetch the disk after creating the partition\n disk = LinuxAdmin::Disk.local.find { |d| d.path == disk.path }\n disk.partitions.first\n end", "def prepare_vm_disk_attachment(disk_spec, storage_spec)\n disk_spec = disk_spec.symbolize_keys\n attachment_builder = ManageIQ::Providers::Ovirt::InfraManager::DiskAttachmentBuilder.new(\n :size_in_mb => disk_spec[:disk_size_in_mb],\n :storage => storage_spec,\n :name => disk_spec[:disk_name],\n :thin_provisioned => disk_spec[:thin_provisioned],\n :bootable => disk_spec[:bootable],\n :interface => disk_spec[:interface]\n )\n attachment_builder.disk_attachment\n end", "def create_test_disk(connection, zone)\n zone = 'us-central1-a'\n random_string = SecureRandom.hex\n\n disk = connection.disks.create({\n :name => \"fog-test-disk-#{random_string}\",\n :size_gb => \"10\",\n :zone => zone,\n :source_image => \"debian-7-wheezy-v20140408\",\n })\n disk.wait_for { ready? }\n disk\nend", "def attach_disk(disk)\n # Adding a new disk in newer vSphere versions\n # automatically cleans all system snapshots\n # https://github.com/OpenNebula/one/issues/5409\n if snapshots? || one_snapshots?\n error_msg = 'Existing sytem snapshots, cannot change disks. 
'\n error_msg << 'Please remove all snapshots and try again'\n raise error_msg\n end\n\n spec_hash = {}\n device_change = []\n\n # Extract unmanaged_keys\n unmanaged_keys = disk_keys\n vc_disks = vcenter_disks_get\n\n # Check if we're dealing with a StoragePod SYSTEM ds\n storpod = disk['VCENTER_DS_REF'].start_with?('group-')\n\n # Check if disk being attached is already connected to the VM\n raise 'DISK is already connected to VM' if disk_attached_to_vm(\n disk, unmanaged_keys, vc_disks\n )\n\n # Generate vCenter spec and reconfigure VM\n add_spec = calculate_add_disk_spec(disk)\n device_change << add_spec\n raise 'Could not generate DISK spec' if device_change.empty?\n\n extra_key = \"opennebula.mdisk.#{disk['DISK_ID']}\"\n extra_value = add_spec[:device].key.to_s\n\n spec_hash[:deviceChange] = device_change\n spec_hash[:extraConfig] =\n [{ :key => extra_key, :value => extra_value }]\n spec = RbVmomi::VIM.VirtualMachineConfigSpec(spec_hash)\n\n begin\n if storpod\n # Ask for StorageDRS recommendation\n # to reconfigure VM (AddDisk)\n sm = storagemanager\n\n # Disk id is -1 as I don't know\n # what disk id is going to be set\n disk_locator = [RbVmomi::VIM.PodDiskLocator(:diskId => -1)]\n\n # Disk locator is required for AddDisk\n vmpod_hash = {}\n vmpod_hash[:storagePod] = get_ds\n vmpod_hash[:disk] = disk_locator\n vmpod_config =\n RbVmomi::VIM::VmPodConfigForPlacement(\n vmpod_hash\n )\n\n # The storage pod selection requires initialize\n spod_hash = {}\n spod_hash[:initialVmConfig] = [vmpod_config]\n spod_select =\n RbVmomi::VIM::StorageDrsPodSelectionSpec(\n spod_hash\n )\n storage_spec = RbVmomi::VIM.StoragePlacementSpec(\n :type => :reconfigure,\n :podSelectionSpec => spod_select,\n :vm => self['_ref'],\n :configSpec => spec\n )\n\n # Query a storage placement recommendation\n result = sm\n .RecommendDatastores(\n :storageSpec => storage_spec\n ) rescue nil\n\n if result.nil?\n raise 'Could not get placement '\\\n 'specification for StoragePod'\n end\n\n if !result.respond_to?(:recommendations) ||\n result.recommendations.empty?\n raise 'Could not get placement '\\\n 'specification for StoragePod'\n end\n\n # Get recommendation key to be applied\n key = result.recommendations.first.key ||= ''\n\n if key.empty?\n raise 'Missing Datastore recommendation for StoragePod'\n end\n\n # Apply recommendation\n sm.ApplyStorageDrsRecommendation_Task(\n :key => [key]\n ).wait_for_completion\n\n # Add the key for the volatile disk to the\n # unmanaged opennebula.disk.id variables\n unit_number =\n spec_hash[:deviceChange][0][:device]\n .unitNumber\n controller_key =\n spec_hash[:deviceChange][0][:device]\n .controllerKey\n key =\n get_vcenter_disk_key(\n unit_number,\n controller_key\n )\n spec_hash = {}\n reference = {}\n reference[:key] =\n \"opennebula.disk.#{disk['DISK_ID']}\"\n reference[:value] = key.to_s\n spec_hash[:extraConfig] = [reference]\n @item\n .ReconfigVM_Task(\n :spec => spec_hash\n ).wait_for_completion\n else\n @item\n .ReconfigVM_Task(\n :spec => spec\n ).wait_for_completion\n end\n # Modify extraConfig if disks has a bad key\n sync_extraconfig_disk(spec_hash)\n rescue StandardError => e\n error = \"Cannot attach DISK to VM: #{e.message}.\"\n\n if VCenterDriver::CONFIG[:debug_information]\n error += \"\\n\\n#{e.backtrace.join(\"\\n\")}\"\n end\n\n raise error\n end\n end", "def create_disk(size)\n @logger.info(\"create_disk(#{size})\")\n disk_name = \"bosh-disk-#{SecureRandom.uuid}\"\n logger.info(\"Start to create an empty vhd blob: blob_name: #{disk_name}.vhd\")\n 
@blob_manager.create_empty_vhd_blob(container_name, \"#{disk_name}.vhd\", size)\n disk_name\n end", "def extra_volumes(base_name)\n configs = []\n\n config[:extra_volumes].each_with_index do |data, index|\n disk_id = (index + 1).to_s.rjust(2, '0')\n data[:name] = \"#{base_name}-extra-#{disk_id}\"\n configs << data\n end\n\n configs\n end", "def create_ebs_stripe(nickname, new_volume_size_in_gb, options = {})\n self.execute_terminate_volumes if options[:force]\n devices = @disk.generate_physical_device_names(options[:stripe])\n each_volume_size = (new_volume_size_in_gb / options[:stripe].to_f).ceil\n devices.each do |d| \n vol = self.create_volume(:nickname => \"#{nickname}-#{d}\", \n :description => \"Created by RS tools to initialize new EBS stripe volume\",\n :size => each_volume_size)\n raise vol unless vol['aws_id']\n \"Attaching new EBS volume: #{vol['aws_id']}\"\n att = self.attach_volume(vol['aws_id'], d)\n end\n devices.each {|d| self.wait_for_attachment(d) }\n\n @disk.initialize_stripe(devices)\n end", "def setup_disk(path)\n dev = ::File.readlink(path)\n full_path = ::File.absolute_path(dev, ::File.dirname(path))\n\n fs_type = get_fs_type(full_path)\n if fs_type.nil?\n Mixlib::ShellOut.new(\"mkfs.ext4 #{full_path}\").run_command\n fs_type = 'ext4'\n end\n\n fs_type\nend", "def setup_disk(path)\n dev = ::File.readlink(path)\n full_path = ::File.absolute_path(dev, ::File.dirname(path))\n\n fs_type = get_fs_type(full_path)\n if fs_type.nil?\n Mixlib::ShellOut.new(\"mkfs.ext4 #{full_path}\").run_command\n fs_type = 'ext4'\n end\n\n fs_type\nend", "def create_disk(name, size_mb, vm = nil, retries = @retries[\"default\"])\n new_disk = Xml::WrapperFactory.create_instance(\"DiskCreateParams\")\n new_disk.name = name\n new_disk.size_bytes = size_mb * 1024 * 1024 # VCD expects bytes\n new_disk.bus_type = Xml::HARDWARE_TYPE[:SCSI_CONTROLLER]\n new_disk.bus_sub_type = Xml::BUS_SUB_TYPE[:LSILOGIC]\n new_disk.add_locality(vm) if vm\n vdc = get_ovdc\n @logger.info(\"Creating independent disk #{name} of #{size_mb}MB.\")\n @logger.info(\"Disk locality ist set to #{vm.name} #{vm.urn}.\") if vm\n disk = @connection.post(vdc.add_disk_link, new_disk,\n Xml::MEDIA_TYPE[:DISK_CREATE_PARAMS])\n raise ApiRequestError unless disk.respond_to?(:running_tasks)\n # Creating a disk returns a disk with tasks inside\n retries.times do |try|\n return disk if disk.running_tasks.nil? || disk.running_tasks.empty?\n @logger.info(\"Disk #{disk.urn} has running tasks. Waiting for \" +\n \"tasks to finish. 
Try: #{try}/#{retries} .\" )\n disk.running_tasks.each do |t|\n monitor_task(t)\n end\n disk = @connection.get(disk)\n end\n end", "def create_from_disk(*filenames); end", "def create_fusion_vm_disk(options,fusion_vm_dir,fusion_disk_file)\n if File.exist?(fusion_disk_file)\n handle_output(options,\"Warning:\\t#{options['vmapp']} VM disk '#{fusion_disk_file}' already exists for #{options['name']}\")\n quit(options)\n end\n check_dir_exists(options,fusion_vm_dir)\n if options['host-os-name'].to_s.match(/Darwin/)\n vdisk_bin = \"/Applications/VMware Fusion.app/Contents/Library/vmware-vdiskmanager\"\n else\n vdisk_bin = \"/usr/bin/vmware-vdiskmanager\"\n end\n message = \"Creating \\t#{options['vmapp']} disk '\"+fusion_disk_file+\"' for \"+options['name']\n command = \"cd \\\"#{fusion_vm_dir}\\\" ; \\\"#{vdisk_bin}\\\" -c -s \\\"#{options['size']}\\\" -a LsiLogic -t 0 \\\"#{fusion_disk_file}\\\"\"\n execute_command(options,message,command)\n return\nend", "def compute_deltas \n req_disks = @new_resource.disks\n keyed_req = {} # for easy lookup, make a map of the requested disks \n cur = @ring_test\n name = @new_resource.name\n @to_add = []\n @to_rem = []\n \n \n ## figure out which disks need adding\n req_disks.each {|disk| \n key = RingInfo.dev_key disk[:ip],disk[:port],disk[:dev_name]\n @to_add << disk unless cur and cur.devices[key] # add unless present\n keyed_req[key] = disk\n } \n \n ### figure out which disks need removing\n cur.devices.each {|key, d|\n @to_rem << d unless keyed_req[key] # remove unless still requested\n } if cur\n \n Chef::Log.info(\"disks, to add #{@to_add.length} , to remove: #{@to_rem.length}\" ) \n Chef::Log.debug(\"disks, to add #{@to_add.join(\";\")} , to remove: #{@to_rem.join(\";\")}\" )\n \nend", "def create_partition device, partition_type = 'primary', start_unit, end_unit\n command = 'parted'\n params = \"#{device.path} -s -a optimal unit MB mkpart #{partition_type} ext3 #{start_unit} -- #{end_unit}\"\n parted = CommandsExecutor.new command, params\n parted.execute\n raise \"Command execution error: #{parted.stderr.read}\" if not parted.success?\n probe_kernal device\n end", "def compute_deltas\n req_disks = @new_resource.disks\n keyed_req = {} # for easy lookup, make a map of the requested disks\n cur = @ring_test\n name = @new_resource.name\n @to_add = []\n @to_rem = []\n\n ## figure out which disks need adding\n req_disks.each {|disk|\n key = RingInfo.dev_key disk[:ip],disk[:port],disk[:dev_name]\n @to_add << disk unless cur and cur.devices[key] # add unless present\n keyed_req[key] = disk\n }\n\n ### figure out which disks need removing\n cur.devices.each {|key, d|\n @to_rem << d unless keyed_req[key] # remove unless still requested\n } if cur\n\n Chef::Log.info(\"disks, to add #{@to_add.length} , to remove: #{@to_rem.length}\")\n Chef::Log.debug(\"disks, to add #{@to_add.join(\";\")} , to remove: #{@to_rem.join(\";\")}\")\n\nend", "def add_vm_disks(vm_service, disk_specs)\n storage_spec = disk_specs[:storage]\n default_disk_spec = disk_specs[:default] || {}\n attachments_service = vm_service.disk_attachments_service\n disk_specs[:disks].each do |disk_spec|\n attachment = prepare_vm_disk_attachment(default_disk_spec.merge(disk_spec), storage_spec)\n attachments_service.add(attachment)\n end\n end", "def ocs_storages_disk_only\n disks = []\n for o in ocs_storages\n if o.TYPE == 'disk'\n disks << o\n end\n end\n disks\n end", "def merge_vm_shared_disks(host, global, vb, controller)\n vb_dir=global['vm_basedir'] ? 
global['vm_basedir'] + global['vm_groups'].partition(',')[0] + \"/\" : \"./.virtualbox/\"\n if global['vm_shared_disks'] or host['vm_shared_disks']\n merge_hash = merge_2_array_of_hashes(global['vm_shared_disks'], host['vm_shared_disks'])\n merge_hash.each do |key, value|\n diskname=\"#{vb_dir}shared-#{key}.vdi\"\n unless File.exist?(diskname)\n vb.customize [\"createmedium\", \"disk\", \"--filename\", diskname, \"--size\", value * 1024 , \"--format\", \"vdi\", \"--variant\", \"Fixed\"]\n end\n vb.customize [\"storageattach\", :id , \"--storagectl\", controller, \"--port\", key, \"--device\", \"0\", \"--type\", \"hdd\", \"--medium\", diskname, \"--mtype\", \"shareable\"]\n end\n end\nend", "def managed_disks\n @managed_disks ||= collect_inventory(:managed_disks) { @sds.list_all }\n end", "def new_disk_ide\n Libvirt::Spec::Device.get(:disk).new.tap do |disk|\n disk.type = :file\n disk.device = :disk\n disk.target_dev = :hda\n disk.target_bus = :ide\n end\n end", "def create_disk(disk_id, location, size, storage_account_type)\n @logger.info(\"create_disk(#{disk_id}, #{location}, #{size}, #{storage_account_type})\")\n resource_group_name = disk_id.resource_group_name()\n disk_name = disk_id.disk_name()\n caching = disk_id.caching()\n tags = AZURE_TAGS.merge({\n \"caching\" => caching\n })\n disk_params = {\n :name => disk_name,\n :location => location,\n :tags => tags,\n :disk_size => size,\n :account_type => storage_account_type\n }\n @logger.info(\"Start to create an empty managed disk `#{disk_name}' in resource group `#{resource_group_name}'\")\n @azure_client2.create_empty_managed_disk(resource_group_name, disk_params)\n end", "def detach_disks_specs\n detach_disk_array = []\n extra_config = []\n keys = disk_keys.invert\n\n ipool = VCenterDriver::VIHelper.one_pool(OpenNebula::ImagePool)\n disks_each(:detached?) 
do |d|\n key = d.key.to_s\n source = VCenterDriver::FileHelper.escape_path(d.path)\n persistent =\n VCenterDriver::VIHelper\n .find_persistent_image_by_source(\n source, ipool\n )\n\n op = { :operation => :remove, :device => d.device }\n if !persistent && d.type != 'CDROM'\n op[:fileOperation] = :destroy\n end\n detach_disk_array << op\n\n # Remove reference opennebula.disk if exist from vmx and cache\n extra_config << d.config(:delete) if keys[key]\n end\n\n [detach_disk_array, extra_config]\n end", "def create_disk(size, cloud_properties, vm_id = nil)\n with_thread_name(\"create_disk(#{size})\") do\n begin\n @logger.debug(\"Persistent Disk Size: #{size}\")\n @logger.debug(\"Cloud Properties: #{cloud_properties}\")\n @logger.debug(\"VM Id: #{vm_id}\")\n # Form a name for the volume group\n vol_group_name = \"bosh-peristent-disk-#{vm_id}-#{rand(1000)}\"\n # Create the volume group\n volume_uuid = @vol_group_manager.create_volume_group(vol_group_name)\n @logger.info(\"New volume group created [#{vol_group_name}]\")\n # Create a volume disk\n @vol_group_manager.create_volume_disk(volume_uuid, size,\n @container_uuid)\n @logger.info(\"New volume disk created on volume #{vol_group_name}.\")\n # Return volume group's uuid\n volume_uuid\n rescue => e\n logger.error(e)\n cloud_error(e.message)\n end\n end\n end", "def add_hdd_to_vbox_vm(client_name,vbox_disk_name)\n message = \"Attaching:\\tStorage to VM \"+client_name\n command = \"VBoxManage storageattach \\\"#{client_name}\\\" --storagectl \\\"#{$vbox_disk_type}\\\" --port 0 --device 0 --type hdd --medium \\\"#{vbox_disk_name}\\\"\"\n execute_command(message,command)\n return\nend", "def create_disk(size, storage_account_name, caching)\n @logger.info(\"create_disk(#{size}, #{storage_account_name}, #{caching})\")\n disk_name = generate_data_disk_name(storage_account_name, caching)\n @logger.info(\"Start to create an empty vhd blob: blob_name: #{disk_name}.vhd\")\n @blob_manager.create_empty_vhd_blob(storage_account_name, DISK_CONTAINER, \"#{disk_name}.vhd\", size)\n disk_name\n end", "def create\n begin\n # Set the partition (/dev/sdb1), device (/dev/sdb) and alignment (optimal,minimal,none etc.) 
variables\n partition= resource[:name]\n device=partition[0,(partition.length-1)]\n alignment= resource[:alignment]\n\n # Now we can create the partition\n partitions = parted('-a', resource[:alignment],'--script',device,'mklabel',resource[:part_label],'mkpart', resource[:part_type],resource[:fs_type],resource[:p_begin],resource[:p_end])\n rescue Puppet::ExecutionFailure => e\n false\n end\n end", "def create_paths\n @storage_folders_path = File.join(@storage_path, \"folders\") # Not need date because we use rsync\n FileUtils.mkdir_p @storage_folders_path\n\n today = Time.now\n @storage_databases_path = File.join(@storage_path, \"databases\", today.year.to_s, today.month.to_s, today.day.to_s)\n FileUtils.mkdir_p @storage_databases_path\n end", "def get_disks(vm_id)\n end", "def create\n if @resource[:grow_fs] == :true\n fstabentry\n growfs\n mountfs\n else\n createfs()\n fstabentry\n mountfs\n end\n end", "def build_storage_profile(disk_no,component_name,slice_size,dev_id)\n data_disk2 = Azure::ARM::Compute::Models::DataDisk.new\n dev_name = dev_id.split(\"/\").last\n data_disk2.name = \"#{component_name}-datadisk-#{dev_name}\"\n OOLog.info(\"data_disk:\"+data_disk2.name)\n data_disk2.lun = disk_no-1\n OOLog.info(\"data_disk lun:\"+data_disk2.lun.to_s)\n data_disk2.disk_size_gb = slice_size\n data_disk2.vhd = Azure::ARM::Compute::Models::VirtualHardDisk.new\n data_disk2.vhd.uri = \"https://#{@storage_account_name}.blob.core.windows.net/vhds/#{@storage_account_name}-#{component_name}-datadisk-#{dev_name}.vhd\"\n OOLog.info(\"data_disk uri:\"+data_disk2.vhd.uri)\n data_disk2.caching = Azure::ARM::Compute::Models::CachingTypes::ReadWrite\n blob_name = \"#{@storage_account_name}-#{component_name}-datadisk-#{dev_name}.vhd\"\n is_new_disk_or_old = check_blob_exist(blob_name)\n if is_new_disk_or_old == true\n data_disk2.create_option = Azure::ARM::Compute::Models::DiskCreateOptionTypes::Attach\n else\n data_disk2.create_option = Azure::ARM::Compute::Models::DiskCreateOptionTypes::Empty\n end\n data_disk2\n end", "def build_ftk_disk_items(coll_pid, disk_image_files_dir, computer_media_photos_dir)\n assembler = FtkDiskImageItemAssembler.new(:collection_pid => coll_pid, :disk_image_files_dir => disk_image_files_dir, :computer_media_photos_dir => computer_media_photos_dir)\n assembler.process\nend", "def disks\n return @disks unless @disks.empty?\n\n info_disks\n end", "def create_instances\n min_count = max_count = @bs.number_of_nodes\n puts \"\\nCreating #{max_count} on-demand instance(s)\"\n options = {\n 'ClientToken' => generate_token,\n 'KeyName' => Chef::Config[:knife][:aws_ssh_key_id],\n 'InstanceType' => @bs.flavor,\n 'SubnetId' => @bs[:novpc] ? nil : @bs.subnet_id,\n 'Placement.AvailabilityZone' => @bs.mixins.az.data,\n 'SecurityGroupId' => @bs.mixins.sg.data\n }\n options['EbsOptimized'] = !! 
@bs[:ebs_optimized]\n\n ## REVIEW\n if ami.root_device_type == \"ebs\"\n ami_map = ami.block_device_mapping.first\n block_device_mapping = {\n 'DeviceName' => ami_map['deviceName'],\n 'Ebs.VolumeSize' => ami_map['volumeSize'].to_s,\n 'Ebs.DeleteOnTermination' => ami_map['deleteOnTermination']\n }\n options['BlockDeviceMapping'] = [block_device_mapping]\n end\n\n ## Optionally only include mapped devices\n ## This way we get all of the ephemeral drives, some unmapped however\n if @bs.mixins.volume.data[:ephemeral_available]\n ephmap = @bs.mixins.volume.data.ephemeral_available.each_with_index.map do |d,i|\n {\n 'VirtualName' => \"ephemeral#{i}\",\n 'DeviceName' => d\n }\n end\n options['BlockDeviceMapping'].concat( ephmap )\n end\n\n if (max_count == 1) and @bs[:private_ip_address]\n options['PrivateIpAddress'] = @bs.private_ip_address\n puts \"Assigning IP ADDRESS : #{options['PrivateIpAddress']}\"\n end\n\n if Chef::Config[:knife][:aws_user_data]\n begin\n options['UserData']= File.read(Chef::Config[:knife][:aws_user_data])\n rescue\n ui.warn(\"Cannot read #{Chef::Config[:knife][:aws_user_data]}:\"\\\n \" #{$!.inspect}. Ignoring option.\")\n end\n end\n\n # -----------------------------------------------------------------\n tries = 5\n print_table(options, 'Launch Config')\n begin\n puts \"\\nSending request...\"\n response = connection.run_instances(@bs.image, min_count,\n max_count, options)\n ui.msg(response.inspect)\n rescue Exception => e\n ui.warn(\"#{e.message}\\nException creating instances\")\n if (tries -= 1) <= 0\n ui.warn(\"\\n\\nMax tries reached. Exiting.\\n\\n\")\n exit 1\n else\n ui.msg(\"Trying again.\\n\")\n retry\n end\n end\n # now we have our servers\n instances = response.body['instancesSet']\n # select only instances that have instanceId key and collect those ids\n # into an array\n @bs[:instance_ids] =\n instances.select {|i| i.has_key?('instanceId')}.collect do |i|\n i['instanceId']\n end\n\n puts \"\\nNumber of instances started: #{@bs.instance_ids.size}\\n\"\n sleep 10\n puts \"Getting servers..\"\n # collect an array of servers retrieved based on the instance ids we\n # obtained above\n @bs[:servers] = @bs.instance_ids.collect do |id|\n begin\n server = connection.servers.get(id)\n rescue Exception => e\n sleep 7\n retry\n end\n raise Ec2Error.new(\"server #{id} was nil\") if server.nil?\n server\n end\n end", "def os_disk(account=nil)\n\n if account.nil?\n\n template = parent.nil? ? nil : parent.template\n return nil if template.nil?\n\n found = template.find_resources(Azure::ARM::Storage::StorageAccount)\n\n if found.nil? or found.length == 0\n account = template.storage_account do\n account_type Azure::ARM::Storage::StorageAccount::Standard_LRS\n end\n elsif found.length == 1\n account = found[0]\n else\n fail ArgumentError, 'cannot determine which storage account to use of the VM disks'\n end\n\n elsif account.is_a? String\n\n template = parent.nil? ? 
nil : parent.template\n return nil if template.nil?\n\n found = template.find_resource(Azure::ARM::Storage::StorageAccount, account)\n\n if found.nil?\n account = template.storage_accounts account do\n account_type Azure::ARM::Storage::StorageAccount::Standard_LRS\n end\n else\n account = found\n end\n\n end\n\n parent.add_dependency account\n\n name = parent.generate_name(parent.name.to_s + 'dsk')\n\n if parent.copy\n disk_uri = concat('http://', account.name, '.blob.core.windows.net/disks/', name, copyIndex(), '.vhd')\n else\n disk_uri = concat('http://', account.name, '.blob.core.windows.net/disks/', name, '.vhd')\n end\n\n props = { name: name,\n caching: 'ReadWrite',\n create_option: 'FromImage',\n vhd: { uri: disk_uri } }\n\n if parent.properties.nil? or parent.properties.storage_profile.nil?\n storage_profile os_disk: props\n else\n parent.properties.storage_profile.os_disk = OsDisk.new parent.properties.storage_profile, props\n end\n\n end", "def create_vm_specs(vm_id, cpu, ram, description, cd_rom_disk,\n system_disk, network, ephemeral_disk_size = nil)\n vm_create_specs = {\n name: \"bosh-#{vm_id}\", uuid: vm_id, memory_mb: ram, num_vcpus: cpu,\n description: \"DO NOT DELETE #{description.to_json}\",\n vm_disks: [ # CD-ROM\n { is_cdrom: true, is_empty: false,\n disk_address: { device_bus: 'ide' },\n vm_disk_clone: { disk_address: { vmdisk_uuid: cd_rom_disk } } },\n # System/Boot Disk\n { is_cdrom: false, is_empty: false,\n disk_address: { device_bus: 'scsi' },\n vm_disk_clone: { disk_address: { vmdisk_uuid: system_disk } } }\n ],\n vm_nics: []\n }\n unless ephemeral_disk_size.nil?\n vm_create_specs[:vm_disks] << {\n is_cdrom: false, is_empty: false,\n disk_address: { device_bus: 'scsi' },\n vm_disk_create: {\n storage_container_uuid: @container_uuid,\n size: (ephemeral_disk_size / 1000) * (1024 * 1024 * 1024)\n }\n }\n end\n network.each { |n| vm_create_specs[:vm_nics] << n }\n vm_create_specs\n end", "def create_disk(size, cloud_properties, vm_cid = nil)\n @telemetry_manager.monitor('initialize') do\n _init_azure\n end\n with_thread_name(\"create_disk(#{size}, #{cloud_properties})\") do\n id = vm_cid.nil? ? 
'' : vm_cid\n extras = { 'disk_size' => size }\n @telemetry_manager.monitor('create_disk', id: id, extras: extras) do\n validate_disk_size(size)\n disk_id = nil\n if @use_managed_disks\n if vm_cid.nil?\n # If instance_id is nil, the managed disk will be created in the resource group location.\n resource_group_name = _azure_config.resource_group_name\n resource_group = @azure_client.get_resource_group(resource_group_name)\n location = resource_group[:location]\n default_storage_account_type = STORAGE_ACCOUNT_TYPE_STANDARD_LRS\n zone = nil\n else\n instance_id = InstanceId.parse(vm_cid, _azure_config.resource_group_name)\n cloud_error('Cannot create a managed disk for a VM with unmanaged disks') unless instance_id.use_managed_disks?\n resource_group_name = instance_id.resource_group_name\n # If the instance is a managed VM, the managed disk will be created in the location of the VM.\n vm = @azure_client.get_virtual_machine_by_name(resource_group_name, instance_id.vm_name)\n location = vm[:location]\n instance_type = vm[:vm_size]\n zone = vm[:zone]\n default_storage_account_type = get_storage_account_type_by_instance_type(instance_type)\n end\n storage_account_type = cloud_properties.fetch('storage_account_type', default_storage_account_type)\n caching = cloud_properties.fetch('caching', 'None')\n validate_disk_caching(caching)\n disk_id = DiskId.create(caching, true, resource_group_name: resource_group_name)\n @disk_manager2.create_disk(disk_id, location, size / 1024, storage_account_type, zone)\n else\n storage_account_name = _azure_config.storage_account_name\n caching = cloud_properties.fetch('caching', 'None')\n validate_disk_caching(caching)\n unless vm_cid.nil?\n instance_id = InstanceId.parse(vm_cid, _azure_config.resource_group_name)\n @logger.info(\"Create disk for vm '#{instance_id.vm_name}'\")\n storage_account_name = instance_id.storage_account_name\n end\n disk_id = DiskId.create(caching, false, storage_account_name: storage_account_name)\n @disk_manager.create_disk(disk_id, size / 1024)\n end\n disk_id.to_s\n end\n end\n end", "def create_storages\n @storages = []\n @encryption_info = @ms_off_crypto.encryption_info\n @encrypted_package = @ms_off_crypto.encrypted_package\n\n @storages << Storage.new('EncryptionInfo', :data=>encryption_info, :left=>3, :right=>11) # example shows right child. do we need the summary info????\n @storages << Storage.new('EncryptedPackage', :data=>encrypted_package, :color=>Storage::COLORS[:red])\n @storages << Storage.new([6].pack(\"c\")+\"DataSpaces\", :child=>5, :modified =>129685612740945580, :created=>129685612740819979)\n @storages << version\n @storages << data_space_map\n @storages << Storage.new('DataSpaceInfo', :right=>8, :child=>7, :created=>129685612740828880,:modified=>129685612740831800)\n @storages << strong_encryption_data_space\n @storages << Storage.new('TransformInfo', :color => Storage::COLORS[:red], :child=>9, :created=>129685612740834130, :modified=>129685612740943959)\n @storages << Storage.new('StrongEncryptionTransform', :child=>10, :created=>129685612740834169, :modified=>129685612740942280)\n @storages << primary \n # @storages << summary_information\n # @storages << document_summary_information\n\n # we do this at the end as we need to build the minifat stream to determine the size. 
#HOWEVER - it looks like the size should not include the padding?\n @storages.unshift Storage.new('Root Entry', :type=>Storage::TYPES[:root], :color=>Storage::COLORS[:red], :child=>1, :data => mini_fat_stream)\n\n end", "def create_disk(disk_id, size)\n @logger.info(\"create_disk(#{disk_id}, #{size})\")\n storage_account_name = disk_id.storage_account_name\n disk_name = disk_id.disk_name\n @logger.info(\"Start to create an empty vhd blob: blob_name: #{disk_name}.vhd\")\n @blob_manager.create_empty_vhd_blob(storage_account_name, DISK_CONTAINER, \"#{disk_name}.vhd\", size)\n end", "def create_filesystem(fstype, where, label)\n if fstype == 'fat32'\n execute!(\"mkfs.fat -F -F32 -n#{label} #{where}\")\n elsif fstype == 'fat16'\n execute!(\"mkfs.fat -F -F16 -n#{label} #{where}\")\n elsif fstype == 'swap'\n execute!(\"mkswap -L #{label} #{where}\")\n else\n execute!(\"mkfs.#{fstype} -L \\\"#{label}\\\" #{where}\")\n end\n end", "def create_vdi(name, sr_ref, size)\n vdi_record = {\n \"name_label\" => \"#{name}\",\n \"name_description\" => \"Root disk for #{name} created by knfie xapi\",\n \"SR\" => sr_ref,\n \"virtual_size\" => input_to_bytes(size).to_s,\n \"type\" => \"system\",\n \"sharable\" => false,\n \"read_only\" => false,\n \"other_config\" => {},\n }\n \n # Async create the VDI\n task = xapi.Async.VDI.create(vdi_record)\n ui.msg \"waiting for VDI Create\"\n vdi_ref = get_task_ref(task)\n end", "def create_disk(size, cloud_properties, server_id = nil)\n volume_service_client = @openstack.volume\n with_thread_name(\"create_disk(#{size}, #{cloud_properties}, #{server_id})\") do\n raise ArgumentError, 'Disk size needs to be an integer' unless size.is_a?(Integer)\n cloud_error('Minimum disk size is 1 GiB') if size < 1024\n\n unique_name = generate_unique_name\n volume_params = {\n # cinder v1 requires display_ prefix\n display_name: \"volume-#{unique_name}\",\n display_description: '',\n # cinder v2 does not require prefix\n name: \"volume-#{unique_name}\",\n description: '',\n size: mib_to_gib(size),\n }\n\n if cloud_properties.key?('type')\n volume_params[:volume_type] = cloud_properties['type']\n elsif !@default_volume_type.nil?\n volume_params[:volume_type] = @default_volume_type\n end\n\n if server_id && @az_provider.constrain_to_server_availability_zone?\n server = @openstack.with_openstack { @openstack.compute.servers.get(server_id) }\n volume_params[:availability_zone] = server.availability_zone if server&.availability_zone\n end\n\n @logger.info('Creating new volume...')\n new_volume = @openstack.with_openstack { volume_service_client.volumes.create(volume_params) }\n\n @logger.info(\"Creating new volume `#{new_volume.id}'...\")\n @openstack.wait_resource(new_volume, :available)\n\n new_volume.id.to_s\n end\n end", "def initialize(options = {})\n @disks = {}\n options.each { |k, v| self.send :\"#{k}=\", v }\n end", "def add_cdrom_to_vbox_vm(client_name)\n message = \"Attaching:\\tCDROM to VM \"+client_name\n command = \"VBoxManage storagectl \\\"#{client_name}\\\" --name \\\"cdrom\\\" --add \\\"sata\\\" --controller \\\"IntelAHCI\\\"\"\n execute_command(message,command)\n if File.exist?($vbox_additions_iso)\n message = \"Attaching:\\tISO \"+$vbox_additions_iso+\" to VM \"+client_name\n command = \"VBoxManage storageattach \\\"#{client_name}\\\" --storagectl \\\"cdrom\\\" --port 0 --device 0 --type dvddrive --medium \\\"#{$vbox_additions_iso}\\\"\"\n execute_command(message,command)\n end\n return\nend", "def create_multi_config\n\t\t\t\tFile.open( 
\"#{$config_path}/#{@host}_disks.cfg\", 'w' ) do |out|\n\t\t\t\t\t\tout.puts \"# Diskfile for host #{@host} generated at #{Time.now}\"\n\t\t\t\t\t\[email protected] do |disk|\n\t\t\t\t\t\t\t\tnext unless disk[:disk]\n\t\t\t\t\t\t\t\twarn = disk[:warn]\n\t\t\t\t\t\t\t\tcrit = disk[:crit]\n\t\t\t\t\t\t\t\twarn = \"$WARN$\" if disk[:warn].nil?\n\t\t\t\t\t\t\t\tcrit = \"$CRIT$\" if disk[:crit].nil?\n\t\t\t\t\t\t\t\tout.puts \"command[#{disk[:disk].tr(\"/\",\"_\")}::check_disk]=check_nrpe -H #{@host} #{$ssl} -c check_disk -a #{warn} '#{crit} -E' #{disk[:disk]}\" if $isbuggy == true\n\t\t\t\t\t\t\t\tout.puts \"command[#{disk[:disk].tr(\"/\",\"_\")}::check_disk]=check_nrpe -H #{@host} #{$ssl} -c check_disk -a #{warn} #{crit} #{disk[:disk]}\" if $isbuggy == false\n\t\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\t$log.info \"check_multi config written for host #{@host}\"\n\t\tend", "def reference_disks(template_ref, disks, managed)\n return [] if disks.empty? || instantiated_as_persistent?\n\n extraconfig = []\n if managed\n key_prefix = 'opennebula.mdisk'\n else\n key_prefix = 'opennebula.disk'\n end\n\n # Get vcenter VM disks to know real path of cloned disk\n vcenter_disks = vcenter_disks_get\n\n # Create an array with the paths of the disks in vcenter template\n if !template_ref.nil?\n template = VCenterDriver::Template.new_from_ref(template_ref,\n vi_client)\n template_disks = template.vcenter_disks_get\n else\n # If we are dealing with a Wild VM, we simply use\n # what is available in the vCenter VM\n template_disks = vcenter_disks_get\n end\n template_disks_vector = []\n template_disks.each do |d|\n template_disks_vector << d[:path_wo_ds]\n end\n\n # Try to find index of disks in template disks\n disks.each do |disk|\n disk_source =\n VCenterDriver::FileHelper\n .unescape_path(\n disk['SOURCE']\n )\n template_disk = template_disks.select do |d|\n d[:path_wo_ds] == disk_source\n end.first\n\n if template_disk\n vcenter_disk = vcenter_disks.select do |d|\n d[:key] == template_disk[:key]\n end.first\n end\n\n unless vcenter_disk\n raise \"disk with path #{disk_source}\"\\\n 'not found in the vCenter VM'\n end\n\n reference = {}\n reference[:key] = \"#{key_prefix}.#{disk['DISK_ID']}\"\n reference[:value] = (vcenter_disk[:key]).to_s\n extraconfig << reference\n end\n\n extraconfig\n end", "def allocate_disk( amount )\n return false\n end", "def create_vdi(name, sr_ref, size)\n vdi_record = {\n 'name_label' => \"#{name}\",\n 'name_description' => \"Root disk for #{name} created by #{ENV['USER']} with knfie xapi\",\n 'SR' => sr_ref,\n 'virtual_size' => input_to_bytes(size).to_s,\n 'type' => 'system',\n 'sharable' => false,\n 'read_only' => false,\n 'other_config' => {}\n }\n\n # Async create the VDI\n task = xapi.Async.VDI.create(vdi_record)\n ui.msg 'waiting for VDI Create'\n vdi_ref = get_task_ref(task)\n vdi_ref\n end", "def disk_space()\n\n instructions = 'df -h'\n r = @ssh ? @ssh.exec!(instructions) : `#{instructions}`\n\n @results[:disk_usage] = {}\n\n a = r.lines.grep(/\\/dev\\/root/)\n\n puts ('a: ' + a.inspect).debug if @debug\n\n if a.any? then\n size, used, avail = a[0].split(/ +/).values_at(1,2,3)\n\n @results[:disk_usage][:root] = {size: size, used: used, \n avail: avail}\n end\n\n a2 = r.lines.grep(/\\/dev\\/sda1/)\n\n puts ('a2: ' + a2.inspect).debug if @debug\n\n if a2.any? 
then\n size, used, avail = a2[0].split(/ +/).values_at(1,2,3)\n\n @results[:disk_usage][:sda1] = {size: size, used: used, \n avail: avail}\n end\n\n end", "def _init_filesystem\n\t\t# Prepare temporary work directory\n\t\tcommand_send(\"sudo rm -rf /tmp/.captain\")\n\t\tcommand_send(\"mkdir -p /tmp/captain/transfers\")\n\t\tcommand_send(\"mkdir -p /tmp/captain/checkpoints/export\")\n\t\tcommand_send(\"mkdir -p /tmp/captain/checkpoints/import\")\n\tend", "def partitions(**opts)\n\t\t\t\topts[:disk]||[email protected](:sysinit, :disk)\n\t\t\t\topts[:mountpoint]||[email protected](:sysinit, :mountpoint)\n\t\t\t\t@partitions=partitions_helper(@partitions||Marshal.load(Marshal.dump(@computer.dig(:sysinit,:partitions))), **opts)\n\t\t\t\t@partitions = yield @partitions, **opts if block_given?\n\t\t\t\t@partitions\n\t\t\tend", "def disks\n self.config.hardware.device.grep(RbVmomi::VIM::VirtualDisk)\n end", "def hyperv\n has_grow, grow_by = SubutaiDisk.has_grow\n file_disk = SubutaiDisk.file_path(grow_by, \"hyper_v\")\n disk_path = Pathname.new file_disk\n\n unless disk_path.exist?\n Put.warn SubutaiDisk.message(grow_by)\n\n if has_grow\n if SubutaiDisk.hyperv_create_disk(grow_by, disk_path.to_s)\n SubutaiDisk.save_path(SubutaiDisk.port, disk_path.to_s)\n SubutaiDisk.save_conf(grow_by)\n end\n end\n else\n Put.error \"Disk file already exist in #{file_disk}\"\n end\n end", "def disks\n \tbs = {}\n\tnoko_details.xpath('/domain/devices/disk').map do |ddsk|\n\t next if ddsk.xpath('@device').text != \"disk\"\n\t next if ddsk.xpath('@snapshot').text == \"external\"\n\t sf = ddsk.xpath('source/@dev','source/@file').text\n\t td = ddsk.xpath('target/@dev').text\n\t bs[td] = sf\n\tend\n\tbs\n end", "def disk(index, opts = {})\n index = index.to_s\n\n return @disks[index] if @disks[index] && opts[:sync].nil?\n\n one_disk =\n one_item\n .retrieve_xmlelements(\n \"TEMPLATE/DISK[DISK_ID='#{index}']\"\n ).first rescue nil\n\n raise \"disk #{index} not found\" unless one_disk\n\n opts[:keys].nil? ? keys = disk_keys : keys = opts[:keys]\n if opts[:disks].nil?\n vc_disks = vcenter_disks_get\n else\n vc_disks = opts[:disks]\n end\n vc_disk = query_disk(one_disk, keys, vc_disks)\n\n if vc_disk\n Disk.new(index.to_i, one_disk, vc_disk)\n else\n Disk.one_disk(index.to_i, one_disk)\n end\n end", "def gen_partition_script(number_of_volumes, mount_point_prefix, resize_root_vol = false)\n resize_root = resize_root_vol ? 
0 : 1\n template = <<-END.gsub(/^ {6}/, '')\n #!/bin/bash\n RESIZE_ROOT=<%= resize_root %>\n if [ $RESIZE_ROOT -eq 0 ]; then\n echo \"Resizing the root partition\"\n resize2fs /dev/`cat /proc/partitions | awk '/xvd*/ {print $4}' | head -n1`\n fi\n NUM_OF_VOLS=<%= number_of_volumes %>\n if [ $NUM_OF_VOLS -ne 0 ]; then\n DEVICES=`cat /proc/partitions | awk '/xvd*/ {print $4}' | tail -n<%= number_of_volumes %>`\n echo \"Formatting and mounting initiated\"\n count=1\n for dev in $DEVICES; do\n echo \"Formatting and mounting $dev\"\n fdisk -u /dev/$dev << EOF\n n\n p\n 1\n\n\n w\n EOF\n mkfs.ext4 /dev/${dev}1\n data_dir=$((count++))\n mkdir -p <%= mount_point_prefix %>/${data_dir}\n mount /dev/${dev}1 <%= mount_point_prefix %>${data_dir}\n done\n fi\n END\n ERB.new(template).result(binding)\n end", "def provision_and_mount_volume(server, disk_size, device)\n unless provider.find_server_device(server, device)\n say \"Provisioning #{disk_size}Gb persistent disk for inception VM...\"\n provider.create_and_attach_volume(\"Inception Disk\", disk_size, server, device)\n end\n\n # Format and mount the volume\n if aws?\n say \"Skipping volume mounting on AWS 12.10 inception VM until its fixed\", [:yellow, :bold]\n run_ssh_command_until_successful server, \"sudo mkdir -p /var/vcap/store\"\n else\n say \"Mounting persistent disk as volume on inception VM...\"\n run_ssh_command_until_successful server, \"sudo mkfs.ext4 #{device} -F\"\n run_ssh_command_until_successful server, \"sudo mkdir -p /var/vcap/store\"\n run_ssh_command_until_successful server, \"sudo mount #{device} /var/vcap/store\"\n end\n end", "def secondary_storage\n unless ::File.exist?(new_resource.nfs_path)\n shell_out!(\"mkdir -p #{new_resource.nfs_path}\")\n shell_out!(\"chown -R root:root #{new_resource.nfs_path}\")\n end\n end", "def disk_spec(file_name, size, unit)\n disk = RbVmomi::VIM.VirtualDisk(\n :backing => disk_backing(file_name),\n :controllerKey => 0,\n :key => 0,\n :unitNumber => unit,\n :capacityInKB => size\n )\n\n config = {\n :device => disk,\n :fileOperation => RbVmomi::VIM.VirtualDeviceConfigSpecFileOperation('create'),\n :operation => RbVmomi::VIM.VirtualDeviceConfigSpecOperation('add')\n }\n\n if vsan_data_store?(file_name) && resource[:vm_storage_policy]\n config[:profile] = [VIM::VirtualMachineDefinedProfileSpec(\n :profileId => profile(resource[:vm_storage_policy]).profileId.uniqueId\n )]\n end\n\n RbVmomi::VIM.VirtualDeviceConfigSpec(config)\n end", "def disk_specs(path)\n specs = []\n unit = 0\n if resource[:virtual_disks]\n resource[:virtual_disks].each do |vd|\n size = vd[\"size\"].to_i * 1024 * 1024\n specs << disk_spec(path, size, unit)\n unit += 1\n end\n else\n specs << disk_spec(path, resource[:disk_size], unit)\n end\n\n specs\n end", "def createvolume\n if not checkRequirements([\"thezone\",\"thevolume\"])\n return false\n end\n checkToken(@thezone)\n req = {}\n req[\"name\"] = \"oe-#{@thevolume.name}\"\n req[\"description\"] = @thevolume.description\n req[\"sizeGb\"] = @thevolume.size\n submit = queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@thevolume.azone.name}/disks', :method => 'post', :options => '', :data => req.to_json, :access_token => @thezone.toekn )\n d = checkQuery(:type => 'zone', :token => @thezone.token, :projectname => @thezone.name, :zonename => @thevolume.azone.name, :operationname => submit[\"name\"])\n data = queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@thevolume.azone.name}/disks/#{req[\"name\"]}', :method => 'get', :options => 
'', :access_token => @thezone.token) if d\n data ? data[\"name\"] : false\n end", "def bootloader_partitions\n raise RuntimeError, \"Not implemented in base class\"\n end", "def ignore_disk_keys()\n ignore_disk_keys = [\n \"disk\",\n \"diskpath\"\n ]\n return ignore_disk_keys\nend", "def disk(disk_id, opts = { :root_device => false })\n @disks << disk_id\n @root_device = @disks.length if opts[:root_device]\n end", "def autodiscover_devices\n logger.debug 'Discovering information about storage subsystem (HDD and MD devices)'\n @storage_information = HddAutodiscover.new(STORAGE_CONFIG[:volume_name])\n collected = @storage_information.collect()\n\n assert [:storage, :free_space, collected[:lvm][:free]]\n collected[:hdd].each do |hdd|\n assert [:hdd, hdd.device, :sn, hdd.sn]\n assert [:hdd, hdd.device, :temperature, hdd.temperature]\n assert [:hdd, hdd.device, :health, hdd.health]\n end\n end", "def make_forensic_image(options)\n drive_path = %x(VBoxManage list hdds | grep '#{options[:project_dir].split('/').last}').sub(/\\ALocation:\\s*/, '').sub(/\\n/, '')\n # drive_path = %x(VBoxManage list hdds | grep '#{options[:project_dir].split('/').last}').sub(/\\ALocation:\\s*|\\n\\Z/, '')\n drive_name = drive_path.split('/').last\n\n options[:image_output_location] = \"#{options[:project_dir]}/#{drive_name}\".sub(/.vmdk|.vdi/, '') unless options.has_key? :image_output_location\n\n unless options.has_key? :no_vm_shutdown\n ## Ensure all vms are shutdown\n system \"cd '#{options[:project_dir]}' && vagrant halt\"\n\n if options.has_key? :create_raw_image\n create_dd_image(drive_path, options[:image_output_location])\n end\n\n if options.has_key? :create_ewf_image\n create_ewf_image(drive_path, options[:image_output_location])\n end\n\n if options.has_key? :delete_vm_after_image_creation\n delete_virtualbox_vm(options[:vm_name])\n end\n else\n @colour.error 'Cannot create forensic image as --no-vm-shutdown option is set to true'\n end\n\n return options\nend", "def check_disk\n super\n end", "def disk\n @disk ||= begin\n dInfo = OpenStruct.new\n dInfo.lvObj = self\n dInfo.hardwareId = \"\"\n MiqDisk.new(Lvm2DiskIO, dInfo, 0)\n end\n end", "def create_dd_image(drive_path, image_output_location)\n ## Make DD image\n @colour.notify \"Creating dd image with path #{image_output_location}.raw\"\n @colour.notify 'This may take a while:'\n @colour.notify \"Raw image #{image_output_location}.raw created\" if system \"aBoxManage clonemedium disk '#{drive_path}' '#{image_output_location}.raw' --format RAW\"\nend", "def secondary_storage\n unless ::File.exist?(@current_resource.nfs_path)\n directory @current_resource.nfs_path do\n owner \"root\"\n group \"root\"\n action :create\n recursive true\n end\n end\n end", "def CreatePartition(disk, device, ptype, id, start, len, mby)\n Builtins.y2milestone(\n \"CreatePartition disk:%1 device:%2 ptype:%3 id:%4 start:%5 len:%6 mby:%7\",\n disk,\n device,\n ptype,\n id,\n start,\n len,\n mby\n )\n pt = fromSymbol(@conv_ptype, ptype)\n Builtins.y2milestone(\"CreatePartition type:%1 pt:%2\", ptype, pt)\n ret, cdev = @sint.createPartition(disk, pt, start, len)\n cdev = \"\" if ret<0\n if device != cdev\n Builtins.y2error(\"CreatePartition device:%1 cdev:%2\", device, cdev)\n end\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n ret = @sint.changePartitionId(device, id)\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n tmp = fromSymbol(@conv_mountby, mby)\n @sint.changeMountBy(device, tmp)\n Builtins.y2milestone(\"CreatePartition sint ret:%1\", ret)\n 
UpdateTargetMap()\n ret == 0\n end", "def attach_volumes!(server, volumes_count, size)\n #create a new block storage connection obj\n volume_service = Fog::Volume::OpenStack.new(\n :openstack_api_key => @os_password,\n :openstack_username => @os_username,\n :openstack_auth_url => @os_auth_url,\n :openstack_tenant => @os_tenant,\n )\n base = 'sdd'\n volumes_count.times do |i|\n base = base.next!\n #create a new volume\n vol = volume_service.volumes.create(\n :size => size,\n :display_name => \"#{server.name}-#{i}\",\n :description => \"Volume attached to #{server.name} - managed by ankus\"\n )\n vol.reload\n vol.wait_for { status == 'available' }\n server.attach_volume(vol.id, \"/dev/#{base}\")\n vol.wait_for { status == 'in-use' }\n end\n end", "def sync_disks(machine)\n @logger.info \"Syncing disks for #{machine.name}...\"\n machine.disks.each do |disk|\n begin\n disk.remote_id ? update_disk(disk) : create_disk(disk)\n rescue StandardError => e\n message = e.is_a?(RestClient::Exception) ? e.response : e\n raise Exceptions::OnPremiseException, message\n end\n end\n end", "def create_partition(size = nil, type = Partition.PartitionType[:TYPE_PRIMARY])\n DiskUtils.create_partition self, size[:start_block], size[:end_block]\n partitions = Device.find(self).partitions\n return partitions.last\n end", "def AddMountPointsForWinParts(partitions, primary, max_prim, foreign_nr)\n partitions = deep_copy(partitions)\n return if !Arch.i386 && !Arch.ia64 && !Arch.x86_64\n\n foreign_ids = \"CDEFGHIJKLMNOPQRSTUVW\"\n\n Builtins.foreach(partitions) do |partition|\n new_partition = deep_copy(partition)\n fsid = Ops.get_integer(partition, \"fsid\", Partitions.fsid_native)\n partnum = 0\n if Builtins.haskey(partition, \"nr\") &&\n Ops.is_integer?(Ops.get(partition, \"nr\", 0))\n partnum = Ops.get_integer(partition, \"nr\", 0)\n end\n if !Builtins.haskey(partition, \"mount\") &&\n !Ops.get_boolean(partition, \"delete\", false) &&\n Ops.less_or_equal(partnum, max_prim) == primary &&\n Ops.less_than(foreign_nr.value, 24) &&\n Partitions.IsDosWinNtPartition(fsid) &&\n !Arch.ia64 &&\n !IsEfiPartition(partition) &&\n Ops.greater_or_equal(\n Ops.get_integer(partition, \"size_k\", 0),\n 1024 * 1024\n ) &&\n Builtins.contains(\n [:vfat, :ntfs],\n Ops.get_symbol(partition, \"used_fs\", :none)\n )\n Ops.set(\n new_partition,\n \"fstopt\",\n FileSystems.DefaultFstabOptions(partition)\n )\n if Builtins.contains(Partitions.fsid_dostypes, fsid)\n Ops.set(\n new_partition,\n \"mount\",\n Ops.add(\n \"/dos/\",\n Builtins.substring(foreign_ids, foreign_nr.value, 1)\n )\n )\n foreign_nr.value = Ops.add(foreign_nr.value, 1)\n else\n Ops.set(\n new_partition,\n \"mount\",\n Ops.add(\n \"/windows/\",\n Builtins.substring(foreign_ids, foreign_nr.value, 1)\n )\n )\n foreign_nr.value = Ops.add(foreign_nr.value, 1)\n end\n ChangeVolumeProperties(new_partition)\n Builtins.y2milestone(\"win part %1\", new_partition)\n end\n end\n\n nil\n end", "def addVolume(dev, size, type: \"gp2\", delete_on_termination: false)\n\n if setDeleteOntermination(dev, delete_on_termination)\n MU.log \"A volume #{dev} already attached to #{self}, skipping\", MU::NOTICE\n return\n end\n\n MU.log \"Creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).create_volume(\n availability_zone: cloud_desc.placement.availability_zone,\n size: size,\n volume_type: type\n )\n\n MU.retrier(wait: 3, loop_if: Proc.new {\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: 
@credentials).describe_volumes(volume_ids: [creation.volume_id]).volumes.first\n if ![\"creating\", \"available\"].include?(creation.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n creation.state != \"available\"\n })\n\n\n if @deploy\n MU::Cloud::AWS.createStandardTags(\n creation.volume_id,\n region: @region,\n credentials: @credentials,\n optional: @config['optional_tags'],\n nametag: @mu_name+\"-\"+dev.upcase,\n othertags: @config['tags']\n )\n end\n\n MU.log \"Attaching #{creation.volume_id} as #{dev} to #{@cloud_id} in #{@region} (credentials #{@credentials})\"\n attachment = nil\n MU.retrier([Aws::EC2::Errors::IncorrectState], wait: 15, max: 4) {\n attachment = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).attach_volume(\n device: dev,\n instance_id: @cloud_id,\n volume_id: creation.volume_id\n )\n }\n\n begin\n att_resp = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(volume_ids: [attachment.volume_id])\n if att_resp and att_resp.volumes and !att_resp.volumes.empty? and\n att_resp.volumes.first.attachments and\n !att_resp.volumes.first.attachments.empty?\n attachment = att_resp.volumes.first.attachments.first\n if !attachment.nil? and ![\"attaching\", \"attached\"].include?(attachment.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end\n end while attachment.nil? or attachment.state != \"attached\"\n\n # Set delete_on_termination, which for some reason is an instance\n # attribute and not on the attachment\n setDeleteOntermination(dev, delete_on_termination)\n end", "def parallels\n has_grow, grow_by = SubutaiDisk.has_grow\n \n if has_grow\n if SubutaiDisk.parallels_create_disk(grow_by)\n Put.warn SubutaiDisk.message(grow_by)\n SubutaiDisk.save_conf(grow_by)\n end\n end\n end", "def disk_all(cfg)\n ignored = cfg['ignore_fs'] || 'tmpfs'\n ignore_fs = \"fstype!~\\\"#{ignored}\\\"\"\n query = @client.percent_query_free(\n \"node_filesystem_files{#{ignore_fs}}\",\n \"node_filesystem_files_free{#{ignore_fs}}\"\n )\n prepare_metrics('disk_all', @client.query(query))\n end", "def add_to(vm)\n add_bus_to vm\n disks.each do |port_device, disk|\n disk.add_to vm, self, port_device.first, port_device.last\n end\n self\n end" ]
[ "0.70231146", "0.69995695", "0.68383676", "0.68118185", "0.6750576", "0.67435026", "0.66048616", "0.6545638", "0.65237164", "0.65123236", "0.629663", "0.62834424", "0.6283214", "0.62750745", "0.6208587", "0.61978734", "0.6118388", "0.6093614", "0.60689795", "0.605721", "0.602354", "0.6013172", "0.60126776", "0.6004047", "0.5990273", "0.5970265", "0.5958328", "0.59447396", "0.59372866", "0.59326875", "0.59312123", "0.59312123", "0.59291357", "0.5920679", "0.5900355", "0.5872826", "0.5869765", "0.58641124", "0.58608824", "0.5855831", "0.5839274", "0.5814927", "0.5814688", "0.5803474", "0.57961833", "0.5756994", "0.5746671", "0.5734946", "0.5732815", "0.5719713", "0.5718059", "0.5716057", "0.5713547", "0.5697314", "0.56743765", "0.5654192", "0.5648647", "0.5622983", "0.56076014", "0.56044054", "0.5603583", "0.5582923", "0.5581385", "0.5572555", "0.5564491", "0.55636156", "0.555379", "0.553459", "0.55125153", "0.55070066", "0.55030984", "0.54804534", "0.5470134", "0.5456959", "0.5437968", "0.54265577", "0.54159176", "0.5415022", "0.5391078", "0.5382437", "0.5381004", "0.53751326", "0.53741485", "0.5372398", "0.53614634", "0.5359971", "0.53575134", "0.5343537", "0.5333676", "0.5332713", "0.53184056", "0.5308061", "0.53059405", "0.52981967", "0.5290388", "0.52842337", "0.5280945", "0.5267997", "0.5255958", "0.52382445", "0.52331424" ]
0.0
-1
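Several of the negatives in the record above revolve around the same operation: creating a block volume and attaching it to a running server. As a point of reference, here is a minimal Ruby sketch of the Fog::Volume::OpenStack pattern used in the attach_volumes! snippet; the credential sources, the volume size, the device path, and the 'server' object are illustrative assumptions, not values taken from the dataset.

require 'fog/openstack'

# Open a Cinder (block storage) connection; credentials are placeholders.
volume_service = Fog::Volume::OpenStack.new(
  :openstack_api_key  => ENV['OS_PASSWORD'],
  :openstack_username => ENV['OS_USERNAME'],
  :openstack_auth_url => ENV['OS_AUTH_URL'],
  :openstack_tenant   => ENV['OS_TENANT']
)

# Create a volume and wait until Cinder reports it as usable.
vol = volume_service.volumes.create(
  :size         => 10,
  :display_name => 'example-volume'
)
vol.wait_for { status == 'available' }

# Attach it to an existing compute server ('server' is assumed to be a
# Fog compute server object) and wait for the attachment to settle.
server.attach_volume(vol.id, '/dev/sdb')
vol.wait_for { status == 'in-use' }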
Cleanup a domain's volumes
def volume_cleanup(domain) domain.volumes.each do |volume| debug("Removing volumes #{volume.key}") volume.destroy if volume.key end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cleanup_storage vm\n vm.volumes.each do |vol|\n @logger.debug \"Deleting volume #{vol.name} for OpenStack host #{vm.name}\"\n vm.detach_volume(vol.id)\n vol.wait_for { ready? }\n vol.destroy\n end\n end", "def cleanup\n case SubutaiConfig.provider\n when :hyper_v\n SubutaiDisk.hyperv_remove_disk\n end\n\n # cleanup virtual disks\n disks = SubutaiConfig.get(:_DISK_PATHES)\n unless disks.nil?\n disks.keys.each do |key|\n if File.exist?(disks[key])\n begin\n File.delete(disks[key])\n puts \"==> default: Deleted file: #{disks[key]}\"\n rescue Errno::EACCES\n puts \"==> default: (Permission denied) Failed delete file: #{disks[key]}\"\n end\n end\n end\n end\n\n # cleanup generated files\n if File.exist?(SubutaiConfig::GENERATED_FILE)\n begin\n File.delete SubutaiConfig::GENERATED_FILE\n puts \"==> default: Deleted file: #{SubutaiConfig::GENERATED_FILE}\"\n rescue Errno::EACCES\n puts \"==> default: (Permission denied) Failed delete file: #{SubutaiConfig::GENERATED_FILE}\"\n end\n end\n end", "def destroy_domain(domain)\n debug(\"Destroying domain #{domain.id}\")\n domain.halt if domain.active\n debug(\"Removing volumes for domain #{domain.id}\")\n volume_cleanup(domain)\n domain.destroy\n end", "def cleanup\n FileUtils.rm(autoinst_path, force: true)\n FileUtils.rm(definition_path, force: true)\n FileUtils.rm(libvirt_definition_path, force: true)\n if provider == :libvirt\n # Due a bug in vagrant-libvirt the images will not cleanuped correctly\n # in the /var/lib/libvirt directory. This has to be done manually\n # (including DB update)\n system \"sudo virsh vol-delete #{IMAGE_BOX_NAME} default\"\n end\n end", "def cleanup\n @logger.notify \"Cleaning up Lxc Container\"\n\n @hosts.each do | host |\n if container = host['lxc_container']\n @logger.debug(\"stop container #{host}\")\n begin\n ip = container.ip_addresses.join(\",\")\n # If IP is empty it was deleting the /etc/hosts file.\n # So checking first if IP is available or not\n if !ip.empty?\n @logger.notify \"Deleting hostname #{host} in /etc/host\"\n system \"sed -i '/^#{ip}/d' /etc/hosts\"\n else\n @logger.notify \"IP address not found, skipping to delete hostname from /etc/hosts file\"\n end\n # Stop the container\n container.stop\n sleep 2\n rescue Exception => e\n @logger.warn(\"stop of container #{host} failed: #{e}\")\n end\n @logger.debug(\"delete container #{host}\")\n begin\n container.destroy\n rescue Exception => e\n @logger.warn(\"deletion of container #{host} failed: #{e}\")\n end\n end\n end\n end", "def clean_related_volumes!\n delegate(provider, :clean_related_volumes!)\n end", "def cleanup!\r\n got = @ndev.rpc.request_system_storage_cleanup\r\n gone_h = {}\r\n got.xpath('file-list/file').each do |file|\r\n _cleanup_file_to_h( file, gone_h )\r\n end\r\n gone_h\r\n end", "def cleanup?\r\n got = @ndev.rpc.request_system_storage_cleanup( :dry_run => true )\r\n dryrun_h = {}\r\n got.xpath('file-list/file').each do |file|\r\n _cleanup_file_to_h( file, dryrun_h )\r\n end\r\n dryrun_h \r\n end", "def after_destroy\n super\n\n # Try our best to remove the directory. If it fails there is little\n # else that we could do to resolve the situation -- we already tried to\n # delete it once...\n self.tmpdir and FileUtils.remove_entry_secure(self.tmpdir, true)\n\n # Remove repo directory.\n if self.iso_url\n # Some files in archives are write-only. 
Change this property so the\n # delete succeeds.\n remove_directory(iso_location)\n end\n end", "def cleanup\n _send_command(\"hosts -d\")\n _send_command(\"services -d\")\n _send_command(\"creds -d\")\n end", "def cleanup!; end", "def cleanup!; end", "def cleanup_release\n if ssh.directory_exists?(release_path)\n ssh.run(\"rm -rf #{release_path}\")\n end\n end", "def destroy #Changing ensure to absent\n\n\t\tdebug \"Trying to destroy domain %s\" % [resource[:name]]\n\n\t\tbegin\n\t\t\tdom.destroy\n\t\trescue Libvirt::Error => e\n\t\t\tdebug \"Domain %s already Stopped\" % [resource[:name]]\n\t\tend\n\t\tdom.undefine\n\n\tend", "def destroy #Changing ensure to absent\n\n\t\tdebug \"Trying to destroy domain %s\" % [resource[:name]]\n\n\t\tbegin\n\t\t\tdom.destroy\n\t\trescue Libvirt::Error => e\n\t\t\tdebug \"Domain %s already Stopped\" % [resource[:name]]\n\t\tend\n\t\tdom.undefine\n\n\tend", "def cleanup\n end", "def cleanup\n end", "def cleanup\n end", "def cleanup\n end", "def cleanup\n cleanup_primitive full_name, hostname\n wait_for_status name\n end", "def cleanup; end", "def cleanup; end", "def cleanup; end", "def cleanup; end", "def cleanup\n\tsh 'del /F /Q .\\_site\\*'\n\t# sh 'rm -rf ./_site'\nend", "def cleanup\n end", "def cleanup\n end", "def cleanup\n end", "def delete_directory domain\n # Use FileUtils\n fu = FileUtils\n # If we're just running as a simulation\n if $simulate\n # Use ::DryRun, that just echoes the commands, instead of the normal FileUtils\n fu = FileUtils::DryRun # ::DryRun / ::NoWrite\n end\n\n # Tell the user\n puts \"> Tar bort domänen från filsystemet\".green\n\n # Build the paths\n mail_dir = BASE_PATH_MAIL + domain['domain']\n home_dir = BASE_PATH_HOME + domain['domain']\n\n # Remove the directories\n fu.rm_r mail_dir\n fu.rm_r home_dir\nend", "def clean_environment\n `rm -rf /tmp/#{@upload_id}` # all sliced pages of the source file\n end", "def domain_volumes\n # Use the domain name as our volume base name.\n base_name = domain_name\n\n if not config[:image_name]\n config[:image_name] = default_image\n end\n\n # Clone our root volume from our base image.\n root_volume = clone_volume(config[:image_name], \"#{base_name}-root\")\n\n # Return the array of created volumes\n [root_volume].concat(\n create_volumes(\n extra_volumes(base_name)\n )\n )\n end", "def cleanup\n @logger.notify \"Cleaning up OpenStack\"\n @vms.each do |vm|\n cleanup_storage(vm)\n @logger.debug \"Release floating IPs for OpenStack host #{vm.name}\"\n floating_ips = vm.all_addresses # fetch and release its floating IPs\n floating_ips.each do |address|\n @compute_client.disassociate_address(vm.id, address['ip'])\n @compute_client.release_address(address['id'])\n end\n @logger.debug \"Destroying OpenStack host #{vm.name}\"\n vm.destroy\n if @options[:openstack_keyname].nil?\n @logger.debug \"Deleting random keypair\"\n @compute_client.delete_key_pair vm.name\n end\n end\n end", "def cleanup\n cleanup_unpack_path\n cleanup_download_path\n end", "def cleanup\n end", "def cleanup\n end", "def cleanup_records\n Fog::DNS[:dreamhost].records.each do |r|\n # Do not delete the 'do-not-delete' record, we need it for the tests\n r.destroy if r.name =~ /#{test_domain}/ and r.name != do_not_delete_record\n end\nend", "def cleanup(prefix)\n print_header \"Cleaning up ...\"\n Constants::SOFTWARE.each do |sw|\n cmd \"rm #{prefix}/#{sw}.tar.gz; rm -rf #{prefix}/#{sw}\" if File.exist? 
\"#{prefix}/#{sw}.tar.gz\"\n end\n end", "def destroy\n if self.class.cfg_name == \"server\"\n begin\n ip = canonicalIP\n MU::Master.removeIPFromSSHKnownHosts(ip) if ip\n if @deploy and @deploy.deployment and\n @deploy.deployment['servers'] and @config['name']\n me = @deploy.deployment['servers'][@config['name']][@mu_name]\n if me\n [\"private_ip_address\", \"public_ip_address\"].each { |field|\n if me[field]\n MU::Master.removeIPFromSSHKnownHosts(me[field])\n end\n }\n if me[\"private_ip_list\"]\n me[\"private_ip_list\"].each { |private_ip|\n MU::Master.removeIPFromSSHKnownHosts(private_ip)\n }\n end\n end\n end\n rescue MU::MuError => e\n MU.log e.message, MU::WARN\n end\n end\n if [email protected]? and [email protected]?\n @cloudobj.groomer.cleanup\n elsif [email protected]?\n @groomer.cleanup\n end\n if [email protected]?\n if [email protected]? and [email protected]? and [email protected]_name.nil?\n @deploy.notify(self.class.cfg_plural, @config['name'], nil, mu_name: @cloudobj.mu_name, remove: true, triggering_node: @cloudobj, delayed_save: @delayed_save)\n elsif !@mu_name.nil?\n @deploy.notify(self.class.cfg_plural, @config['name'], nil, mu_name: @mu_name, remove: true, triggering_node: self, delayed_save: @delayed_save)\n end\n @deploy.removeKitten(self)\n end\n # Make sure that if notify gets called again it won't go returning a\n # bunch of now-bogus metadata.\n @destroyed = true\n if [email protected]?\n def @cloudobj.notify\n {}\n end\n else\n def notify\n {}\n end\n end\n end", "def cleanup\r\n end", "def destroy_model_storage(repository, model)\n if ENV['destroy']!=nil && ENV['destroy']=='true'\n sdb.delete_domain(@uri[:domain])\n end\n end", "def clean()\n rels = releases()\n rels.pop()\n\n unless rels.empty?\n rm = ['rm', '-rf'].concat(rels.map {|r| release_dir(r)})\n rm << release_dir('skip-*')\n cmd.ssh(rm)\n end\n end", "def destroy!\n fail \"Can not destroy a running stone\" if running?\n rm_rf system_config_filename\n rm_rf extent_directory\n rm_rf log_directory\n rm_rf tranlog_directories\n end", "def cleanup\n if File.exist?(@scalerui_dir)\n print \"Cleaning up directories...\"\n FileUtils.rm_r(@scalerui_dir)\n puts \"done\"\n else\n puts \"Nothing to clean up!\"\n end\n end", "def destroy_and_undefine\n @display.stop if @display&.active?\n begin\n old_domain = @virt.lookup_domain_by_name(@domain_name)\n old_domain.destroy if old_domain.active?\n old_domain.undefine\n rescue StandardError\n # Nothing to clean up\n end\n end", "def purge\n end", "def destroy(_)\n paths = [\n instance.provisioner[:root_path], instance.verifier[:root_path]\n ]\n paths.each do |p|\n FileUtils.rm_rf(p)\n logger.info(\"[Localhost] Deleted temp dir '#{p}'.\")\n end\n self.class.unlock!\n end", "def destroy\n FFI::Libvirt.virStoragePoolDestroy(self) == 0\n end", "def destroy\n FFI::Libvirt.virDomainDestroy(self) == 0\n end", "def destroy\n FFI::Libvirt.virDomainDestroy(self) == 0\n end", "def cleanup\n cleanup_accounts\n end", "def finalize(*args)\n FFI::Libvirt.virDomainFree(self)\n end", "def remove_nfs_datastore\n vm_host = find_vm_host\n nfsdatastore = get_nfs_datastore(vm_host)\n\n if nfsdatastore\n Puppet.notice \"removing nfs datastore\"\n vm_host.configManager.datastoreSystem.RemoveDatastore(:datastore => nfsdatastore)\n end\n end", "def after_destroy\n bundle_dir = File.join( self.userfolder, self.bundle_src_folder )\n logger.info(\"==================> Remove all bundle files for: \" + bundle_dir )\n %x[rm -f -R #{bundle_dir}]\n \n # delete bundle image file name\n %x[rm 
#{fq_bundle_image_filename('*')}]\n \n #delete sample audio prompt\n %x[rm #{fq_bundle_sample_prompt}]\n end", "def cleanup\n tmpdir = File.join(OBS_BUILD_DIR,OBS_LOCAL_TMP)\n if File.exists?(tmpdir)\n FileUtils.rm_rf(tmpdir)\n end\n end", "def destroy(async=true)\n basedir = @config.get(\"GEAR_BASE_DIR\")\n\n path = File.join(basedir, \".httpd.d\", \"#{container_uuid}_*\")\n FileUtils.rm_rf(Dir.glob(path))\n\n reload_all(async)\n end", "def destroy(async=true)\n basedir = @config.get(\"GEAR_BASE_DIR\")\n\n path = File.join(basedir, \".httpd.d\", \"#{container_uuid}_*\")\n FileUtils.rm_rf(Dir.glob(path))\n\n reload_all(async)\n end", "def cleanup\n\tend", "def cleanup\n\tend", "def clean_remote!\n resp = @connection.get_bucket(\n @storage.bucket, prefix: File.dirname(@remote_path)\n )\n keys = resp.body['Contents'].map {|item| item['Key'] }\n\n @connection.delete_multiple_objects(@storage.bucket, keys) unless keys.empty?\n end", "def purge\n\n FileUtils.remove_dir(@basepath)\n end", "def cleanup_domain\n domain.reject!(&:blank?)\n end", "def cleanup\n\n # ----------------------------------------------\n account_name = 'your account name' # <-- change this!\n project_name = 'your project name' # <-- change this!\n # ----------------------------------------------\n\n project_dir = \"/home/#{account_name}/www\"\n Dir.chdir(project_dir)\n\n Dir.entries(project_name).select do |entry1|\n\n dir1 = File.join(project_name,entry1) #dir2 = \"#{project_name}/#{entry1}\"\n if is_directory?(dir1)\n Dir.entries(dir1).select do |entry2|\n \n dir2 = File.join(dir1,entry2) #dir2 = \"#{project_name}/#{entry1}/#{entry2}\"\n if is_directory?(dir2)\n Dir.entries(dir2).select do |entry3|\n \n dir3 = File.join(dir2,entry3) #dir3 = \"#{project_name}/#{entry1}/#{entry2}/#{entry3}\"\n if is_directory?(dir3)\n Dir.entries(dir3).select do |entry4|\n delete_file(File.join(dir3,entry4))\n end\n end\n\n delete_file(dir3)\n delete_dir(dir3)\n end\n end\n\n delete_file(dir2)\n delete_dir(dir2)\n end\n end\n\n delete_file(dir1)\n delete_dir(dir1)\n end\n\n delete_dir(project_name)\nend", "def cleanup\n\t\tcleanup_ssh\n\n\tend", "def cleanup\n docker.stop_container\n docker.delete_container\n end", "def cleanup_dirs\n @cleanup_dirs ||= ['.']\n end", "def clean_up_spanner_objects\n puts \"Cleaning up instances and databases after spanner tests.\"\n $spanner.instances.all.select { |i| i.instance_id.start_with? $spanner_prefix }.each do |instance|\n instance.databases.all.each &:drop\n instance.delete\n end\nrescue => e\n puts \"Error while cleaning up instances and databases after spanner tests.\\n\\n#{e}\"\nend", "def cleanup\n self.objectives.destroy_all\n end", "def cleanup\n begin\n File.unlink(File.join(Restore::Config.socket_dir, 'restore_backend.sock'))\n rescue => e\n BackgrounDRb::ServerLogger.log_exception('server', e)\n end\n end", "def cleanup\n winrm.run_cmd( \"del #{base64_file_name} /F /Q\" )\n winrm.run_cmd( \"del #{command_file_name} /F /Q\" )\n end", "def rm_deposit_bag_safely_for_ceph\n if deposit_bag_pathname.exist?\n deposit_bag_pathname.rmtree\n else\n Honeybadger.notify(\"Deposit bag was missing. This is unusual; it's likely that the workflow step ran once before, and \" \\\n \"failed on the network call to preservation_catalog. 
Please confirm that #{druid} passes checksum \" \\\n 'validation in preservation_catalog, and that its preserved version matches the Cocina in dor-services-app.')\n end\n\n stat_moab_dir_contents\n end", "def purge\n purge_file\n cdb_destroy\n end", "def cleanup_derivatives\n deleted_files = []\n image_derivatives = resource.file_metadata.select { |file| (file.derivative? || file.thumbnail_file?) && file.mime_type.include?(image_mime_type) }\n image_derivatives.each do |file|\n storage_adapter.delete(id: file.id)\n deleted_files << file.id\n end\n cleanup_derivative_metadata(derivatives: deleted_files)\n end", "def delete_libvirt_pool\n run_sequence([\n 'sudo virsh pool-destroy default',\n 'sudo virsh pool-delete default',\n 'sudo virsh pool-undefine default'\n ], until_first_error: false)\n end", "def cleanup(opts = {})\n Console.remove(ctrc.ct)\n zfs(:destroy, '-r', ctrc.dataset, valid_rcs: [1]) if opts[:dataset]\n\n syscmd(\"rm -rf #{ctrc.lxc_dir} #{ctrc.ct.user_hook_script_dir}\")\n File.unlink(ctrc.log_path) if File.exist?(ctrc.log_path)\n File.unlink(ctrc.config_path) if File.exist?(ctrc.config_path)\n\n DB::Containers.remove(ctrc.ct)\n\n begin\n if ctrc.group.has_containers?(ctrc.user)\n CGroup.rmpath_all(ctrc.ct.base_cgroup_path)\n\n else\n CGroup.rmpath_all(ctrc.ct.group.full_cgroup_path(ctrc.user))\n end\n rescue SystemCallError\n # If some of the cgroups are busy, just leave them be\n end\n\n bashrc = File.join(ctrc.lxc_dir, '.bashrc')\n File.unlink(bashrc) if File.exist?(bashrc)\n\n grp_dir = ctrc.group.userdir(ctrc.user)\n\n if !ctrc.group.has_containers?(ctrc.user) && Dir.exist?(grp_dir)\n Dir.rmdir(grp_dir)\n end\n end", "def detachvolume\n false\n end", "def cleanup\n return unless @dst\n\n @dst.unlink\n @dst = nil\n end", "def destroy\n vm.destroy\n FileUtils.rm_rf dir\n end", "def clean!\n stop\n remove_instance_dir!\n FileUtils.remove_entry(config.download_path) if File.exists?(config.download_path)\n FileUtils.remove_entry(config.tmp_save_dir, true) if File.exists? config.tmp_save_dir\n md5.clean!\n FileUtils.remove_entry(config.version_file) if File.exists? 
config.version_file\n end", "def cleanup(tmp_path, meeting_id)\n # Delete temporary files\n FileUtils.rm_rf(tmp_path)\n\n # Delete all raw recording data\n # TODO: Find a way to outsource this into a script that runs after all post_archive scripts have run successfully\n system('sudo', 'bbb-record', '--delete', \"#{meeting_id}\") || raise('Failed to delete local recording')\nend", "def delete_collection\n FileUtils.rm_r @src_path\n FileUtils.rm_r @store_path if store_exist?\n end", "def drop_infrastructure\n drop_event_log if event_log_exists?\n drop_sync_state if sync_state_exists?\n drop_change_logs\n drop_activity_markers\n end", "def cleanup_cached_images()\n\n # swap_dir = \"../public/swap\" # use when running locally from /lib/b2_bucket.rb\n swap_dir = \"./public/swap\" # use when running via app.rb\n swap_contents = \"#{swap_dir}/*\"\n gitkeep = \"#{swap_dir}/.gitkeep\"\n\n if File.directory?(swap_dir)\n FileUtils.rm_rf(Dir.glob(swap_contents)) # delete contents of /public/swap \n file = File.new(gitkeep, 'w') # recreate .gitkeep file\n file.close if file\n else\n puts \"Directory does not exist!\"\n end\n\nend", "def pkg_clean\n sysprint \"#{@name} clean\"\n\n FileUtils::rm_rf(@objdir, :secure => true)\n end", "def cleanTmp\n ts_str = \"/tmp/d\" + Date.today.strftime(\"%Y%m%d\") + \"-*\"\n Gitchefsync.logger.info \"clean up of #{ts_str}\"\n FS.cmdNoError \"sudo rm -fr #{ts_str}\"\n end", "def finalize(*args)\n FFI::Libvirt.virStoragePoolFree(self)\n end", "def clean_directory\n Dir.foreach(@server_dir) do |file|\n fn = File.join(@server_dir, file)\n File.delete(fn) if fn[-1] != '.'\n end\n end", "def purge\n\n\t\tend", "def destroy_and_undefine\n # Shamb0_TODO_20200609=>POC/WT-bringup\n # old_net = @virt.lookup_network_by_name(@net_name)\n # old_net.destroy if old_net.active?\n # old_net.undefine\n rescue StandardError\n # Nothing to clean up\n end", "def purge_items\n purge(@nodename)\n end", "def clean!\n stop\n FileUtils.remove_entry(download_path) if File.exists? download_path\n FileUtils.remove_entry(tmp_save_dir, true) if File.exists? tmp_save_dir\n FileUtils.remove_entry(instance_dir, true) if File.exists? instance_dir\n FileUtils.remove_entry(md5sum_path) if File.exists? md5sum_path\n FileUtils.remove_entry(version_file) if File.exists? version_file\n end", "def clean!\n stop\n FileUtils.remove_entry(download_path) if File.exists? download_path\n FileUtils.remove_entry(tmp_save_dir, true) if File.exists? tmp_save_dir\n FileUtils.remove_entry(instance_dir, true) if File.exists? instance_dir\n FileUtils.remove_entry(md5sum_path) if File.exists? md5sum_path\n FileUtils.remove_entry(version_file) if File.exists? 
version_file\n end", "def erase_old_data()\n Djinn.log_run(\"rm -rf /tmp/h*\")\n Djinn.log_run(\"rm -f ~/.appscale_cookies\")\n Djinn.log_run(\"rm -f #{APPSCALE_HOME}/.appscale/status-*\")\n Djinn.log_run(\"rm -f #{APPSCALE_HOME}/.appscale/database_info\")\n\n Nginx.clear_sites_enabled\n HAProxy.clear_sites_enabled\n Djinn.log_run(\"echo '' > /root/.ssh/known_hosts\") # empty it out but leave the file there\n CronHelper.clear_crontab\n end", "def cleanup_files(resource)\n remove_public_dir(resource) # where the local manifest file is stored\n remove_s3_data_files(resource)\n rescue StandardError => e\n msg = \"An unexpected error occurred when cleaning up files for resource #{resource.id}: \"\n msg << e.full_message\n logger.warn(msg)\n end", "def cleanup\n true\n end", "def safe_destroy(name)\n # More conservative: Create a list of related resources to delete.\n # The downside is that if a root resource has already been deleted,\n # (like a DNS record) we won't find the formerly dependent records.\n\n flat_list = Lister.new(debug: @debug, availability_zone: @availability_zone)\n .list(name, true)\n\n flat_list[:groups].each do |group_name|\n delete_group_policy(group_name) rescue LOGGER.warn(\"Error deleting policy: #{$!} at #{$@}\")\n LOGGER.info(\"Deleted policy #{group_name}\")\n delete_group(group_name) rescue LOGGER.warn(\"Error deleting group: #{$!} at #{$@}\")\n LOGGER.info(\"Deleted group #{group_name}\")\n end\n flat_list.delete(:groups)\n\n flat_list[:key_names].each do |key_name|\n delete_key(key_name) rescue LOGGER.warn(\"Error deleting PK: #{$!} at #{$@}\")\n LOGGER.info(\"Deleted PK #{key_name}\")\n end\n flat_list.delete(:key_names)\n\n terminate_instances_by_id(flat_list[:instance_ids]) rescue LOGGER.warn(\"Error terminating EC2 instances: #{$!} at #{$@}\")\n LOGGER.info(\"Terminated EC2 instances #{flat_list[:instance_ids]}\")\n flat_list.delete(:instance_ids)\n flat_list.delete(:volume_ids) # Volumes are set to disappear with their instance.\n\n delete_dns_cname_records(dns_zone(name), flat_list[:cnames]) rescue LOGGER.warn(\"Error deleting CNAMEs: #{$!} at #{$@}\")\n LOGGER.info(\"Deleted CNAMEs #{flat_list[:cnames]}\")\n flat_list.delete(:cnames)\n\n flat_list.keys.tap do |forgot|\n fail(\"Still need to clean up #{forgot}\") unless forgot.empty?\n end\n end", "def cleanup\n data_provider = Workflow::Invocation.lookup_data_provider(self.class.data_provider_name)\n channels = data_provider.data_provider_channels.all(:order => 'name')\n channels.each do |channel|\n dir = source_dir_for_channel(channel)\n # XXX ugly hack to get cleanup functional for initial release.\n # What should happen is a separate workflow should be instantiated\n # for every channel, like all other workflows work.\n @params[:channel] = channel\n cleanup_dir(dir, params)\n end\n end", "def cleanup!\n # This method may be left unimplemented if that is applicable\n end", "def destroy!\n orchio_purge\n end", "def cleanup_files\n FileUtils.rm_rf(File.join(Rails.root, \"tmp\"))\n end", "def delete\n fast_storage.delete\n warehouse_storage.delete\n end" ]
[ "0.71809286", "0.68798983", "0.68682027", "0.6783374", "0.6367989", "0.6347211", "0.63415235", "0.62592435", "0.62468827", "0.62252426", "0.62249434", "0.62249434", "0.62228924", "0.62121224", "0.62121224", "0.62065923", "0.62065923", "0.62065923", "0.62065923", "0.618122", "0.6172718", "0.6172718", "0.6172718", "0.6172718", "0.61650145", "0.6148953", "0.6148953", "0.6148953", "0.612664", "0.6116372", "0.61151874", "0.6113922", "0.611338", "0.61053395", "0.61053395", "0.60900325", "0.6081739", "0.60589314", "0.6049937", "0.6028246", "0.60062623", "0.59915173", "0.59807265", "0.5976157", "0.59740084", "0.5968323", "0.5968263", "0.596642", "0.596642", "0.5944319", "0.5936876", "0.58975214", "0.5894929", "0.5878772", "0.5875065", "0.5875065", "0.58626235", "0.58626235", "0.5847863", "0.584724", "0.58442706", "0.5835072", "0.58294797", "0.58180076", "0.580087", "0.57902133", "0.5782741", "0.5782406", "0.57816845", "0.5770441", "0.57697767", "0.57579786", "0.57490945", "0.57182884", "0.5716635", "0.5713083", "0.5707587", "0.5700568", "0.5694143", "0.56861126", "0.5684854", "0.56829995", "0.56778485", "0.5668017", "0.5665511", "0.56584287", "0.56561834", "0.56526905", "0.5644569", "0.56319904", "0.56319904", "0.5624363", "0.5624316", "0.56237453", "0.56180423", "0.5608269", "0.56045884", "0.55994105", "0.55990356", "0.5598772" ]
0.866983
0
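The positive document and the top negatives for the "Cleanup a domain's volumes" record all follow the same libvirt pattern: halt the guest if it is still active, destroy each backing storage volume, then remove the domain itself. A short sketch of driving that pattern with fog-libvirt (the connection URI and domain name are assumptions; the per-volume logic mirrors volume_cleanup above):

require 'fog/libvirt'

# Connect to the local libvirt daemon; the URI is an assumption.
compute = Fog::Compute.new(:provider => 'libvirt', :libvirt_uri => 'qemu:///system')

# Look up the guest by name ('example-domain' is a placeholder).
domain = compute.servers.all.find { |d| d.name == 'example-domain' }

# Mirror volume_cleanup/destroy_domain from the record: stop the guest,
# delete every backing volume that has a key, then remove the domain.
domain.halt if domain.active
domain.volumes.each { |volume| volume.destroy if volume.key }
domain.destroy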
Create an array of all virtual disks for the domain
def domain_volumes # Use the domain name as our volume base name. base_name = domain_name if not config[:image_name] config[:image_name] = default_image end # Clone our root volume from our base image. root_volume = clone_volume(config[:image_name], "#{base_name}-root") # Return the array of created volumes [root_volume].concat( create_volumes( extra_volumes(base_name) ) ) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
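The positive document for this record builds the guest's disk set by cloning a root volume from a base image and appending any extra volumes. clone_volume itself is not shown in this excerpt; one plausible way to implement that clone step with plain ruby-libvirt is sketched below. The pool name, base image name, and volume XML are illustrative assumptions rather than the driver's actual implementation.

require 'libvirt'

# Open a libvirt connection and locate the storage pool (names are placeholders).
conn = Libvirt::open('qemu:///system')
pool = conn.lookup_storage_pool_by_name('default')

# Base image to clone from and the XML describing the new root volume.
base = pool.lookup_volume_by_name('debian-11.qcow2')
xml  = <<~XML
  <volume>
    <name>example-root</name>
    <capacity unit='bytes'>#{base.info.capacity}</capacity>
    <target><format type='qcow2'/></target>
  </volume>
XML

# virStorageVolCreateXMLFrom: create the new volume as a copy of the base image.
root_volume = pool.create_volume_xml_from(xml, base)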
[ "def disks\n self.config.hardware.device.grep(RbVmomi::VIM::VirtualDisk)\n end", "def list_virtual_machine_disks\n request_path = '/services/disks'\n request = ManagementHttpRequest.new(:get, request_path, nil)\n response = request.call\n Serialization.disks_from_xml(response)\n end", "def ocs_storages_disk_only\n disks = []\n for o in ocs_storages\n if o.TYPE == 'disk'\n disks << o\n end\n end\n disks\n end", "def managed_disks\n @managed_disks ||= collect_inventory(:managed_disks) { @sds.list_all }\n end", "def create_iscsi_disks(vbox, name)\n unless controller_exists(name, 'SATA Controller')\n vbox.customize ['storagectl', :id,\n '--name', 'SATA Controller',\n '--add', 'sata']\n end\n\n dir = \"#{ENV['HOME']}/VirtualBox\\ VMs/vdisks\"\n Dir.mkdir dir unless File.directory?(dir)\n\n osts = (1..20).map { |x| [\"OST#{x}\", '5120'] }\n\n [\n %w[mgt 512],\n %w[mdt0 5120]\n ].concat(osts).each_with_index do |(name, size), i|\n file_to_disk = \"#{dir}/#{name}.vdi\"\n port = (i + 1).to_s\n\n unless File.exist?(file_to_disk)\n vbox.customize ['createmedium',\n 'disk',\n '--filename',\n file_to_disk,\n '--size',\n size,\n '--format',\n 'VDI',\n '--variant',\n 'fixed']\n end\n\n vbox.customize ['storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', port,\n '--type', 'hdd',\n '--medium', file_to_disk,\n '--device', '0']\n\n vbox.customize ['setextradata', :id,\n \"VBoxInternal/Devices/ahci/0/Config/Port#{port}/SerialNumber\",\n name.ljust(20, '0')]\n end\nend", "def get_disks(vm_id)\n end", "def create_iscsi_disks(vbox)\n unless controller_exists(ISCSI_NAME, 'SATA Controller')\n vbox.customize ['storagectl', :id,\n '--name', 'SATA Controller',\n '--add', 'sata']\n end\n\n (1..10).each do |i|\n id = i.to_s.rjust(2, '0')\n disk = \"./tmp/disk#{i}.vdi\"\n\n unless File.exist?(disk)\n vbox.customize ['createmedium', 'disk',\n '--filename', disk,\n '--size', '100',\n '--format', 'VDI',\n '--variant', 'fixed']\n end\n\n vbox.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', i,\n '--type', 'hdd',\n '--medium', disk\n ]\n vbox.customize [\n 'setextradata', :id,\n \"VBoxInternal/Devices/ahci/0/Config/Port#{i}/SerialNumber\",\n \"081118FC1221NCJ6G8#{id}\"\n ]\n end\nend", "def disk_specs(path)\n specs = []\n unit = 0\n if resource[:virtual_disks]\n resource[:virtual_disks].each do |vd|\n size = vd[\"size\"].to_i * 1024 * 1024\n specs << disk_spec(path, size, unit)\n unit += 1\n end\n else\n specs << disk_spec(path, resource[:disk_size], unit)\n end\n\n specs\n end", "def virtual_disk_views\n client.enumerate(\"http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/root/DCIM/DCIM_VirtualDiskView\")\n end", "def info_disks\n @disks = {}\n\n keys = disk_keys\n vc_disks = vcenter_disks_get\n one_disks = one_disks_list\n\n one_disks.each do |one_disk|\n index = one_disk['DISK_ID']\n\n disk = query_disk(one_disk, keys, vc_disks)\n\n vc_dev = vc_disks.delete(disk) if disk\n\n if vc_dev\n @disks[index] = Disk.new(index.to_i, one_disk, vc_dev)\n else\n @disks[index] = Disk.one_disk(index.to_i, one_disk)\n end\n end\n\n vc_disks.each {|d| @disks[d[:path_wo_ds]] = Disk.vc_disk(d) }\n\n @disks\n end", "def disks\n return @disks unless @disks.empty?\n\n info_disks\n end", "def one_disks_list\n one_item.info if one_item.instance_of?(OpenNebula::VirtualMachine)\n one_item.retrieve_xmlelements('TEMPLATE/DISK')\n end", "def discover_new_disks\n logger.debug 'Discovering new virtual disks.'\n\n StorageNode.list_disks().each do |volume|\n disk = 
VirtualDisk.find_by_disk_number(volume)\n next if disk\n\n disk_size = StorageNode.volume_size(volume)\n logger.info 'autodiscovered virtual disk %d with size %d Mb' % [volume, disk_size]\n disk = VirtualDisk.new(volume, disk_size)\n disk.save('discovered')\n end\n end", "def get_virtualization_virtual_disk_list(opts = {})\n data, _status_code, _headers = get_virtualization_virtual_disk_list_with_http_info(opts)\n data\n end", "def configure_disks(vb, server, hostname, name)\n vminfo = vm_info(name)\n disks = server['disks'] || {}\n unless vminfo =~ /Storage Controller Name \\(1\\): *SATA Controller/\n # puts \"Attaching SATA Controller\"\n vb.customize [\n 'storagectl', :id,\n '--name', 'SATA Controller',\n '--add', 'sata',\n '--portcount', disks.size\n ]\n # else\n # puts 'SATA Controller already attached'\n end\n\n disks.each_with_index do |disk, i|\n disk_name = disk.first\n disk_size = disk.last['size']\n disk_uuid = disk.last['uuid']\n real_uuid = \"00000000-0000-0000-0000-#{disk_uuid.rjust(12,'0')}\"\n if server['cluster']\n disk_filename = File.join(VAGRANT_ROOT, \"#{disk_name}_#{server['cluster']}.vdi\")\n else\n disk_filename = File.join(VAGRANT_ROOT, \"#{disk_name}.vdi\")\n end\n\n if File.file?(disk_filename)\n # puts \"Disk #{disk_filename} already created\"\n disk_hash = `VBoxManage showmediuminfo \"#{disk_filename}\"`.scan(/(.*): *(.*)/).to_h\n current_uuid = disk_hash['UUID']\n else\n # puts \"Creating disk #{disk_filename}\"\n current_uuid = '0'\n if server['cluster']\n vb.customize [\n 'createhd',\n '--filename', disk_filename,\n '--size', disk_size.to_s,\n '--variant', 'Fixed'\n ]\n vb.customize [\n 'modifyhd', disk_filename,\n '--type', 'shareable'\n ]\n else\n vb.customize [\n 'createhd',\n '--filename', disk_filename,\n '--size', disk_size.to_s,\n '--variant', 'Standard'\n ]\n end\n end\n\n # Conditional for adding disk_uuid\n if server['cluster'] && current_uuid == real_uuid\n # puts \"Attaching shareable disk #{disk_filename}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--mtype', 'shareable'\n ]\n elsif server['cluster']\n # puts \"Attaching shareable disk #{disk_filename}, adding UUID #{real_uuid}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--mtype', 'shareable',\n '--setuuid', real_uuid\n ]\n elsif current_uuid == real_uuid\n # puts \"Attaching normal disk #{disk_filename}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename\n ]\n else\n # puts \"Attaching normal disk #{disk_filename}, adding UUID #{real_uuid}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--setuuid', real_uuid\n ]\n end\n end\nend", "def get_disks(vm_id)\n with_thread_name(\"get_disks(#{vm_id})\") do\n begin\n @logger.debug(\"Requesting disks attached to VM #{vm_id}...\")\n vm = @vm_manager.get_virtual_machine(vm_id, true)\n disks = []\n vm['vm_disk_info'].each do |disk|\n unless disk['disk_address']['volume_group_uuid'].nil?\n disks << disk['disk_address']['volume_group_uuid']\n end\n end\n @logger.debug(\"VM Disks: #{disks}\")\n disks\n rescue => e\n @logger.error(e)\n cloud_error(e.message)\n end\n 
end\n end", "def keys\n @disk.keys\n end", "def disks\n \tbs = {}\n\tnoko_details.xpath('/domain/devices/disk').map do |ddsk|\n\t next if ddsk.xpath('@device').text != \"disk\"\n\t next if ddsk.xpath('@snapshot').text == \"external\"\n\t sf = ddsk.xpath('source/@dev','source/@file').text\n\t td = ddsk.xpath('target/@dev').text\n\t bs[td] = sf\n\tend\n\tbs\n end", "def detach_disks_specs\n detach_disk_array = []\n extra_config = []\n keys = disk_keys.invert\n\n ipool = VCenterDriver::VIHelper.one_pool(OpenNebula::ImagePool)\n disks_each(:detached?) do |d|\n key = d.key.to_s\n source = VCenterDriver::FileHelper.escape_path(d.path)\n persistent =\n VCenterDriver::VIHelper\n .find_persistent_image_by_source(\n source, ipool\n )\n\n op = { :operation => :remove, :device => d.device }\n if !persistent && d.type != 'CDROM'\n op[:fileOperation] = :destroy\n end\n detach_disk_array << op\n\n # Remove reference opennebula.disk if exist from vmx and cache\n extra_config << d.config(:delete) if keys[key]\n end\n\n [detach_disk_array, extra_config]\n end", "def get_disks(vm_cid)\n @telemetry_manager.monitor('initialize') do\n _init_azure\n end\n with_thread_name(\"get_disks(#{vm_cid})\") do\n @telemetry_manager.monitor('get_disks', id: vm_cid) do\n disks = []\n vm = @vm_manager.find(InstanceId.parse(vm_cid, _azure_config.resource_group_name))\n raise Bosh::Clouds::VMNotFound, \"VM '#{vm_cid}' cannot be found\" if vm.nil?\n\n vm[:data_disks].each do |disk|\n disks << disk[:disk_bosh_id] unless is_ephemeral_disk?(disk[:name]) # disk_bosh_id is same to disk_cid\n end\n disks\n end\n end\n end", "def get_virtualization_vmware_virtual_disk_list(opts = {})\n data, _status_code, _headers = get_virtualization_vmware_virtual_disk_list_with_http_info(opts)\n data\n end", "def attach_disks_specs\n attach_disk_array = []\n extraconfig = []\n attach_spod_array = []\n attach_spod_disk_info = {}\n\n pos = { :ide => 0, :scsi => 0 }\n disks_each(:no_exists?) do |disk|\n disk.one_item['TYPE'] == 'CDROM' ? k = :ide : k = :scsi\n\n if disk.storpod?\n spec = calculate_add_disk_spec(disk.one_item, pos[k])\n attach_spod_array << spec\n\n controller_key = spec[:device].controllerKey\n unit_number = spec[:device].unitNumber\n\n unit_ctrl = \"#{controller_key}-#{unit_number}\"\n attach_spod_disk_info[unit_ctrl] = disk.id\n else\n aspec = calculate_add_disk_spec(disk.one_item, pos[k])\n extra_key = \"opennebula.mdisk.#{disk.one_item['DISK_ID']}\"\n extra_value = aspec[:device].key.to_s\n\n attach_disk_array << aspec\n extraconfig << { :key => extra_key, :value => extra_value }\n end\n\n pos[k]+=1\n end\n\n { :disks => attach_disk_array,\n :spods => attach_spod_array,\n :spod_info => attach_spod_disk_info,\n :extraconfig => extraconfig }\n end", "def __nrpe_alldisks\n\t\t\t\tdata = call_nrpe \"check_alldisks\"\n\t\t\t\treturn false if data == false\n\t\t\t\tdisks = data.chomp.split(\"|\")[1].split.map! 
{|x| x = x.split(\"=\")[0] }\n\t\t\t\treturn disks\n\t\tend", "def list_partitions #by nelsongs\n\treturn `fdisk -l | grep /dev | grep -v Disk | awk '{print $1}'`.chomp.split\nend", "def attach_volumes(node, disk_sizes)\n if $provider == :virtualbox\n node.vm.provider :virtualbox do |v, override|\n disk_num = 0\n disk_sizes.each do |disk_size|\n disk_num += 1\n diskname = File.join(File.dirname(File.expand_path(__FILE__)), \".virtualbox\", \"#{node.vm.hostname}-#{disk_num}.vdi\")\n unless File.exist?(diskname)\n v.customize ['createhd', '--filename', diskname, '--size', disk_size * 1024]\n end\n v.customize ['storageattach', :id, '--storagectl', 'SATA Controller', '--port', disk_num, '--device', 0, '--type', 'hdd', '--medium', diskname]\n end\n end\n end\n\n if $provider == :vmware_fusion\n node.vm.provider :vmware_fusion do |v, override|\n vdiskmanager = '/Applications/VMware\\ Fusion.app/Contents/Library/vmware-vdiskmanager'\n unless File.exist?(vdiskmanager)\n dir = File.join(File.dirname(File.expand_path(__FILE__)), \".vmware\")\n unless File.directory?( dir )\n Dir.mkdir dir\n end\n\n disk_num = 0\n disk_sizes.each do |disk_size|\n disk_num += 1\n diskname = File.join(dir, \"#{node.vm.hostname}-#{disk_num}.vmdk\")\n unless File.exist?(diskname)\n `#{vdiskmanager} -c -s #{disk_size}GB -a lsilogic -t 1 #{diskname}`\n end\n\n v.vmx[\"scsi0:#{disk_num}.filename\"] = diskname\n v.vmx[\"scsi0:#{disk_num}.present\"] = 'TRUE'\n v.vmx[\"scsi0:#{disk_num}.redo\"] = ''\n end\n end\n end\n end\n\n if $provider == :parallels\n node.vm.provider :parallels do |v, override|\n disk_sizes.each do |disk_size|\n v.customize ['set', :id, '--device-add', 'hdd', '--size', disk_size * 1024]\n end\n end\n end\n\nend", "def physical_disk_views\n client.enumerate(\"http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/root/DCIM/DCIM_PhysicalDiskView\")\n end", "def storage_list(mixins = nil)\n storages = Occi::Core::Resources.new\n\n Backends::Ec2::Helpers::AwsConnectHelper.rescue_aws_service(@logger) do\n volumes = @ec2_client.describe_volumes.volumes\n volumes.each do |volume|\n next unless volume\n storages << storage_parse_backend_obj(volume)\n end if volumes\n end\n\n storages\n end", "def get_volumes\n volumes = get(\"cloud-instances/#{guid}/volumes\")[\"volumes\"] || []\n\n volumes.map do |volume|\n get_volume(volume[\"volumeID\"])\n end\n end", "def volumes\n @volumes.to_a.freeze\n end", "def get_virtualization_vmware_virtual_disk_list_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: VirtualizationApi.get_virtualization_vmware_virtual_disk_list ...'\n end\n allowable_values = [\"allpages\", \"none\"]\n if @api_client.config.client_side_validation && opts[:'inlinecount'] && !allowable_values.include?(opts[:'inlinecount'])\n fail ArgumentError, \"invalid value for \\\"inlinecount\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/api/v1/virtualization/VmwareVirtualDisks'\n\n # query parameters\n query_params = opts[:query_params] || {}\n query_params[:'$filter'] = opts[:'filter'] if !opts[:'filter'].nil?\n query_params[:'$orderby'] = opts[:'orderby'] if !opts[:'orderby'].nil?\n query_params[:'$top'] = opts[:'top'] if !opts[:'top'].nil?\n query_params[:'$skip'] = opts[:'skip'] if !opts[:'skip'].nil?\n query_params[:'$select'] = opts[:'select'] if !opts[:'select'].nil?\n query_params[:'$expand'] = opts[:'expand'] if !opts[:'expand'].nil?\n query_params[:'$apply'] = opts[:'apply'] if !opts[:'apply'].nil?\n 
query_params[:'$count'] = opts[:'count'] if !opts[:'count'].nil?\n query_params[:'$inlinecount'] = opts[:'inlinecount'] if !opts[:'inlinecount'].nil?\n query_params[:'at'] = opts[:'at'] if !opts[:'at'].nil?\n query_params[:'tags'] = opts[:'tags'] if !opts[:'tags'].nil?\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json', 'text/csv', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'VirtualizationVmwareVirtualDiskResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2']\n\n new_options = opts.merge(\n :operation => :\"VirtualizationApi.get_virtualization_vmware_virtual_disk_list\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VirtualizationApi#get_virtualization_vmware_virtual_disk_list\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def volumes\n Collection::StorageVolumeCollection.new(self)\n end", "def disks_each(condition)\n res = []\n disks.each do |_id, disk|\n next unless disk.method(condition).call\n\n yield disk if block_given?\n\n res << disk\n end\n\n res\n end", "def disk_safes\n if UltraVault.config.api_version > 1\n @disk_safes ||= UltraVault::DiskSafe.all.map { |disk_safe| disk_safe if disk_safe.agent_id == id }.compact\n else\n @disk_safes ||= UltraVault::DiskSafe.find_all_by_agent_id(id)\n end\n end", "def retrieve_volumes\n dbg { \"retrieving #{pool_info}, #{hv_info}\" }\n\n volumes = pool.list_all_volumes\n dbg { \"list_all_volumes #{pool_info}, #{hv_info}\" }\n\n storage_volumes = volumes.map.with_index do |vol, index|\n id = \"#{uuid}--#{index}\"\n StorageVolume.new(vol, pool: self, id: id)\n end\n\n dbg { \"retrieved size=#{storage_volumes.size}, #{pool_info}, #{hv_info}\" }\n storage_volumes\n end", "def all_disks\n DB[\"select distinct d.path, d.id from dirs d\n join files f on d.id = f.dir_id\n where f.is_music = 't'\"]\nend", "def get_virtualization_virtual_disk_list_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: VirtualizationApi.get_virtualization_virtual_disk_list ...'\n end\n allowable_values = [\"allpages\", \"none\"]\n if @api_client.config.client_side_validation && opts[:'inlinecount'] && !allowable_values.include?(opts[:'inlinecount'])\n fail ArgumentError, \"invalid value for \\\"inlinecount\\\", must be one of #{allowable_values}\"\n end\n # resource path\n local_var_path = '/api/v1/virtualization/VirtualDisks'\n\n # query parameters\n query_params = opts[:query_params] || {}\n query_params[:'$filter'] = opts[:'filter'] if !opts[:'filter'].nil?\n query_params[:'$orderby'] = opts[:'orderby'] if !opts[:'orderby'].nil?\n query_params[:'$top'] = opts[:'top'] if !opts[:'top'].nil?\n query_params[:'$skip'] = opts[:'skip'] if !opts[:'skip'].nil?\n query_params[:'$select'] = opts[:'select'] if !opts[:'select'].nil?\n 
query_params[:'$expand'] = opts[:'expand'] if !opts[:'expand'].nil?\n query_params[:'$apply'] = opts[:'apply'] if !opts[:'apply'].nil?\n query_params[:'$count'] = opts[:'count'] if !opts[:'count'].nil?\n query_params[:'$inlinecount'] = opts[:'inlinecount'] if !opts[:'inlinecount'].nil?\n query_params[:'at'] = opts[:'at'] if !opts[:'at'].nil?\n query_params[:'tags'] = opts[:'tags'] if !opts[:'tags'].nil?\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json', 'text/csv', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'VirtualizationVirtualDiskResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2']\n\n new_options = opts.merge(\n :operation => :\"VirtualizationApi.get_virtualization_virtual_disk_list\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VirtualizationApi#get_virtualization_virtual_disk_list\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def add_instance_disk(array, instance, disk)\n if instance.managed_disk?\n disk_type = 'managed'\n disk_location = disk.managed_disk.id\n managed_disk = @managed_disks.find { |d| d.id.casecmp(disk_location).zero? }\n\n if managed_disk\n disk_size = managed_disk.properties.disk_size_gb.gigabytes\n mode = managed_disk.try(:sku).try(:name)\n else\n _log.warn(\"Unable to find disk information for #{instance.name}/#{instance.resource_group}\")\n disk_size = nil\n mode = nil\n end\n else\n disk_type = 'unmanaged'\n disk_location = disk.try(:vhd).try(:uri)\n disk_size = disk.try(:disk_size_gb).try(:gigabytes)\n\n if disk_location\n uri = Addressable::URI.parse(disk_location)\n storage_name = uri.host.split('.').first\n container_name = File.dirname(uri.path)\n blob_name = uri.basename\n\n storage_acct = @storage_accounts.find { |s| s.name.casecmp(storage_name).zero? }\n mode = storage_acct.try(:sku).try(:name)\n\n if @options.get_unmanaged_disk_space && disk_size.nil?\n storage_keys = @sas.list_account_keys(storage_acct.name, storage_acct.resource_group)\n storage_key = storage_keys['key1'] || storage_keys['key2']\n blob_props = storage_acct.blob_properties(container_name, blob_name, storage_key)\n disk_size = blob_props.content_length.to_i\n end\n end\n end\n\n disk_record = {\n :device_type => 'disk',\n :controller_type => 'azure',\n :device_name => disk.name,\n :location => disk_location,\n :size => disk_size,\n :disk_type => disk_type,\n :mode => mode\n }\n\n array << disk_record\n end", "def get_disks(token, disks_url)\n disk_ids = Array.new \n endpoint = disks_url.scan(/\\/organizations.*disks/)\n resp_hash = get_url(token, endpoint[0], {})\n resp_hash[:embedded][:disks].each do |disk|\n disk_ids.push extract_id(disk[:_links][:self][:href], 'disks')\n end\n return [disk_ids, resp_hash]\nend", "def reference_disks(template_ref, disks, managed)\n return [] if disks.empty? 
|| instantiated_as_persistent?\n\n extraconfig = []\n if managed\n key_prefix = 'opennebula.mdisk'\n else\n key_prefix = 'opennebula.disk'\n end\n\n # Get vcenter VM disks to know real path of cloned disk\n vcenter_disks = vcenter_disks_get\n\n # Create an array with the paths of the disks in vcenter template\n if !template_ref.nil?\n template = VCenterDriver::Template.new_from_ref(template_ref,\n vi_client)\n template_disks = template.vcenter_disks_get\n else\n # If we are dealing with a Wild VM, we simply use\n # what is available in the vCenter VM\n template_disks = vcenter_disks_get\n end\n template_disks_vector = []\n template_disks.each do |d|\n template_disks_vector << d[:path_wo_ds]\n end\n\n # Try to find index of disks in template disks\n disks.each do |disk|\n disk_source =\n VCenterDriver::FileHelper\n .unescape_path(\n disk['SOURCE']\n )\n template_disk = template_disks.select do |d|\n d[:path_wo_ds] == disk_source\n end.first\n\n if template_disk\n vcenter_disk = vcenter_disks.select do |d|\n d[:key] == template_disk[:key]\n end.first\n end\n\n unless vcenter_disk\n raise \"disk with path #{disk_source}\"\\\n 'not found in the vCenter VM'\n end\n\n reference = {}\n reference[:key] = \"#{key_prefix}.#{disk['DISK_ID']}\"\n reference[:value] = (vcenter_disk[:key]).to_s\n extraconfig << reference\n end\n\n extraconfig\n end", "def list_volumes\n response = @connection.req(\"GET\", \"/#{@volume_path}\")\n volumes_hash = JSON.parse(response.body)[\"volumes\"]\n volumes_hash.inject([]){|res, current| res << OpenStack::Volume::Volume.new(current); res}\n end", "def merge_vm_disks(host, global, vb, controller)\n # Only get the first entry from global['vm_groups'] if more than one entry present\n vb_dir=global['vm_basedir'] ? global['vm_basedir'] + global['vm_groups'].partition(',')[0] + \"/\" + host['vm_name'] + \"/\" : \"./.virtualbox/\"\n if global['vm_disks'] or host['vm_disks']\n merge_hash = merge_2_array_of_hashes(global['vm_disks'], host['vm_disks'])\n merge_hash.each do |key, value|\n diskname=\"#{vb_dir}#{host['vm_name']}-#{key}.vdi\"\n unless File.exist?(diskname)\n vb.customize [\"createmedium\", \"disk\", \"--filename\", diskname, \"--size\", value * 1024 , \"--format\", \"vdi\", \"--variant\", \"Standard\"]\n end\n vb.customize [\"storageattach\", :id , \"--storagectl\", controller, \"--port\", key, \"--device\", \"0\", \"--type\", \"hdd\", \"--medium\", diskname]\n end\n end\nend", "def list request_pb, options:, &block\n uri = \"/compute/v1/projects/#{request_pb.project}/zones/#{request_pb.zone}/disks\"\n\n query_string_params = {}\n query_string_params[\"filter\"] = request_pb.filter.to_s if request_pb.filter && request_pb.filter != \"\"\n query_string_params[\"maxResults\"] = request_pb.max_results.to_s if request_pb.max_results && request_pb.max_results != 0\n query_string_params[\"orderBy\"] = request_pb.order_by.to_s if request_pb.order_by && request_pb.order_by != \"\"\n query_string_params[\"pageToken\"] = request_pb.page_token.to_s if request_pb.page_token && request_pb.page_token != \"\"\n query_string_params[\"returnPartialSuccess\"] = request_pb.return_partial_success.to_s if request_pb.return_partial_success && request_pb.return_partial_success != false\n\n response = @client_stub.make_get_request(\n uri: uri,\n params: query_string_params,\n options: options,\n )\n\n result = ::Google::Cloud::Compute::V1::DiskList.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n\n result\n end", "def create_delta_disks\n 
begin\n disks =\n @item['config.hardware.device']\n .grep(RbVmomi::VIM::VirtualDisk)\n disk_without_snapshots = disks.select do |x|\n x.backing.parent.nil?\n end\n rescue StandardError\n error = 'Cannot extract existing disks on template.'\n use_linked_clones = false\n return error, use_linked_clones\n end\n\n if !disk_without_snapshots.empty?\n\n begin\n if self['config.template']\n @item.MarkAsVirtualMachine(\n :pool => resource_pool,\n :host => self['runtime.host']\n )\n end\n rescue StandardError => e\n @item.MarkAsTemplate()\n error = 'Cannot mark the template as a VirtualMachine. '\\\n 'Not using linked clones. '\\\n \"Reason: #{e.message}/#{e.backtrace}\"\n use_linked_clones = false\n return error, use_linked_clones\n end\n\n begin\n spec = {}\n spec[:deviceChange] = []\n\n disk_without_snapshots.each do |disk|\n remove_disk_spec =\n {\n :operation => :remove,\n :device => disk\n }\n spec[:deviceChange] << remove_disk_spec\n\n add_disk_spec =\n {\n :operation => :add,\n :fileOperation => :create,\n :device => disk.dup.tap do |x|\n x.backing =\n x.backing.dup\n x.backing.fileName =\n \"[#{disk.backing.datastore.name}]\"\n x.backing.parent =\n disk.backing\n end\n }\n spec[:deviceChange] << add_disk_spec\n end\n\n @item\n .ReconfigVM_Task(\n :spec => spec\n ).wait_for_completion unless spec[:deviceChange].empty?\n rescue StandardError => e\n error = 'Cannot create the delta disks on top '\\\n \"of the template. Reason: #{e.message}.\"\n\n if VCenterDriver::CONFIG[:debug_information]\n error += \"\\n\\n#{e.backtrace}\"\n end\n\n use_linked_clones = false\n return error, use_linked_clones\n end\n\n begin\n @item.MarkAsTemplate()\n rescue StandardError => e\n error = 'Cannot mark the VirtualMachine as '\\\n 'a template. Not using linked clones.' \\\n \" Reason: #{e.message}.\"\n\n if VCenterDriver::CONFIG[:debug_information]\n error += \"\\n\\n#{e.backtrace}\"\n end\n\n use_linked_clones = false\n return error, use_linked_clones\n end\n end\n\n error = nil\n use_linked_clones = true\n\n [error, use_linked_clones]\n end", "def getVMs\n @vms = VirtualMachine.all(@ip_address)\n end", "def volumes\n service.list_pool_volumes uuid\n end", "def collectVMs(rf, db)\n\trf.childEntity.grep(RbVmomi::VIM::Datacenter).each do |dc|\n\t\tprogressbar = ProgressBar.create(:title => \"VMs\", :format => '%t |%b>>%i| %p%% %a')\n\t\tprogressbar.total = counter(dc, \"v\")\n\t\tdc.vmFolder.childEntity.each do |folder|\n\t\t\tfolder.childEntity.each do |vmlist|\n\t\t\t\tnext if vmlist.class.to_s == \"Folder\"\n\t\t\t\tdb.select(3)\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"Status\", \"#{vmlist.summary.overallStatus}\")\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"Uptime\", \"#{vmlist.summary.quickStats.uptimeSeconds}\")\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"CPUusage\", \"#{vmlist.summary.quickStats.overallCpuUsage}\")\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"CPUnum\", \"#{vmlist.summary.config.numCpu}\")\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"MemUsage\", \"#{vmlist.summary.quickStats.guestMemoryUsage}\")\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"MemTotal\", \"#{vmlist.summary.config.memorySizeMB}\")\n\t\t\t\tprogressbar.increment\n\t\t\tend\n\t\tend\n\tend\nend", "def disk_all(cfg)\n ignored = cfg['ignore_fs'] || 'tmpfs'\n ignore_fs = \"fstype!~\\\"#{ignored}\\\"\"\n query = @client.percent_query_free(\n \"node_filesystem_files{#{ignore_fs}}\",\n \"node_filesystem_files_free{#{ignore_fs}}\"\n )\n prepare_metrics('disk_all', @client.query(query))\n end", "def add_physical_disks\n add_collection(physical_infra, 
:physical_disks) do |builder|\n builder.add_properties(\n :manager_ref => %i(physical_storage ems_ref),\n :manager_ref_allowed_nil => %i(ems_ref)\n )\n end\n end", "def list_kvms\n info = Hash[\n *Dir.glob(File.join(KVM_HOME, 'storage', '*.qcow2')).map{|dir|\n key = File.basename(dir).sub('.qcow2', '')\n [key, {:address => kvm_ip(key), :type => kvm_type(key)}]\n }.sort{|a,b|\n a.first <=> b.first\n }.flatten\n ]\n info.each do |name, info|\n puts \"#{name}\"\n puts \" Type: #{info[:type]}\"\n puts \" Address: #{info[:address]}\"\n end\nend", "def disk_space()\n\n instructions = 'df -h'\n r = @ssh ? @ssh.exec!(instructions) : `#{instructions}`\n\n @results[:disk_usage] = {}\n\n a = r.lines.grep(/\\/dev\\/root/)\n\n puts ('a: ' + a.inspect).debug if @debug\n\n if a.any? then\n size, used, avail = a[0].split(/ +/).values_at(1,2,3)\n\n @results[:disk_usage][:root] = {size: size, used: used, \n avail: avail}\n end\n\n a2 = r.lines.grep(/\\/dev\\/sda1/)\n\n puts ('a2: ' + a2.inspect).debug if @debug\n\n if a2.any? then\n size, used, avail = a2[0].split(/ +/).values_at(1,2,3)\n\n @results[:disk_usage][:sda1] = {size: size, used: used, \n avail: avail}\n end\n\n end", "def add_disk(server, size)\n host = server.to_s\n\n # Increment disk id\n if !DISKS.key?(host) then\n DISKS[host] = 0\n else\n DISKS[host] += 1\n end\n disk_id = DISKS[host]\n disk_filename = \".vagrant/disks/\" + host + \"_\" + disk_id.to_s + \".vdi\"\n\n server.vm.provider \"virtualbox\" do |v|\n # Create disk if it not exist\n unless File.exist?(disk)\n v.customize [\"createhd\", \"--filename\", disk_filename, \"--size\", size * 1024 * 1024]\n end\n v.customize ['storageattach', :id, '--storagectl', 'SATA Controller', '--port', disk_id, '--device', 0, '--type', 'hdd', '--medium', disk]\n end\nend", "def list_all_volumes\n Docker::Volume.all({}, client)\n rescue\n []\n end", "def gen_disks_line(n)\n node_disks = []\n if n.disks.length > 0 then\n node_disks = n.node_disks.sort.map{|x| x.xen_name }\n else\n if n.only_supports_ide?\n node_disks << \"file:/xen/domains/#{n.hostname}/disk.img,hda1,w\"\n else\n node_disks << \"file:/xen/domains/#{n.hostname}/disk.img,sda1,w\"\n end\n end\n node_disks.map { |x| \" '#{x}'\" }.join(\",\\n\")\nend", "def storages\n IbmCloudRest.get \"#{@uri}/storage\"\n end", "def existing_kvms\n Dir.glob(File.join(KVM_HOME, 'storage', '*.qcow2')).map do |file|\n File.basename(file).sub('.qcow2', '')\n end\nend", "def getstorages(rhevmhost,rhevmuser,rhevmpass)\n require 'rubygems'\n require 'rest_client'\n require 'nokogiri'\n\n storages = Array.new{Array.new}\n\n resource = RestClient::Resource.new(rhevmhost, :user => rhevmuser, :password => rhevmpass, :verify_ssl => OpenSSL::SSL::VERIFY_NONE)\n storage_data = Nokogiri::XML(resource[\"/api/storagedomains/\"].get.body)\n storage_data.xpath(\"//storage_domain\").each do |storage_domain|\n if storage_domain.xpath('type').text == \"data\"\n name=storage_domain.xpath('name').text\n $evm.log(\"info\", \"Name: #{name}\")\n storages.push [name, name]\n end\n end\n\n return storages\nend", "def addDisk(backingFile, sizeInMB, label = nil, summary = nil, options = {})\n # Remove nil keys if any, since the next line may not work\n options.reject! { |_k, v| v.nil? 
}\n # Merge default values:\n # - persistent is set to true to be backward compatible\n # - thin_provisioned is set to false explicitly since we call to_s on it further, so nil will not work for us\n options = {:persistent => true, :thin_provisioned => false}.merge(options)\n ck, un = available_scsi_units.first\n raise \"addDisk: no SCSI controller found\" unless ck\n\n vmConfigSpec = VimHash.new(\"VirtualMachineConfigSpec\") do |vmcs|\n vmcs.deviceChange = VimArray.new(\"ArrayOfVirtualDeviceConfigSpec\") do |vmcs_vca|\n vmcs_vca << VimHash.new(\"VirtualDeviceConfigSpec\") do |vdcs|\n vdcs.operation = VirtualDeviceConfigSpecOperation::Add\n if sizeInMB < 0\n sizeInMB = -sizeInMB\n else\n vdcs.fileOperation = VirtualDeviceConfigSpecFileOperation::Create\n end\n vdcs.device = VimHash.new(\"VirtualDisk\") do |vDev|\n vDev.key = -100 # temp key for creation\n vDev.capacityInKB = sizeInMB * 1024\n vDev.controllerKey = ck\n vDev.unitNumber = un\n # The following doesn't seem to work.\n vDev.deviceInfo = VimHash.new(\"Description\") do |desc|\n desc.label = label\n desc.summary = summary\n end if label || summary\n vDev.connectable = VimHash.new(\"VirtualDeviceConnectInfo\") do |con|\n con.allowGuestControl = \"false\"\n con.startConnected = \"true\"\n con.connected = \"true\"\n end\n if options[:dependent]\n mode = (options[:persistent] ? VirtualDiskMode::Persistent : VirtualDiskMode::Nonpersistent)\n else\n mode = (options[:persistent] ? VirtualDiskMode::Independent_persistent : VirtualDiskMode::Independent_nonpersistent)\n end\n vDev.backing = VimHash.new(\"VirtualDiskFlatVer2BackingInfo\") do |bck|\n bck.diskMode = mode\n bck.split = \"false\"\n bck.thinProvisioned = options[:thin_provisioned].to_s\n bck.writeThrough = \"false\"\n bck.fileName = backingFile\n begin\n dsn = @invObj.path2dsName(@dsPath)\n bck.datastore = @invObj.dsName2mo_local(dsn)\n rescue\n bck.datastore = nil\n end\n end\n end\n end\n end\n end\n\n logger.info \"MiqVimVm(#{@invObj.server}, #{@invObj.username}).addDisk: calling reconfigVM_Task\"\n taskMor = @invObj.reconfigVM_Task(@vmMor, vmConfigSpec)\n logger.info \"MiqVimVm(#{@invObj.server}, #{@invObj.username}).addDisk: returned from reconfigVM_Task\"\n waitForTask(taskMor)\n end", "def collectVMs(rf, db)\n\trf.childEntity.grep(RbVmomi::VIM::Datacenter).each do |dc|\n\t\tprogressbar = ProgressBar.create(:title => \"#{$single_folder}\", :format => '%t |%b>>%i| %p%% %a')\n\t\tif dc.vmFolder.childEntity.find { |x| x.name == \"#{$single_folder}\" }\n\t\t\tfolder = dc.vmFolder.childEntity.find { |x| x.name == \"#{$single_folder}\" }\n\t\t\tprogressbar.total = folder.childEntity.length\n\t\t\tfolder.childEntity.each do |vmlist|\n\t\t\t\tdb.select(3)\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"Status\", \"#{vmlist.summary.overallStatus}\")\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"Uptime\", \"#{vmlist.summary.quickStats.uptimeSeconds}\")\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"CPUusage\", \"#{vmlist.summary.quickStats.overallCpuUsage}\")\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"CPUnum\", \"#{vmlist.summary.config.numCpu}\")\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"MemUsage\", \"#{vmlist.summary.quickStats.guestMemoryUsage}\")\n\t\t\t\tdb.hset(\"#{vmlist.name}\", \"MemTotal\", \"#{vmlist.summary.config.memorySizeMB}\")\n\t\t\t\tprogressbar.increment\n\t\t\tend\n\t\tend\n\tend\nend", "def all_devices search = nil\n partitions = []\n devices = []\n device = nil\n has_extended = false\n if DEBUG_MODE or Platform.ubuntu? 
or Platform.fedora?\n command = \"lsblk\"\n params = \" #{search} -b -P -o VENDOR,MODEL,TYPE,SIZE,KNAME,UUID,LABEL,MOUNTPOINT,FSTYPE,RM\"\n end\n lsblk = CommandsExecutor.new command, params\n lsblk.execute\n raise \"Command execution error: #{lsblk.stderr.read}\" if not lsblk.success?\n\n lsblk.result.each_line do |line|\n data_hash = {}\n line.squish!\n line_data = line.gsub!(/\"(.*?)\"/, '\\1#').split \"#\"\n line_data.each do |data|\n data.strip!\n key, value = data.split \"=\"\n data_hash[key.downcase] = value\n end\n data_hash['rm'] = data_hash['rm'].to_i # rm = 1 if device is a removable/flash device, otherwise 0\n if data_hash['type'] == 'mpath'\n data_hash.except!('uuid', 'label', 'mountpoint', 'fstype')\n if device\n multipath_info = {'mkname' => data_hash['kname'], 'multipath' => true, 'size' => data_hash['size']}\n device.merge! multipath_info\n else\n data_hash['multipath'] = true\n device = data_hash\n devices.push device\n end\n next\n end\n if data_hash['type'] == 'disk'\n data_hash.except!('uuid', 'label', 'mountpoint', 'fstype')\n unless device.nil?\n device['partitions'] = partitions\n partitions = []\n devices.push device\n device = nil # cleanup the variable\n end\n device = data_hash\n next\n end\n if data_hash['type'] == 'part'\n data_hash.except!('model', 'vendor')\n data_hash.merge! self.usage data_hash['kname']\n\n partition_number = get_partition_number \"/dev/#{data_hash['kname']}\" # For reference: data_hash['kname'].match(/[0-9]*$/)[0].to_i\n extended_partition_types = ['0x05'.hex, '0x0F'.hex]\n if partition_type_hex(data_hash['kname']).in? extended_partition_types\n has_extended = true\n next\n end\n if has_extended and partition_number > 4\n data_hash['logical'] = true\n end\n # device['partitions'].nil? ? device['partitions'] = [data_hash] : device['partitions'].push(data_hash)\n partitions.push(data_hash)\n end\n end\n device['partitions'] = partitions if device\n devices.push device\n if search\n return devices.first || partitions.first\n else\n return devices\n end\n end", "def get_sas_logical_jbod_drives(client, name)\n item = get_sas_logical_jbod(client, name)\n response = client.rest_get(item['uri'] + '/drives')\n client.response_handler(response)\n end", "def create_gdom_disk(options)\n client_disk = options['q_struct']['gdom_disk'].value\n disk_size = options['q_struct']['gdom_size'].value\n disk_size = disk_size.downcase\n vds_disk = options['name']+\"_vdisk0\"\n if not client_disk.match(/\\/dev/)\n if not File.exist?(client_disk)\n message = \"Information:\\tCreating guest domain disk \"+client_disk+\" for client \"+options['name']\n command = \"mkfile -n #{disk_size} #{client_disk}\"\n output = execute_command(options,message,command)\n end\n end\n message = \"Information:\\tChecking Virtual Disk Server device doesn't already exist\"\n command = \"ldm list-services |grep 'primary-vds0' |grep '#{vds_disk}'\"\n output = execute_command(options,message,command)\n if not output.match(/#{options['name']}/)\n message = \"Information:\\tAdding disk device to Virtual Disk Server\"\n command = \"ldm add-vdsdev #{client_disk} #{vds_disk}@primary-vds0\"\n output = execute_command(options,message,command)\n end\n return\nend", "def list\n cf_get(path: \"#{uri_prefix}/virtual_dns\")\n end", "def list_virtual_machine_images(imageType=:all)\n images = Array.new\n if imageType == :public or imageType == :all\n public_images = list_public_virtual_machine_images\n images.concat public_images\n end\n\n if imageType == :private or imageType == :all\n private_images 
= list_private_virtual_machine_images\n images.concat private_images\n end\n\n images\n end", "def new_disk_ide\n Libvirt::Spec::Device.get(:disk).new.tap do |disk|\n disk.type = :file\n disk.device = :disk\n disk.target_dev = :hda\n disk.target_bus = :ide\n end\n end", "def detach_persistent_disks(vm)\n spec_hash = {}\n spec_hash[:deviceChange] = []\n ipool = VCenterDriver::VIHelper.one_pool(OpenNebula::ImagePool)\n if ipool.respond_to?(:message)\n raise \"Could not get OpenNebula ImagePool: #{ipool.message}\"\n end\n\n vm.config.hardware.device.each do |disk|\n next unless disk_or_cdrom?(disk)\n\n # Let's try to find if disks is persistent\n source_unescaped = disk.backing.fileName.sub(\n /^\\[(.*?)\\] /, ''\n ) rescue next\n source = VCenterDriver::FileHelper.escape_path(source_unescaped)\n\n persistent = VCenterDriver::VIHelper\n .find_persistent_image_by_source(\n source, ipool\n )\n\n next unless persistent\n\n spec_hash[:deviceChange] << {\n :operation => :remove,\n :device => disk\n }\n end\n\n return if spec_hash[:deviceChange].empty?\n\n begin\n vm.ReconfigVM_Task(:spec => spec_hash).wait_for_completion\n rescue StandardError => e\n error = \"Cannot detach all DISKs from VM: #{e.message}.\"\n\n if VCenterDriver::CONFIG[:debug_information]\n error += \"\\n\\n#{e.backtrace}\"\n end\n\n raise error\n end\n end", "def os_disk(account=nil)\n\n if account.nil?\n\n template = parent.nil? ? nil : parent.template\n return nil if template.nil?\n\n found = template.find_resources(Azure::ARM::Storage::StorageAccount)\n\n if found.nil? or found.length == 0\n account = template.storage_account do\n account_type Azure::ARM::Storage::StorageAccount::Standard_LRS\n end\n elsif found.length == 1\n account = found[0]\n else\n fail ArgumentError, 'cannot determine which storage account to use of the VM disks'\n end\n\n elsif account.is_a? String\n\n template = parent.nil? ? nil : parent.template\n return nil if template.nil?\n\n found = template.find_resource(Azure::ARM::Storage::StorageAccount, account)\n\n if found.nil?\n account = template.storage_accounts account do\n account_type Azure::ARM::Storage::StorageAccount::Standard_LRS\n end\n else\n account = found\n end\n\n end\n\n parent.add_dependency account\n\n name = parent.generate_name(parent.name.to_s + 'dsk')\n\n if parent.copy\n disk_uri = concat('http://', account.name, '.blob.core.windows.net/disks/', name, copyIndex(), '.vhd')\n else\n disk_uri = concat('http://', account.name, '.blob.core.windows.net/disks/', name, '.vhd')\n end\n\n props = { name: name,\n caching: 'ReadWrite',\n create_option: 'FromImage',\n vhd: { uri: disk_uri } }\n\n if parent.properties.nil? or parent.properties.storage_profile.nil?\n storage_profile os_disk: props\n else\n parent.properties.storage_profile.os_disk = OsDisk.new parent.properties.storage_profile, props\n end\n\n end", "def objects\n # array to hold objects being backed up\n objects = []\n result = veeamconfig('job', 'info', '--name', \"#{@resource[:name]}\").lines\n\n # loop through every line of output\n result.each do |line|\n # the Include Disk lines are what we need\n if line.include? 
'Include Disk:'\n # tease out the disk/volume being backed up\n object = line.split(': ')[1].strip\n # append the disk/volume to the array\n objects << object\n end\n end\n\n # return the disks/volumes being backed up, sorted properly\n return objects.sort_by(&:downcase)\n end", "def vms(folder)\n children = []\n folder.childEntity.each do |x|\n name, junk = x.to_s.split('(')\n case name\n when \"Folder\"\n children.push(vms(x))\n when \"VirtualMachine\"\n children.push(x)\n end\n end\n\n children\nend", "def list_nix_partitions # nelsongs\n\treturn `fdisk -l | grep /dev | grep -v Disk | awk '{if ($2==\"*\" && $6==\"83\") print $1;else {if ($5==\"83\") print $1}}'`.chomp.split\nend", "def getmountedvolumes\n # only support Linux for now\n return {} unless Facter['kernel'] && Facter['kernel'].value == 'Linux'\n\n dir = \"/etc\"\n mounted = {}\n\n # AUTOFS - gather only files named auto[._]*\n Dir.glob(File.join(dir, \"*\")).each do |file|\n next if file !~ /^auto[._].*/\n\n # AUTOFS - match only lines that look like nfs syntax such as host:/path\n IO.foreach(file) do |line|\n if line =~ /\\w:\\S/ && line !~ /^\\s*#/\n # Parse it, Example : \" nventory_backup -noatime,intr irvnetappbk:/vol/nventory_backup \"\n if line =~ /^(\\w[\\w\\S]+)\\s+\\S+\\s+(\\w[\\w\\S]+):(\\S+)/\n mnt = $1\n host = $2\n vol = $3\n mounted[\"volumes[mounted][/mnt/#{mnt}][config]\"] = file\n mounted[\"volumes[mounted][/mnt/#{mnt}][volume_server]\"] = host\n mounted[\"volumes[mounted][/mnt/#{mnt}][volume]\"] = vol\n mounted[\"volumes[mounted][/mnt/#{mnt}][type]\"] = 'nfs'\n end\n end\n end # IO.foreach\n end # Dir.glob\n\n # FSTAB - has diff syntax than AUTOFS. Example: \"server:/usr/local/pub /pub nfs rsize=8192,wsize=8192,timeo=14,intr\"\n IO.foreach(\"/etc/fstab\") do |line|\n if line =~ /^(\\w[\\w\\S]+):(\\S+)\\s+(\\S+)\\s+nfs/\n host = $1\n vol = $2\n mnt = $3\n mounted[\"volumes[mounted][#{mnt}][config]\"] = \"/etc/fstab\"\n mounted[\"volumes[mounted][#{mnt}][volume_server]\"] = host\n mounted[\"volumes[mounted][#{mnt}][volume]\"] = vol\n mounted[\"volumes[mounted][#{mnt}][type]\"] = 'nfs'\n end\n end # IO.foreach\n return mounted\n end", "def merge_vm_shared_disks(host, global, vb, controller)\n vb_dir=global['vm_basedir'] ? 
global['vm_basedir'] + global['vm_groups'].partition(',')[0] + \"/\" : \"./.virtualbox/\"\n if global['vm_shared_disks'] or host['vm_shared_disks']\n merge_hash = merge_2_array_of_hashes(global['vm_shared_disks'], host['vm_shared_disks'])\n merge_hash.each do |key, value|\n diskname=\"#{vb_dir}shared-#{key}.vdi\"\n unless File.exist?(diskname)\n vb.customize [\"createmedium\", \"disk\", \"--filename\", diskname, \"--size\", value * 1024 , \"--format\", \"vdi\", \"--variant\", \"Fixed\"]\n end\n vb.customize [\"storageattach\", :id , \"--storagectl\", controller, \"--port\", key, \"--device\", \"0\", \"--type\", \"hdd\", \"--medium\", diskname, \"--mtype\", \"shareable\"]\n end\n end\nend", "def disk_attached_to_vm(disk, unmanaged_keys, vc_disks)\n img_name = ''\n device_found = nil\n disk_id = disk['DISK_ID']\n unmanaged_key = unmanaged_keys[\"opennebula.disk.#{disk_id}\"]\n\n img_name_escaped = VCenterDriver::FileHelper.get_img_name(\n disk,\n one_item['ID'],\n self['name'],\n instantiated_as_persistent?\n )\n\n img_name = VCenterDriver::FileHelper.unescape_path(img_name_escaped)\n\n vc_disks.each do |d|\n key_matches = (unmanaged_key && d[:key] == unmanaged_key.to_i)\n path_matches = (d[:path_wo_ds] == img_name)\n\n if key_matches || path_matches\n device_found = d\n break\n end\n end\n\n device_found\n end", "def volumes_list(state = \"active\", limit = \"1000\")\n case state\n when \"active\"\n api_call = {\n :method => \"ListActiveVolumes\",\n :params => {\n :startVolumeID => 0,\n :limit => limit\n }\n }\n when \"deleted\"\n api_call = {\n :method => \"ListDeletedVolumes\",\n :params => {}\n }\n end\n answer = query_sf(api_call)\n return answer[\"volumes\"]\n end", "def init_ebs_volumes\n @@client.describe_volumes.volumes\n end", "def hyperv\n has_grow, grow_by = SubutaiDisk.has_grow\n file_disk = SubutaiDisk.file_path(grow_by, \"hyper_v\")\n disk_path = Pathname.new file_disk\n\n unless disk_path.exist?\n Put.warn SubutaiDisk.message(grow_by)\n\n if has_grow\n if SubutaiDisk.hyperv_create_disk(grow_by, disk_path.to_s)\n SubutaiDisk.save_path(SubutaiDisk.port, disk_path.to_s)\n SubutaiDisk.save_conf(grow_by)\n end\n end\n else\n Put.error \"Disk file already exist in #{file_disk}\"\n end\n end", "def volumes_for_account(accountid)\n api_call = {\n :method => \"ListVolumesForAccount\",\n :params => {\n :accountID => accountid\n }\n }\n answer = query_sf(api_call)\n return answer\n end", "def add_vm_disks(vm_service, disk_specs)\n storage_spec = disk_specs[:storage]\n default_disk_spec = disk_specs[:default] || {}\n attachments_service = vm_service.disk_attachments_service\n disk_specs[:disks].each do |disk_spec|\n attachment = prepare_vm_disk_attachment(default_disk_spec.merge(disk_spec), storage_spec)\n attachments_service.add(attachment)\n end\n end", "def get_local_storage\n storage = {:hostname => Socket.gethostname, :mounts => [], :vgs => []}\n\n LVM::LVM.new({:command => \"/usr/bin/sudo /sbin/lvm\"}) do |lvm|\n lvm.volume_groups.each do |vg|\n vg.logical_volumes.each do |lv|\n mnt = get_mount(lv.name)\n fs = {:mount => mnt.mount_point, :fs => mnt.name, :lv => lv.name, :vg => vg.name}\n storage[:mounts] << fs\n end\n\n volg = {:vg => vg.name, :pvs => []}\n vg.physical_volumes.each do |pv|\n volg[:pvs] << {:pv => pv.name}\n end\n storage[:vgs] << volg\n end\n end\n storage\n end", "def list_partitions_with_size_and_type # by nelsongs. 
=> list: partition size type\n\treturn `fdisk -l | grep /dev | grep -v Disk | awk '{if ($2==\"*\") print $1\":\"$5\":\"$6;else print $1\":\"$4\":\"$5}' | sed s/+//g`.split\nend", "def all_storages\n hosts = all_hosts\n MiqPreloader.preload(hosts, :storages)\n hosts.collect(&:storages).flatten.compact.uniq\n end", "def list_public_virtual_machine_images\n request_path = '/services/images'\n request = ManagementHttpRequest.new(:get, request_path, nil)\n response = request.call\n Serialization.virtual_machine_images_from_xml(response)\n end", "def index\n @virtual_domains = VirtualDomain.all\n end", "def get_os_disk(vm)\n uri = Addressable::URI.parse(vm.properties.storage_profile.os_disk.vhd.uri)\n\n # The uri looks like https://foo123.blob.core.windows.net/vhds/something123.vhd\n disk = File.basename(uri.to_s) # disk name, e.g. 'something123.vhd'\n path = File.dirname(uri.path)[1..-1] # container, e.g. 'vhds'\n\n acct = get_from_vm(vm)\n keys = list_account_keys(acct.name, acct.resource_group)\n key = keys['key1'] || keys['key2']\n\n acct.blob_properties(path, disk, key)\n end", "def create_vm_specs(vm_id, cpu, ram, description, cd_rom_disk,\n system_disk, network, ephemeral_disk_size = nil)\n vm_create_specs = {\n name: \"bosh-#{vm_id}\", uuid: vm_id, memory_mb: ram, num_vcpus: cpu,\n description: \"DO NOT DELETE #{description.to_json}\",\n vm_disks: [ # CD-ROM\n { is_cdrom: true, is_empty: false,\n disk_address: { device_bus: 'ide' },\n vm_disk_clone: { disk_address: { vmdisk_uuid: cd_rom_disk } } },\n # System/Boot Disk\n { is_cdrom: false, is_empty: false,\n disk_address: { device_bus: 'scsi' },\n vm_disk_clone: { disk_address: { vmdisk_uuid: system_disk } } }\n ],\n vm_nics: []\n }\n unless ephemeral_disk_size.nil?\n vm_create_specs[:vm_disks] << {\n is_cdrom: false, is_empty: false,\n disk_address: { device_bus: 'scsi' },\n vm_disk_create: {\n storage_container_uuid: @container_uuid,\n size: (ephemeral_disk_size / 1000) * (1024 * 1024 * 1024)\n }\n }\n end\n network.each { |n| vm_create_specs[:vm_nics] << n }\n vm_create_specs\n end", "def vm_disk_management(options = {})\n Azure::VirtualMachineImageManagement::VirtualMachineDiskManagementService.new(default_client(options))\n end", "def list_nix_partitions_with_size_and_type # nelsongs\n\t`fdisk -l | grep /dev | grep -v Disk | awk '{if ($2==\"*\" && ($6==\"83\" || $6==\"82\")) print $1\":\"$5\":\"$6;else {if ($5==\"83\" || $5==\"82\") print $1\":\"$4\":\"$5}}' | sed s/+//g`.split\nend", "def volumes\n volumes = @ec2.volumes\n volumes.delete_if {|v| v.instance_id != id}\n end", "def GetPartitionList\n deep_copy(@partition_info)\n end", "def list_ks_vbox_vms()\n search_string = \"rhel|centos|ubuntu\"\n list_vbox_vms(search_string)\n return\nend", "def vm_instances\n @conn.vminstances\n end", "def extra_volumes(base_name)\n configs = []\n\n config[:extra_volumes].each_with_index do |data, index|\n disk_id = (index + 1).to_s.rjust(2, '0')\n data[:name] = \"#{base_name}-extra-#{disk_id}\"\n configs << data\n end\n\n configs\n end", "def find_vmx_files(ds, output_directory) # rubocop:disable Metrics/MethodLength\n datastore_path = \"[#{ds.name}] /#{output_directory}\"\n puts datastore_path\n search_spec = {\n details: { fileOwner: false, fileSize: false, fileType: true,\n modification: false },\n query: [\n RbVmomi::VIM::VmConfigFileQuery(), RbVmomi::VIM::TemplateConfigFileQuery()\n ]\n }\n task = ds.browser.SearchDatastoreSubFolders_Task(datastorePath: datastore_path,\n searchSpec: search_spec)\n\n results = task.wait_for_completion\n\n 
files = []\n results.each do |result|\n result.file.each do |file|\n files << \"#{result.folderPath}/#{file.path}\"\n end\n end\n\n files\nend", "def get_disk_util\n used_info = query_data(params[:host], 'df.1kblocks.used', params[:from_ts], params[:to_ts])\n total_info = query_data(params[:host], 'df.1kblocks.total', params[:from_ts], params[:to_ts])\n\n used_info.delete_if { |k, v|\n k.index(\"fstype=tmpfs\")\n }\n total_info.delete_if { |k, v|\n k.index(\"fstype=tmpfs\")\n }\n \n results = {}\n used_info.each { |k, v|\n matcher = /mount=([\\/\\w]+) /.match(k)\n if matcher\n path = matcher[1]\n\n if total_info.has_key?(k)\n total_v = total_info[k]\n total_v_map = Hash[total_v]\n results[path] = v.collect { |point|\n ts = point[0]\n if (total_v_map.has_key?(ts))\n [ts, format(\"%.2f\", point[1] * 100.0 / total_v_map[ts]).to_f]\n end\n }.keep_if { |v| v }\n end\n end\n }\n\n render json: results\n end", "def drives()\n return MicrosoftGraph::Drives::DrivesRequestBuilder.new(@path_parameters, @request_adapter)\n end", "def autodiscover_devices\n logger.debug 'Discovering information about storage subsystem (HDD and MD devices)'\n @storage_information = HddAutodiscover.new(STORAGE_CONFIG[:volume_name])\n collected = @storage_information.collect()\n\n assert [:storage, :free_space, collected[:lvm][:free]]\n collected[:hdd].each do |hdd|\n assert [:hdd, hdd.device, :sn, hdd.sn]\n assert [:hdd, hdd.device, :temperature, hdd.temperature]\n assert [:hdd, hdd.device, :health, hdd.health]\n end\n end", "def disk\n @disk ||= begin\n dInfo = OpenStruct.new\n dInfo.lvObj = self\n dInfo.hardwareId = \"\"\n MiqDisk.new(Lvm2DiskIO, dInfo, 0)\n end\n end", "def related_storage_volumes\n related_volumes.find_all do |volume|\n volume.provider_name == \"compellent\" || volume.provider_name == \"vnx\"\n end\n end", "def mount_ephemerals(attrs={})\n owner.volume(:ephemeral0, attrs){ device '/dev/sdb'; volume_id 'ephemeral0' ; mount_point '/mnt' ; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 0\n owner.volume(:ephemeral1, attrs){ device '/dev/sdc'; volume_id 'ephemeral1' ; mount_point '/mnt2'; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 1\n owner.volume(:ephemeral2, attrs){ device '/dev/sdd'; volume_id 'ephemeral2' ; mount_point '/mnt3'; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 2\n owner.volume(:ephemeral3, attrs){ device '/dev/sde'; volume_id 'ephemeral3' ; mount_point '/mnt4'; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 3\n end", "def all\n data = []\n if @ec2_main.settings.openstack \n conn = @ec2_main.environment.connection\n if conn != nil\n begin \n x = conn.flavors.all\n x.each do |y|\n vcpu = nil\n begin \n vcpu = y.vcpus\n rescue\n vcpu = nil \n end\n if vcpu != nil \n data.push(\"#{y.id} (#{y.name} Mem: #{y.ram}MB Disk: #{y.disk}GB VCPU: #{y.vcpus}VCPUs)\")\n else\n data.push(\"#{y.id} (#{y.name} Mem: #{y.ram}MB Disk: #{y.disk}GB)\") \n end\n end\n rescue\n puts \"ERROR: getting all flavors #{$!}\"\n end\n else \n raise \"Connection Error\" \n end \n elsif @ec2_main.settings.google \n conn = @ec2_main.environment.connection\n if conn != nil\n begin \n response = conn.list_machine_types($google_zone)\n\t\t\t if response.status == 200\n\t x = response.body['items']\n\t x.each do |r|\n\t\t\t\t data.push(\"#{r['name']} ( Mem: #{r['memoryMb']}MB Disks: #{r['maximumPersistentDisks']} Disk Size: 
#{r['maximumPersistentDisksSizeGb']}GB CPUs: #{r['guestCpus']})\")\n \t end\n\t else\n\t \t data = []\n end\n rescue\n puts \"ERROR: getting all flavors #{$!}\"\n end\n else \n raise \"Connection Error\" \n end \t\t\n\t else \n data.push('t1.micro (EBS only Micro 32 or 64-bit, 613 MB, up to 2 compute unit)') \n data.push('m1.small (Small 32 or 64-bit, 1.7 GB, 1 compute unit)')\n data.push('m1.medium (Medium 32 or 64-bit, 3.75 GB, 2 compute unit)')\n data.push('m1.large (Large 64-bit, 7.5 GB, 4 compute unit)')\n data.push('m1.xlarge (Extra Large 64-bit, 15 GB, 8 compute unit)')\n data.push('m3.xlarge (EBS Only Extra Large 64-bit, 15 GB, 13 compute unit)')\n data.push('m3.2xlarge (EBS Only Extra Double Large 64-bit, 30 GB, 26 compute unit)')\n data.push('m2.xlarge (High Memory Extra Large 64-bit, 17.1 GB, 6.5 compute unit)')\n data.push('m2.2xlarge (High Memory Double Extra Large 64-bit, 34.2 GB, 13 compute unit)')\n data.push('m2.4xlarge (High Memory Quadruple Large 64-bit, 68.4 GB, 26 compute unit)')\n data.push('c1.medium (Compute optimized CPU Medium 32 or 64-bit, 1.7 GB, 5 compute unit)')\n data.push('c1.xlarge (Compute optimized CPU Extra Large 64-bit, 7 GB, 20 compute unit)')\n data.push('c3.xlarge (Compute optimized Extra Large 64-bit, 3.75 GB, 7 compute unit)')\n data.push('c3.2xlarge (Compute optimized Double Extra Large 64-bit, 7 GB, 14 compute unit)')\n data.push('c3.4xlarge (Compute optimized Quadruple Large 64-bit, 15 GB, 28 compute unit)')\t\n data.push('c3.8xlarge (Compute optimized Eight Large 64-bit, 30 GB, 55 compute unit)')\n data.push('i2.xlarge\t\t (High I/O 1x800 GB SSD, 30.5 GB, 14 compute unit)')\n data.push('i2.2xlarge\t\t (High I/O 2x800 GB SSD, 61 GB, 27 compute unit)')\n data.push('i2.4xlarge\t\t (High I/O 4x800 GB SSD, 122 GB, 53 compute unit)')\n data.push('i2.8xlarge\t \t (High I/O 8x800 GB SSD, 244 GB, 104 compute unit)')\t\t \n data.push('cc1.4xlarge (Cluster Compute Quadruple Extra Large 64-bit, 23 GB, 33.5 compute unit. 10GBit network)')\n data.push('cc2.8xlarge (Cluster Compute Eight Extra Large 64-bit, 60.5 GB, 88 compute unit. 10GBit network)')\n\t\t data.push('g2.2xlarge (Cluster GPU Quadruple Extra Large 64-bit, 15 GB, 26compute unit.)') \n data.push('cg1.4xlarge (Cluster GPU Quadruple Extra Large 64-bit, 22 GB, 33.5 compute unit. 10GBit network)') \n data.push('hi1.4xlarge (High I/O Quadruple Extra Large 64-bit, 60.5 GB, 2x1024GB SSD, 35 compute unit. 10GBit network)')\n\t\t data.push('hs1.8xlarge (High I/O Quadruple Extra Large 64-bit, 117 GB, 24x2048GB SSD, 35 compute unit. 10GBit network)')\n \t\t\n end \n return data\n end", "def possible_dirs\n @storage_paths\n end" ]
[ "0.73276126", "0.6938894", "0.676588", "0.6646116", "0.6485004", "0.6422962", "0.64167136", "0.63778716", "0.6317316", "0.63155466", "0.63019836", "0.6266429", "0.62215626", "0.6162207", "0.60976696", "0.60670096", "0.60431045", "0.6036013", "0.60063803", "0.596952", "0.5935589", "0.5899904", "0.58745533", "0.58433926", "0.5826005", "0.5817577", "0.5815207", "0.58061993", "0.5780187", "0.57589227", "0.5754515", "0.57285315", "0.5698283", "0.5688724", "0.5675091", "0.5670472", "0.5669566", "0.56544495", "0.56410927", "0.55941534", "0.5570844", "0.55664", "0.55394316", "0.5507002", "0.550549", "0.5504625", "0.549941", "0.5488693", "0.54657125", "0.54598767", "0.5436277", "0.5424042", "0.5414724", "0.5413819", "0.5408589", "0.5397615", "0.53929454", "0.5387904", "0.5379104", "0.53535914", "0.53435993", "0.53422165", "0.5303771", "0.53015167", "0.52991915", "0.52893895", "0.52647257", "0.5255604", "0.5254914", "0.52521294", "0.5249061", "0.5240648", "0.5232025", "0.5216537", "0.5209489", "0.52082187", "0.52077305", "0.5205459", "0.5184977", "0.51822865", "0.5160568", "0.5159519", "0.5145558", "0.51420134", "0.513751", "0.51371753", "0.5135649", "0.5130151", "0.51297957", "0.5119033", "0.5115963", "0.5115209", "0.51124024", "0.5102274", "0.5102234", "0.5100235", "0.50886893", "0.5086608", "0.5083223", "0.50804937" ]
0.67960477
2
Create a config structure for each additional volume.
def extra_volumes(base_name) configs = [] config[:extra_volumes].each_with_index do |data, index| disk_id = (index + 1).to_s.rjust(2, '0') data[:name] = "#{base_name}-extra-#{disk_id}" configs << data end configs end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_volume(volume_name, config:, **kwargs)\n object = { name: volume_name }.merge(config).merge(kwargs)\n log.info \"Creating volume: #{object}\"\n volumes << object\n end", "def create\n tmp = Puppet::FileSystem::Uniquefile.new('quobyte_volume_config')\n tmp.write(resource[:content])\n tmp.flush()\n\n qmgmt(['volume', 'config', 'import', [resource[:name]], tmp.path])\n end", "def create_additional_options\n type_dir = new_resource.send('config_dir_' + new_resource.type)\n new_resource.additional_options.each do |k, v|\n file \"#{new_resource.instance} :create #{type_dir})/#{k}\" do\n path \"#{type_dir}/#{k}\"\n content v\n end\n end\n end", "def volume(volume_name, attrs={}, &block)\n volumes[volume_name] ||= Ironfan::Volume.new(:parent => self, :name => volume_name)\n volumes[volume_name].configure(attrs, &block)\n volumes[volume_name]\n end", "def attach_volumes(node, disk_sizes)\n if $provider == :virtualbox\n node.vm.provider :virtualbox do |v, override|\n disk_num = 0\n disk_sizes.each do |disk_size|\n disk_num += 1\n diskname = File.join(File.dirname(File.expand_path(__FILE__)), \".virtualbox\", \"#{node.vm.hostname}-#{disk_num}.vdi\")\n unless File.exist?(diskname)\n v.customize ['createhd', '--filename', diskname, '--size', disk_size * 1024]\n end\n v.customize ['storageattach', :id, '--storagectl', 'SATA Controller', '--port', disk_num, '--device', 0, '--type', 'hdd', '--medium', diskname]\n end\n end\n end\n\n if $provider == :vmware_fusion\n node.vm.provider :vmware_fusion do |v, override|\n vdiskmanager = '/Applications/VMware\\ Fusion.app/Contents/Library/vmware-vdiskmanager'\n unless File.exist?(vdiskmanager)\n dir = File.join(File.dirname(File.expand_path(__FILE__)), \".vmware\")\n unless File.directory?( dir )\n Dir.mkdir dir\n end\n\n disk_num = 0\n disk_sizes.each do |disk_size|\n disk_num += 1\n diskname = File.join(dir, \"#{node.vm.hostname}-#{disk_num}.vmdk\")\n unless File.exist?(diskname)\n `#{vdiskmanager} -c -s #{disk_size}GB -a lsilogic -t 1 #{diskname}`\n end\n\n v.vmx[\"scsi0:#{disk_num}.filename\"] = diskname\n v.vmx[\"scsi0:#{disk_num}.present\"] = 'TRUE'\n v.vmx[\"scsi0:#{disk_num}.redo\"] = ''\n end\n end\n end\n end\n\n if $provider == :parallels\n node.vm.provider :parallels do |v, override|\n disk_sizes.each do |disk_size|\n v.customize ['set', :id, '--device-add', 'hdd', '--size', disk_size * 1024]\n end\n end\n end\n\nend", "def create_lvm_volumes( opts = {} )\n opts = deep_merge_hashes( @aws_default_instance_options, opts )\n unless exist?( opts[ :lvm_volumes ].first[1] )\n create_lvm_volumes!( opts )\n end\n end", "def getmountedvolumes\n # only support Linux for now\n return {} unless Facter['kernel'] && Facter['kernel'].value == 'Linux'\n\n dir = \"/etc\"\n mounted = {}\n\n # AUTOFS - gather only files named auto[._]*\n Dir.glob(File.join(dir, \"*\")).each do |file|\n next if file !~ /^auto[._].*/\n\n # AUTOFS - match only lines that look like nfs syntax such as host:/path\n IO.foreach(file) do |line|\n if line =~ /\\w:\\S/ && line !~ /^\\s*#/\n # Parse it, Example : \" nventory_backup -noatime,intr irvnetappbk:/vol/nventory_backup \"\n if line =~ /^(\\w[\\w\\S]+)\\s+\\S+\\s+(\\w[\\w\\S]+):(\\S+)/\n mnt = $1\n host = $2\n vol = $3\n mounted[\"volumes[mounted][/mnt/#{mnt}][config]\"] = file\n mounted[\"volumes[mounted][/mnt/#{mnt}][volume_server]\"] = host\n mounted[\"volumes[mounted][/mnt/#{mnt}][volume]\"] = vol\n mounted[\"volumes[mounted][/mnt/#{mnt}][type]\"] = 'nfs'\n end\n end\n end # IO.foreach\n end # Dir.glob\n\n # FSTAB - has 
diff syntax than AUTOFS. Example: \"server:/usr/local/pub /pub nfs rsize=8192,wsize=8192,timeo=14,intr\"\n IO.foreach(\"/etc/fstab\") do |line|\n if line =~ /^(\\w[\\w\\S]+):(\\S+)\\s+(\\S+)\\s+nfs/\n host = $1\n vol = $2\n mnt = $3\n mounted[\"volumes[mounted][#{mnt}][config]\"] = \"/etc/fstab\"\n mounted[\"volumes[mounted][#{mnt}][volume_server]\"] = host\n mounted[\"volumes[mounted][#{mnt}][volume]\"] = vol\n mounted[\"volumes[mounted][#{mnt}][type]\"] = 'nfs'\n end\n end # IO.foreach\n return mounted\n end", "def configure_vms(config)\n $vms.each do |key,vm_config|\n # pp vm_config\n config.vm.define key, autostart: vm_config['autostart'] || false do |node|\n # node.ssh.insert_key = true\n # node.ssh.username = \"toto\"\n \n configure_node(vm_config, node)\n vm_config_copy = object_clone(vm_config)\n vm_config_copy.delete('packer')\n # generate packer\n packer_gen(vm_config)\n log_to_file(\"vms/#{vm_config['lib']['namespace']}/#{vm_config['lib']['key']}.yaml\", vm_config_copy.sort_by_key(true).to_h.to_yaml)\n end\n end\nend", "def group_ebs_volumes\n @group_ebs_volumes ||= Hash[ebs_groups.map do |group_name|\n vols = ebs_volumes.select { |vol| vol.group == group_name}\n [group_name, EbsGroupConfig.new(group_name).populate!(vols)]\n end]\n end", "def mount_ephemerals(attrs={})\n owner.volume(:ephemeral0, attrs){ device '/dev/sdb'; volume_id 'ephemeral0' ; mount_point '/mnt' ; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 0\n owner.volume(:ephemeral1, attrs){ device '/dev/sdc'; volume_id 'ephemeral1' ; mount_point '/mnt2'; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 1\n owner.volume(:ephemeral2, attrs){ device '/dev/sdd'; volume_id 'ephemeral2' ; mount_point '/mnt3'; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 2\n owner.volume(:ephemeral3, attrs){ device '/dev/sde'; volume_id 'ephemeral3' ; mount_point '/mnt4'; tags( :bulk => true, :local => true, :fallback => true) } if flavor_info[:ephemeral_volumes] > 3\n end", "def addVolume(dev, size, type: \"gp2\")\n if @cloud_id.nil? or @cloud_id.empty?\n MU.log \"#{self} didn't have a cloud id, couldn't determine 'active?' status\", MU::ERR\n return true\n end\n az = nil\n MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_instances(\n instance_ids: [@cloud_id]\n ).reservations.each { |resp|\n if !resp.nil? 
and !resp.instances.nil?\n resp.instances.each { |instance|\n az = instance.placement.availability_zone\n instance.block_device_mappings.each { |vol|\n if vol.device_name == dev\n MU.log \"A volume #{dev} already attached to #{self}, skipping\", MU::NOTICE\n return\n end\n }\n }\n end\n }\n MU.log \"Creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n creation = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).create_volume(\n availability_zone: az,\n size: size,\n volume_type: type\n )\n begin\n sleep 3\n creation = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_volumes(volume_ids: [creation.volume_id]).volumes.first\n if ![\"creating\", \"available\"].include?(creation.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end while creation.state != \"available\"\n\n if @deploy\n MU::MommaCat.listStandardTags.each_pair { |key, value|\n MU::MommaCat.createTag(creation.volume_id, key, value, region: @config['region'], credentials: @config['credentials'])\n }\n MU::MommaCat.createTag(creation.volume_id, \"Name\", \"#{MU.deploy_id}-#{@config[\"name\"].upcase}-#{dev.upcase}\", region: @config['region'], credentials: @config['credentials'])\n end\n\n attachment = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).attach_volume(\n device: dev,\n instance_id: @cloud_id,\n volume_id: creation.volume_id\n )\n\n begin\n sleep 3\n attachment = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_volumes(volume_ids: [attachment.volume_id]).volumes.first.attachments.first\n if ![\"attaching\", \"attached\"].include?(attachment.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end while attachment.state != \"attached\"\n end", "def create\n properties = [ resource[:name],\n resource[:user],\n resource[:group],\n resource[:config],\n resource[:mode],\n ]\n\n qmgmt(['volume', 'create'] + properties)\n end", "def init\n create_file options[:inventory_config] do\n<<-YML\n# sources:\n# - \"https://supermarket.getchef.com\"\n# cookbooks:\n# cookbook-name:\n# versions:\n# - \"~> 4.0.2\"\n# - \"> 5.0.0\"\n# git:\n# location: url | path\n# branches:\n# - a_branch_name\n# refs:\n# - SHA\n\nYML\n end\n end", "def add_volume(bucket,mount,options=nil)\n s3fs_volumes << { :bucket => bucket, :mount => mount, :options => options }\n end", "def create_default_volume()\n # Create a default application_volume using the volume attributes from the cookbook\n create_node_volume(:application_volume)\n end", "def create_volumes(volume_definitions)\n volume_definitions.each { |volume| client.volumes.create(volume) }\n end", "def add_volume(container_name: nil, volume_name: nil, volume_config:, mount_path: nil,\n mount_config: {}, block: false, timeout: 60, polling: 5)\n\n create_volume(volume_name, config: volume_config)\n mount_volume(container_name,\n volume_name: volume_name,\n mount_path: mount_path,\n **mount_config)\n\n update\n sleep polling\n wait_for_deployments(timeout: timeout, polling: polling) if block\n reload(true)\n end", "def generate_ceph_ansible_configs(ansible_dir, hosts, provision)\n ceph_inventory = \\\n {\n 'mons' => { 'hosts' => {} },\n 'mgrs' => { 'hosts' => {} },\n 'osds' => { 'hosts' => {} },\n 'clients' => { 'hosts' => {},\n 'vars' => { 'copy_admin_key' => true } }\n }\n\n 
hosts.each do |h|\n host = Resource.object('hosts')\n host.info(h['id'])\n\n ceph_group = host.one['TEMPLATE/PROVISION/CEPH_GROUP']\n\n case ceph_group\n when 'osd,mon'\n ceph_inventory['mons']['hosts'][host.one['NAME']] = nil\n ceph_inventory['mgrs']['hosts'][host.one['NAME']] = nil\n ceph_inventory['osds']['hosts'][host.one['NAME']] = nil\n when 'osd'\n ceph_inventory['osds']['hosts'][host.one['NAME']] = nil\n when 'clients'\n ceph_inventory['clients']['hosts'][host.one['NAME']] =\n nil\n end\n end\n\n fname = \"#{ansible_dir}/ceph_inventory.yml\"\n Driver.write_file_log(fname, YAML.dump(ceph_inventory))\n @inventories << fname\n\n # eval ceph group_vars template\n ceph_vars = File.read(\n \"#{ANSIBLE_LOCATION}/ceph_hci/group_vars.yml.erb\"\n )\n yaml = provision.body['ceph_vars'].to_yaml.gsub!(\"---\\n\", '')\n\n ceph_vars = ERBVal.render_from_hash(\n ceph_vars,\n 'vars' => yaml\n )\n\n fname = \"#{ansible_dir}/ceph_group_vars.yml\"\n Driver.write_file_log(fname, ceph_vars)\n @group_vars << fname\n end", "def add_config_folder\n directory 'templates/dry/config', \"#{name}/config\", recursive: true\n end", "def packer_config\n JSON.dump(JSON.parse(super).tap do |config|\n config['builders'] = [\n {\n \"type\" => \"vsphere\",\n \"vcenter_server\" => Stemcell::Builder::validate_env('VCENTER_SERVER'),\n \"username\" => Stemcell::Builder::validate_env('VCENTER_USERNAME'),\n \"password\" => Stemcell::Builder::validate_env('VCENTER_PASSWORD'),\n \"insecure_connection\" => true,\n\n \"template\" => Stemcell::Builder::validate_env('BASE_TEMPLATE'),\n \"folder\" => Stemcell::Builder::validate_env('VCENTER_VM_FOLDER'),\n \"vm_name\" => \"packer-vmx\",\n \"host\" => Stemcell::Builder::validate_env('VCENTER_HOST'),\n \"resource_pool\" => \"\",\n # \"ssh_username\" => 'Administrator',\n # \"ssh_password\" => Stemcell::Builder::validate_env('ADMINISTRATOR_PASSWORD'),\n 'communicator' => 'winrm',\n 'winrm_username' => 'Administrator',\n 'winrm_password' => Stemcell::Builder::validate_env('ADMINISTRATOR_PASSWORD'),\n 'winrm_timeout' => '3h',\n 'winrm_insecure' => true,\n \"CPUs\" => ENV.fetch('NUM_VCPUS', '4'),\n \"RAM\" => ENV.fetch('MEM_SIZE', '4096'),\n }\n ]\n end)\n end", "def add_persistent_volume(volume, host_spec = {:host_name => 'localhost'})\n @per_volumes_by_hosts[host_spec[:host_name]] << volume\n @hosts_specs[host_spec[:host_name]] ||= host_spec.to_h\n end", "def domain_volumes\n # Use the domain name as our volume base name.\n base_name = domain_name\n\n if not config[:image_name]\n config[:image_name] = default_image\n end\n\n # Clone our root volume from our base image.\n root_volume = clone_volume(config[:image_name], \"#{base_name}-root\")\n\n # Return the array of created volumes\n [root_volume].concat(\n create_volumes(\n extra_volumes(base_name)\n )\n )\n end", "def configure(name: nil, ports: [], volumes: [], env: {}, create_args: [])\n @settings[name] ||= { :ports => [], :volumes => [], :env => {}, :create_args => []}\n @settings[name][:ports].concat(ports)\n @settings[name][:volumes].concat(volumes)\n @settings[name][:env].merge!(env)\n @settings[name][:create_args].concat(create_args)\n end", "def attach_volumes!(server, volumes_count, size)\n #create a new block storage connection obj\n volume_service = Fog::Volume::OpenStack.new(\n :openstack_api_key => @os_password,\n :openstack_username => @os_username,\n :openstack_auth_url => @os_auth_url,\n :openstack_tenant => @os_tenant,\n )\n base = 'sdd'\n volumes_count.times do |i|\n base = base.next!\n #create a new volume\n vol = 
volume_service.volumes.create(\n :size => size,\n :display_name => \"#{server.name}-#{i}\",\n :description => \"Volume attached to #{server.name} - managed by ankus\"\n )\n vol.reload\n vol.wait_for { status == 'available' }\n server.attach_volume(vol.id, \"/dev/#{base}\")\n vol.wait_for { status == 'in-use' }\n end\n end", "def prepare_config_files\n #Create .config dir\n #Create tucotuco dir\n #Create short dir\n #Create info file\n end", "def create_multi_config\n\t\t\t\tFile.open( \"#{$config_path}/#{@host}_disks.cfg\", 'w' ) do |out|\n\t\t\t\t\t\tout.puts \"# Diskfile for host #{@host} generated at #{Time.now}\"\n\t\t\t\t\t\[email protected] do |disk|\n\t\t\t\t\t\t\t\tnext unless disk[:disk]\n\t\t\t\t\t\t\t\twarn = disk[:warn]\n\t\t\t\t\t\t\t\tcrit = disk[:crit]\n\t\t\t\t\t\t\t\twarn = \"$WARN$\" if disk[:warn].nil?\n\t\t\t\t\t\t\t\tcrit = \"$CRIT$\" if disk[:crit].nil?\n\t\t\t\t\t\t\t\tout.puts \"command[#{disk[:disk].tr(\"/\",\"_\")}::check_disk]=check_nrpe -H #{@host} #{$ssl} -c check_disk -a #{warn} '#{crit} -E' #{disk[:disk]}\" if $isbuggy == true\n\t\t\t\t\t\t\t\tout.puts \"command[#{disk[:disk].tr(\"/\",\"_\")}::check_disk]=check_nrpe -H #{@host} #{$ssl} -c check_disk -a #{warn} #{crit} #{disk[:disk]}\" if $isbuggy == false\n\t\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\t$log.info \"check_multi config written for host #{@host}\"\n\t\tend", "def config\n details = Chassis.pkgs.dup\n details << [\"tags\"] + Chassis.tags unless Chassis.tags.empty?\n details << [\"roles\"] + Chassis.roles unless Chassis.roles.empty?\n details << [\"kind\"] + [Chassis.node_kind] if Chassis.node_kind\n details << Chassis.extra_config if Chassis.extra_config\n details\n end", "def create_volume(options)\n # Creating the volume is part of the server creation\n end", "def create_volume(options)\n # Creating the volume is part of the server creation\n end", "def init_ebs_volumes\n @@client.describe_volumes.volumes\n end", "def adopt_existing_volumes!\n Volume.all.each do |ec2_vol|\n next if ec2_vol.deleted? || ec2_vol.deleting?\n instance = Instance.find(ec2_vol.attached_instance_id) ; p instance ; next unless instance\n cluster_node_id = instance.get_cluster_node_id(self.name) ; next unless cluster_node_id\n cluster_vol_id = cluster_node_id + '-' + ec2_vol.device\n volume_in_cluster = @all_volumes[cluster_vol_id] ; next unless volume_in_cluster\n ec2_vol.update! 
volume_in_cluster.logical_attributes\n @all_volumes[cluster_vol_id] = ec2_vol\n end\n end", "def nfs_opts_setup()\n @folders.each do |k, opts|\n if !opts[:linux__nfs_options]\n opts[:linux__nfs_options] ||= [\"rw\", \"no_subtree_check\", \"all_squash\", \"insecure\"]\n end\n\n # Only automatically set anonuid/anongid if they weren't\n # explicitly set by the user.\n hasgid = false\n hasuid = false\n opts[:linux__nfs_options].each do |opt|\n hasgid = !!(opt =~ /^anongid=/) if !hasgid\n hasuid = !!(opt =~ /^anonuid=/) if !hasuid\n end\n\n opts[:linux__nfs_options] << \"anonuid=#{opts[:map_uid]}\" if !hasuid\n opts[:linux__nfs_options] << \"anongid=#{opts[:map_gid]}\" if !hasgid\n opts[:linux__nfs_options] << \"fsid=#{opts[:uuid]}\"\n\n # Expand the guest path so we can handle things like \"~/vagrant\"\n expanded_guest_path = @machine.guest.capability(\n :shell_expand_guest_path, opts[:guestpath])\n\n # Do the actual creating and mounting\n @machine.communicate.sudo(\"mkdir -p #{expanded_guest_path}\")\n @machine.communicate.sudo(\"chown -R vagrant:vagrant #{expanded_guest_path}\")\n @machine.communicate.sudo(\"chmod u+rw #{expanded_guest_path}\")\n @machine.communicate.sudo(\"chmod g+rws #{expanded_guest_path}\")\n end\n end", "def addVolume(dev, size, type: \"gp2\", delete_on_termination: false)\n\n if setDeleteOntermination(dev, delete_on_termination)\n MU.log \"A volume #{dev} already attached to #{self}, skipping\", MU::NOTICE\n return\n end\n\n MU.log \"Creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).create_volume(\n availability_zone: cloud_desc.placement.availability_zone,\n size: size,\n volume_type: type\n )\n\n MU.retrier(wait: 3, loop_if: Proc.new {\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(volume_ids: [creation.volume_id]).volumes.first\n if ![\"creating\", \"available\"].include?(creation.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n creation.state != \"available\"\n })\n\n\n if @deploy\n MU::Cloud::AWS.createStandardTags(\n creation.volume_id,\n region: @region,\n credentials: @credentials,\n optional: @config['optional_tags'],\n nametag: @mu_name+\"-\"+dev.upcase,\n othertags: @config['tags']\n )\n end\n\n MU.log \"Attaching #{creation.volume_id} as #{dev} to #{@cloud_id} in #{@region} (credentials #{@credentials})\"\n attachment = nil\n MU.retrier([Aws::EC2::Errors::IncorrectState], wait: 15, max: 4) {\n attachment = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).attach_volume(\n device: dev,\n instance_id: @cloud_id,\n volume_id: creation.volume_id\n )\n }\n\n begin\n att_resp = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(volume_ids: [attachment.volume_id])\n if att_resp and att_resp.volumes and !att_resp.volumes.empty? and\n att_resp.volumes.first.attachments and\n !att_resp.volumes.first.attachments.empty?\n attachment = att_resp.volumes.first.attachments.first\n if !attachment.nil? and ![\"attaching\", \"attached\"].include?(attachment.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end\n end while attachment.nil? 
or attachment.state != \"attached\"\n\n # Set delete_on_termination, which for some reason is an instance\n # attribute and not on the attachment\n setDeleteOntermination(dev, delete_on_termination)\n end", "def put_config config = { 'room' => [ { 'name' => 'default-room', 'device' => [ 'light' => { 'name' => 'default-device' } ] } ] }\n File.open( self.get_config_file, 'w' ) do | handle |\n handle.write YAML.dump( config )\n end\n self.get_config_file\n end", "def volume_create\n help = [\n '',\n \"Use: #{me} volume create --name=VOLUME --git=URL\",\n \"Use: #{me} volume create --name=VOLUME --dir=PATH\",\n '',\n 'Creates a volume named VOLUME from a git clone of URL',\n 'Creates a volume named VOLUME from a copy of PATH'\n ]\n # asked for help?\n if [nil,'help','--help'].include? ARGV[2]\n show help\n exit failed\n end\n # unknown arguments?\n knowns = ['name','git','dir']\n unknown = ARGV[2..-1].select do |argv|\n knowns.none? { |known| argv.start_with?('--' + known + '=') }\n end\n if unknown != []\n show help\n unknown.each { |arg| puts \"FAILED: unknown argument [#{arg.split('=')[0]}]\" }\n exit failed\n end\n # required known arguments\n args = ARGV[2..-1]\n vol = get_arg('--name', args)\n url = get_arg('--git', args)\n dir = get_arg('--dir', args)\n if vol.nil? || (url.nil? && dir.nil?)\n show help\n exit failed\n end\n if vol.length == 1\n msg = 'volume names must be at least two characters long. See https://github.com/docker/docker/issues/20122'\n puts \"FAILED: [volume create --name=#{vol}] #{msg}\"\n exit failed\n end\n if volume_exists? vol\n msg = \"#{vol} already exists\"\n puts \"FAILED: [volume create --name=#{vol}] #{msg}\"\n exit failed\n end\n # cyber-dojo.sh does actual [volume create]\nend", "def option_definitions\n [\n { :short => 'v', :long => 'volume_store', :argument => :optional,\n :desc => 'specifify the ZFS root of the new iSCSI volume. Defaults to \"store/volumes\".'\n },\n { :short => 'n', :long => 'name', :argument => :optional,\n :desc => 'the name of the new volume. This must be a valid ZFS volume name, and defaults to ' +\n 'an RFC 4122 GUID.'\n },\n { :short => 's', :long => 'size', :argument => :optional,\n :desc => 'the size of the new iSCSI volume. Note that while ZFS allows you to change the size ' +\n 'of the new volume relatively easily, because the iSCSI initiator sees this volume as a raw ' +\n 'device changing the size later may be very easy or very difficult depending on the initiators ' +\n 'operating system (and the specific file system being used). In other words, choose with care: ' +\n 'by default this command uses a size of 20G, which should be enough for most tasks in the labs.' \n },\n ]\n end", "def configure\n if @opts[:docker]\n Souschef::Print.info 'Creating Docker configuration .kitchen.local.yml'\n write(:docker, Souschef::Testkitchen::Docker.new(@cookbook).yaml)\n end\n if @opts[:solusvm]\n Souschef::Print.info 'Creating SolusVM configuraton .kitchen.local.yml'\n write(:solusvm, Souschef::Testkitchen::Solusvm.new(@cookbook).yaml)\n end\n\n Souschef::Print.info 'Creating Vagrant configuration in .kitchen.yml'\n write(:virtualbox, Souschef::Testkitchen::Virtualbox.new(@cookbook).yaml)\n end", "def attach_disks_specs\n attach_disk_array = []\n extraconfig = []\n attach_spod_array = []\n attach_spod_disk_info = {}\n\n pos = { :ide => 0, :scsi => 0 }\n disks_each(:no_exists?) do |disk|\n disk.one_item['TYPE'] == 'CDROM' ? 
k = :ide : k = :scsi\n\n if disk.storpod?\n spec = calculate_add_disk_spec(disk.one_item, pos[k])\n attach_spod_array << spec\n\n controller_key = spec[:device].controllerKey\n unit_number = spec[:device].unitNumber\n\n unit_ctrl = \"#{controller_key}-#{unit_number}\"\n attach_spod_disk_info[unit_ctrl] = disk.id\n else\n aspec = calculate_add_disk_spec(disk.one_item, pos[k])\n extra_key = \"opennebula.mdisk.#{disk.one_item['DISK_ID']}\"\n extra_value = aspec[:device].key.to_s\n\n attach_disk_array << aspec\n extraconfig << { :key => extra_key, :value => extra_value }\n end\n\n pos[k]+=1\n end\n\n { :disks => attach_disk_array,\n :spods => attach_spod_array,\n :spod_info => attach_spod_disk_info,\n :extraconfig => extraconfig }\n end", "def provision_storage host, vm\n if host['volumes']\n # Lazily create the volume client if needed\n volume_client_create\n host['volumes'].keys.each_with_index do |volume, index|\n @logger.debug \"Creating volume #{volume} for OpenStack host #{host.name}\"\n\n # The node defintion file defines volume sizes in MB (due to precedent\n # with the vagrant virtualbox implementation) however OpenStack requires\n # this translating into GB\n openstack_size = host['volumes'][volume]['size'].to_i / 1000\n\n # Create the volume and wait for it to become available\n vol = @volume_client.volumes.create(\n :size => openstack_size,\n :display_name => volume,\n :description => \"Beaker volume: host=#{host.name} volume=#{volume}\",\n )\n vol.wait_for { ready? }\n\n # Fog needs a device name to attach as, so invent one. The guest\n # doesn't pay any attention to this\n device = \"/dev/vd#{('b'.ord + index).chr}\"\n vm.attach_volume(vol.id, device)\n end\n end\n end", "def defaults\n owner.volume(:root).reverse_merge!({\n :device => '/dev/sda1',\n :mount_point => '/',\n :mountable => false,\n })\n self.reverse_merge!({\n :availability_zones => ['us-east-1d'],\n :backing => 'ebs',\n :flavor => 't1.micro',\n })\n super\n end", "def generate_config(watchers)\n watchers.each do |watcher|\n # if service doesnt have backup block, skip it\n next unless watcher.backup\n log.info \"#{watcher.name} - #{watcher.backends} - #{watcher.haproxy} - #{watcher.backup}\"\n \n @watcher_configs[watcher.name] = parse_watcher_config(watcher)\n backup_conf_file = watcher.backup['backup_conf_file']\n cron_conf_file = watcher.backup['cron_conf_file']\n gzip = watcher.backup['gzip']\n name = watcher.name\n\n # generate backup config\n databases_stanza = generate_stanza(\"#{@backup['databases']['type']}.erb\", @watcher_configs[watcher.name]['databases'])\n storages_stanza = generate_stanza(\"#{@backup['storages']['type']}.erb\", @watcher_configs[watcher.name]['storages'])\n notifiers_stanza = generate_stanza(\"#{@backup['notifiers']['type']}.erb\", @watcher_configs[watcher.name]['notifiers'])\n final_config = generate_backup(name, databases_stanza, storages_stanza, notifiers_stanza, gzip)\n log.info \"config array is #{final_config}\"\n\n write_config(final_config, backup_conf_file)\n\n # generate cron job config \n run_command = \"#{@opts['run_command']} --config-file #{@opts['config_file']} --trigger #{name}\"\n cron_config = \"#{watcher.backup['cron']} #{run_command} > /dev/null 2>&1\"\n\n if @opts['do_cron']\n write_config(cron_config, cron_conf_file)\n end\n end\n end", "def add key, &block\n @config_items << ConfigItem.new(:posix, key, &block)\n end", "def to_configure_vapp_hash\n {\n :name => name,\n :cpus => cpus,\n :memory => memory,\n :disks => disks.map {|d| { :number => d.address.to_s, :size 
=> d.vcloud_size, :resource => d.vcloud_size.to_s } }\n }\n end", "def to_configure_vapp_hash\n {\n :name => name,\n :cpus => cpus,\n :memory => memory,\n :disks => disks.map {|d| { :number => d.address.to_s, :size => d.vcloud_size, :resource => d.vcloud_size.to_s } }\n }\n end", "def volume_from(name)\n volumes << VolumeFrom.new(name)\n end", "def create!\n set_id = generate_set_id\n\n @volume_ids.each do |id|\n snapshot = @fog.snapshots.new\n\n snapshot.description = \"#{@hostname.split(\".\")[0]} #{@mount} (#{self.needed_types.join(\", \")}) (#{set_id})\"\n snapshot.volume_id = id\n\n # Actually do the snapshot\n snapshot.save\n\n # Reload to get snapshot.id so we can add tags\n snapshot.reload\n\n @fog.tags.create(:resource_id => snapshot.id, :key => \"Host\", :value => @hostname)\n @fog.tags.create(:resource_id => snapshot.id, :key => \"Mount\", :value => @mount)\n @fog.tags.create(:resource_id => snapshot.id, :key => \"SetID\", :value => set_id)\n @fog.tags.create(:resource_id => snapshot.id, :key => \"Type\", :value => self.needed_types.join(\",\"))\n end\n end", "def config\n config = {}\n config['recipes'] = []\n config['recipes'] |= hash_path(@default_config, 'recipes', 'global') || []\n config['recipes'] |= hash_path(@default_config, 'recipes', @platform) || []\n @group_configs.each do |group_name, group_config|\n config['recipes'] |= hash_path(group_config, 'recipes', 'global') || []\n config['recipes'] |= hash_path(group_config, 'recipes', @platform) || []\n end\n people_recipes = @people_config['recipes'] || {}\n config['recipes'] |= people_recipes['global'] || []\n config['recipes'] |= people_recipes[@platform] || []\n config['attributes'] = {}\n config['attributes'].deep_merge!(@default_config['attributes'] || {}) { |key, old, new| Array.wrap(old) + Array.wrap(new) }\n @group_configs.each do |group_name, group_config|\n config['attributes'].deep_merge!(group_config['attributes']) { |key, old, new| Array.wrap(old) + Array.wrap(new) } unless group_config['attributes'].nil?\n end\n people_attributes = @people_config['attributes'] || {}\n config['attributes'].deep_merge!(people_attributes) { |key, old, new| Array.wrap(old) + Array.wrap(new) }\n config\n end", "def load_volume cluster_node_id, volume_cfg\n cluster_vol_id = cluster_node_id + '-' + volume_cfg[:device]\n cluster_vol_params = {\n :cluster => self,\n :cluster_vol_id => cluster_vol_id, :cluster_node_id => cluster_node_id,\n }.merge(\n volume_cfg.slice(:mount_point, :size, :from_snapshot_id, :availability_zone, :device))\n @all_volumes[cluster_vol_id] = Volume.new(cluster_vol_params)\n end", "def generate_config(resource)\n resource = symbolize_hash(convert_to_hash(resource))\n config = []\n config << \"lxc.utsname = #{resource[:utsname]}\"\n if(resource[:aa_profile])\n config << \"lxc.aa_profile = #{resource[:aa_profile]}\"\n end\n [resource[:network]].flatten.each do |net_hash|\n nhsh = Mash.new(net_hash)\n flags = nhsh.delete(:flags)\n %w(type link).each do |k|\n config << \"lxc.network.#{k} = #{nhsh.delete(k)}\" if nhsh[k]\n end\n nhsh.each_pair do |k,v|\n config << \"lxc.network.#{k} = #{v}\"\n end\n if(flags)\n config << \"lxc.network.flags = #{flags}\"\n end\n end\n if(resource[:cap_drop])\n config << \"lxc.cap.drop = #{Array(resource[:cap_drop]).join(' ')}\"\n end\n %w(include pts tty arch devttydir mount mount_entry rootfs rootfs_mount pivotdir).each do |k|\n config << \"lxc.#{k.sub('_', '.')} = #{resource[k.to_sym]}\" if resource[k.to_sym]\n end\n prefix = 'lxc.cgroup'\n resource[:cgroup].each_pair do |key, 
value|\n if(value.is_a?(Array))\n value.each do |val|\n config << \"#{prefix}.#{key} = #{val}\"\n end\n else\n config << \"#{prefix}.#{key} = #{value}\"\n end\n end\n config.join(\"\\n\") + \"\\n\"\n end", "def attach_disk(config, prefix, disk_num, size)\n filename = \"#{prefix}#{disk_num}.vdi\"\n config.vm.provider \"virtualbox\" do |vb|\n if !File.exist?(filename) \n vb.customize ['createhd', '--filename', filename, '--size', (size * 1024).floor, '--variant', 'fixed']\n vb.customize ['modifyhd', filename, '--type', 'shareable']\n end\n\n vb.customize ['storageattach', :id, '--storagectl', 'SATAController', '--port', disk_num + 2, '--device', 0, '--type', 'hdd', '--medium', filename]\n end\nend", "def generate_aws_config(vmname, config)\n {\n :ssh_username => option_handler(config, :ssh_username),\n :use_private_ip_for_ssh => option_handler(config, :use_private_ip_for_ssh),\n :bootstrap_options => {\n :key_name => option_handler(config, :keypair_name),\n :instance_type => option_handler(config, :instance_type),\n :ebs_optimized => option_handler(config, :ebs_optimized),\n :image_id => option_handler(config, :image_id),\n :subnet_id => option_handler(config, :subnet_id),\n :associate_public_ip_address => option_handler(config, :associate_public_ip_address),\n # :user_data => nil, #TODO\n :block_device_mappings => [\n { device_name: option_handler(config, :root_block_device),\n ebs: {\n volume_size: option_handler(config, :root_block_device_size),\n volume_type: option_handler(config, :root_block_device_type),\n delete_on_termination: true\n }\n }\n ] + ephemeral_volumes(option_handler(config, :instance_type))\n },\n :aws_tags => option_handler(config, :aws_tags),\n :convergence_options => {\n :install_sh_arguments => option_handler(config, :install_sh_arguments),\n :bootstrap_proxy => option_handler(config, :bootstrap_proxy),\n :chef_config => option_handler(config, :chef_config),\n :chef_version => option_handler(config, :chef_version)\n }\n }\n end", "def init_config_items( extra_answers={} )\n answers = {}\n things = {\n 'NetworkInterface' => 'br1',\n 'DHCP' => 'static',\n 'Hostname' => 'scli.tasty.bacon',\n 'IPAddress' => '10.0.71.50',\n 'Netmask' => '255.255.255.0',\n 'Gateway' => '10.0.71.1',\n 'DNSServers' => ['10.0.71.7', '8.8.8.8'],\n 'DNSSearch' => 'tasty.bacon',\n }\n things.each do |name,value|\n item = Simp::Cli::Config::Item.const_get(name).new\n if extra_answers.keys.include? 
item.key\n item.value = extra_answers.fetch( item.key )\n else\n item.value = value\n end\n answers[ item.key ] = item\n end\n answers\n end", "def configure_directories\n processing_dir = @config['processing_dir']\n if @config['directory_formats']\n @spec['origdir'] = @config['origdir'] || parse_directory_format(@config['directory_formats']['origdir']) || File.join(processing_dir, @spec['subid'] + '_orig')\n @spec['procdir'] = @config['procdir'] || parse_directory_format(@config['directory_formats']['procdir']) || File.join(processing_dir, @spec['subid'] + '_proc')\n @spec['statsdir'] = @config['statsdir'] || parse_directory_format(@config['directory_formats']['statsdir']) || File.join(processing_dir, @spec['subid'] + '_stats')\n else\n @spec['origdir'] = @config['origdir'] || File.join(processing_dir, @spec['subid'] + '_orig')\n @spec['procdir'] = @config['procdir'] || File.join(processing_dir, @spec['subid'] + '_proc')\n @spec['statsdir'] = @config['statsdir'] || File.join(processing_dir, @spec['subid'] + '_stats')\n end\n end", "def volume(volume, server)\n new_volume = { server => { 'volume' => volume }}\n unless STREAM_DATA.stream_data[server].nil?\n m = STREAM_DATA.stream_data.merge!(new_volume) { |_key, left, right| left.merge!(right) }\n STREAM_DATA.add(m)\n else\n m = STREAM_DATA.stream_data.merge!(new_volume) { |_key, left, right| left.merge!(right) }\n STREAM_DATA.add(m)\n end\n end", "def setup_lvm_on_partition(part)\n return unless part.lvm\n\n pvol = \"/dev/disk/by-partlabel/#{part.label}\"\n execute!(\"pvcreate -y #{pvol}\")\n execute!(\"vgcreate -y #{part.lvm.vg_name} #{pvol}\")\n\n # any \"open ended\" volumes (no size specified), we deal with last\n unspec_vol = nil\n\n notice(\"Creating LVM partitions\")\n part.lvm.volumes.each do |vol|\n if not vol.size_mb.is_a?(Integer)\n unspec_vol = vol\n next\n end\n\n info(\"Creating #{vol.label} volume\")\n execute!(\"lvcreate -y --name #{vol.label} --size #{vol.size_mb}MiB #{part.lvm.vg_name}\")\n next if not vol.fs\n\n create_filesystem(vol.fs, \"/dev/#{part.lvm.vg_name}/#{vol.label}\", vol.label)\n end\n\n if unspec_vol\n vol = unspec_vol\n info(\"Creating #{vol.label} volume\")\n execute!(\"lvcreate -y --name #{vol.label} -l 100%FREE #{part.lvm.vg_name}\")\n if vol.fs\n create_filesystem(vol.fs, \"/dev/#{part.lvm.vg_name}/#{vol.label}\", vol.label)\n end\n end\n end", "def prompt_resize_volumes(current_volumes, plan_info, provision_type, options={})\n #puts \"Configure Volumes:\"\n no_prompt = (options[:no_prompt] || (options[:options] && options[:options][:no_prompt]))\n\n current_root_volume = current_volumes[0]\n\n volumes = []\n\n plan_size = nil\n if plan_info['maxStorage']\n plan_size = plan_info['maxStorage'].to_i / (1024 * 1024 * 1024)\n end\n\n root_storage_types = []\n if plan_info['rootStorageTypes']\n plan_info['rootStorageTypes'].each do |opt|\n if !opt.nil?\n root_storage_types << {'name' => opt['name'], 'value' => opt['id']}\n end\n end\n end\n\n storage_types = []\n if plan_info['storageTypes']\n plan_info['storageTypes'].each do |opt|\n if !opt.nil?\n storage_types << {'name' => opt['name'], 'value' => opt['id']}\n end\n end\n end\n\n datastore_options = []\n if plan_info['supportsAutoDatastore']\n if plan_info['autoOptions']\n plan_info['autoOptions'].each do |opt|\n if !opt.nil?\n datastore_options << {'name' => opt['name'], 'value' => opt['id']}\n end\n end\n end\n end\n if plan_info['datastores']\n plan_info['datastores'].each do |k, v|\n v.each do |opt|\n if !opt.nil?\n datastore_options << {'name' => 
\"#{k}: #{opt['name']}\", 'value' => opt['id']}\n end\n end\n end\n end\n\n #puts \"Configure Root Volume\"\n\n field_context = \"rootVolume\"\n\n if root_storage_types.empty?\n # this means there's no configuration, just send a single root volume to the server\n storage_type_id = nil\n storage_type = nil\n else\n #v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'storageType', 'type' => 'select', 'fieldLabel' => 'Root Storage Type', 'selectOptions' => root_storage_types, 'required' => true, 'defaultFirstOption' => true, 'skipSingleOption' => true, 'description' => 'Choose a storage type.'}], options[:options])\n #storage_type_id = v_prompt[field_context]['storageType']\n storage_type_id = current_root_volume['type'] || current_root_volume['storageType']\n storage_type = plan_info['storageTypes'].find {|i| i['id'] == storage_type_id.to_i }\n end\n\n # sometimes the user chooses sizeId from a list of size options (AccountPrice) and other times it is free form\n root_custom_size_options = []\n if plan_info['rootCustomSizeOptions'] && plan_info['rootCustomSizeOptions'][storage_type_id.to_s]\n plan_info['rootCustomSizeOptions'][storage_type_id.to_s].each do |opt|\n if !opt.nil?\n root_custom_size_options << {'name' => opt['value'], 'value' => opt['key']}\n end\n end\n end\n\n volume = {\n 'id' => current_root_volume['id'],\n 'rootVolume' => true,\n 'name' => current_root_volume['name'],\n 'size' => current_root_volume['size'] > 0 ? current_root_volume['size'] : plan_size,\n 'sizeId' => nil,\n 'storageType' => storage_type_id,\n 'datastoreId' => current_root_volume['datastoreId']\n }\n\n if plan_info['rootDiskCustomizable'] && storage_type && storage_type['customLabel']\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'name', 'type' => 'text', 'fieldLabel' => 'Root Volume Label', 'required' => true, 'description' => 'Enter a volume label.', 'defaultValue' => volume['name']}], options[:options])\n volume['name'] = v_prompt[field_context]['name']\n end\n if plan_info['rootDiskCustomizable'] && storage_type && storage_type['customSize']\n # provision_type['rootDiskSizeKnown'] == false means size cannot be changed\n if provision_type['rootDiskSizeKnown'] == false\n # volume['size'] = plan_size if plan_size.to_i != 0\n else\n if root_custom_size_options.empty?\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'size', 'type' => 'number', 'fieldLabel' => 'Root Volume Size (GB)', 'required' => true, 'description' => 'Enter a volume size (GB).', 'defaultValue' => volume['size']}], options[:options])\n volume['size'] = v_prompt[field_context]['size']\n volume['sizeId'] = nil #volume.delete('sizeId')\n else\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'sizeId', 'type' => 'select', 'fieldLabel' => 'Root Volume Size', 'selectOptions' => root_custom_size_options, 'required' => true, 'description' => 'Choose a volume size.'}], options[:options])\n volume['sizeId'] = v_prompt[field_context]['sizeId']\n volume['size'] = nil #volume.delete('size')\n end\n end\n else\n # might need different logic here ? 
=o\n # volume['size'] = plan_size\n # volume['sizeId'] = nil #volume.delete('sizeId')\n end\n # if !datastore_options.empty?\n # v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'datastoreId', 'type' => 'select', 'fieldLabel' => 'Root Datastore', 'selectOptions' => datastore_options, 'required' => true, 'description' => 'Choose a datastore.'}], options[:options])\n # volume['datastoreId'] = v_prompt[field_context]['datastoreId']\n # end\n\n volumes << volume\n\n # modify or delete existing data volumes\n (1..(current_volumes.size-1)).each do |volume_index|\n current_volume = current_volumes[volume_index]\n if current_volume\n\n field_context = \"dataVolume#{volume_index}\"\n\n action_options = [{'name' => 'Modify', 'value' => 'modify'}, {'name' => 'Keep', 'value' => 'keep'}, {'name' => 'Delete', 'value' => 'delete'}]\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'action', 'type' => 'select', 'fieldLabel' => \"Modify/Keep/Delete volume '#{current_volume['name']}'\", 'selectOptions' => action_options, 'required' => true, 'defaultValue' => 'keep', 'description' => 'Modify, Keep or Delete existing data volume?'}], options[:options])\n volume_action = v_prompt[field_context]['action']\n\n if volume_action == 'delete'\n # deleted volume is just excluded from post params\n next\n elsif volume_action == 'keep'\n volume = {\n 'id' => current_volume['id'].to_i,\n 'rootVolume' => false,\n 'name' => current_volume['name'],\n 'size' => current_volume['size'] > (plan_size || 0) ? current_volume['size'] : plan_size,\n 'sizeId' => nil,\n 'storageType' => (current_volume['type'] || current_volume['storageType']),\n 'datastoreId' => current_volume['datastoreId']\n }\n volumes << volume\n else\n # v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'storageType', 'type' => 'select', 'fieldLabel' => \"Disk #{volume_index} Storage Type\", 'selectOptions' => storage_types, 'required' => true, 'skipSingleOption' => true, 'description' => 'Choose a storage type.'}], options[:options])\n # storage_type_id = v_prompt[field_context]['storageType']\n storage_type_id = current_volume['type'] || current_volume['storageType']\n storage_type = plan_info['storageTypes'].find {|i| i['id'] == storage_type_id.to_i }\n # sometimes the user chooses sizeId from a list of size options (AccountPrice) and other times it is free form\n custom_size_options = []\n if plan_info['customSizeOptions'] && plan_info['customSizeOptions'][storage_type_id.to_s]\n plan_info['customSizeOptions'][storage_type_id.to_s].each do |opt|\n if !opt.nil?\n custom_size_options << {'name' => opt['value'], 'value' => opt['key']}\n end\n end\n end\n\n volume = {\n 'id' => current_volume['id'].to_i,\n 'rootVolume' => false,\n 'name' => current_volume['name'],\n 'size' => current_volume['size'] ? 
current_volume['size'] : (plan_size || 0),\n 'sizeId' => nil,\n 'storageType' => (current_volume['type'] || current_volume['storageType']),\n 'datastoreId' => current_volume['datastoreId']\n }\n\n if plan_info['customizeVolume'] && storage_type['customLabel']\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'name', 'type' => 'text', 'fieldLabel' => \"Disk #{volume_index} Volume Label\", 'required' => true, 'description' => 'Enter a volume label.', 'defaultValue' => volume['name']}], options[:options])\n volume['name'] = v_prompt[field_context]['name']\n end\n if plan_info['customizeVolume'] && storage_type['customSize']\n if custom_size_options.empty?\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'size', 'type' => 'number', 'fieldLabel' => \"Disk #{volume_index} Volume Size (GB)\", 'required' => true, 'description' => 'Enter a volume size (GB).', 'defaultValue' => volume['size']}], options[:options])\n volume['size'] = v_prompt[field_context]['size']\n volume['sizeId'] = nil #volume.delete('sizeId')\n else\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'sizeId', 'type' => 'select', 'fieldLabel' => \"Disk #{volume_index} Volume Size\", 'selectOptions' => custom_size_options, 'required' => true, 'description' => 'Choose a volume size.'}], options[:options])\n volume['sizeId'] = v_prompt[field_context]['sizeId']\n volume['size'] = nil #volume.delete('size')\n end\n else\n # might need different logic here ? =o\n # volume['size'] = plan_size\n # volume['sizeId'] = nil #volume.delete('sizeId')\n end\n # if !datastore_options.empty?\n # v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'datastoreId', 'type' => 'select', 'fieldLabel' => \"Disk #{volume_index} Datastore\", 'selectOptions' => datastore_options, 'required' => true, 'description' => 'Choose a datastore.'}], options[:options])\n # volume['datastoreId'] = v_prompt[field_context]['datastoreId']\n # end\n\n volumes << volume\n\n end\n\n end\n end\n\n\n if plan_info['addVolumes']\n volume_index = current_volumes.size\n has_another_volume = options[:options] && options[:options][\"dataVolume#{volume_index}\"]\n add_another_volume = has_another_volume || (!no_prompt && Morpheus::Cli::OptionTypes.confirm(\"Add data volume?\", {:default => false}))\n while add_another_volume do\n #puts \"Configure Data #{volume_index} Volume\"\n\n current_root_volume_type = current_root_volume['type']\n storage_type_match = storage_types.find {|type| type['value'] == current_root_volume_type}\n default_storage_type = storage_type_match ? 
current_root_volume_type : storage_types[0]['value']\n field_context = \"dataVolume#{volume_index}\"\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'defaultValue' => default_storage_type, 'fieldContext' => field_context, 'fieldName' => 'storageType', 'type' => 'select', 'fieldLabel' => \"Disk #{volume_index} Storage Type\", 'selectOptions' => storage_types, 'required' => true, 'defaultFirstOption' => true, 'skipSingleOption' => true, 'description' => 'Choose a storage type.'}], options[:options])\n storage_type_id = v_prompt[field_context]['storageType']\n storage_type = plan_info['storageTypes'].find {|i| i['id'] == storage_type_id.to_i }\n\n # sometimes the user chooses sizeId from a list of size options (AccountPrice) and other times it is free form\n custom_size_options = []\n if plan_info['customSizeOptions'] && plan_info['customSizeOptions'][storage_type_id.to_s]\n plan_info['customSizeOptions'][storage_type_id.to_s].each do |opt|\n if !opt.nil?\n custom_size_options << {'name' => opt['value'], 'value' => opt['key']}\n end\n end\n end\n\n volume_label = (volume_index == 1 ? 'data' : \"data #{volume_index}\")\n volume = {\n 'id' => -1,\n 'rootVolume' => false,\n 'name' => volume_label,\n 'size' => plan_size,\n 'sizeId' => nil,\n 'storageType' => storage_type_id,\n 'datastoreId' => nil\n }\n\n if plan_info['customizeVolume'] && storage_type['customLabel']\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'name', 'type' => 'text', 'fieldLabel' => \"Disk #{volume_index} Volume Label\", 'required' => true, 'description' => 'Enter a volume label.', 'defaultValue' => volume_label}], options[:options])\n volume['name'] = v_prompt[field_context]['name']\n end\n if plan_info['customizeVolume'] && storage_type['customSize']\n if custom_size_options.empty?\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'size', 'type' => 'number', 'fieldLabel' => \"Disk #{volume_index} Volume Size (GB)\", 'required' => true, 'description' => 'Enter a volume size (GB).', 'defaultValue' => plan_size}], options[:options])\n volume['size'] = v_prompt[field_context]['size']\n volume['sizeId'] = nil #volume.delete('sizeId')\n else\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'sizeId', 'type' => 'select', 'fieldLabel' => \"Disk #{volume_index} Volume Size\", 'selectOptions' => custom_size_options, 'required' => true, 'description' => 'Choose a volume size.'}], options[:options])\n volume['sizeId'] = v_prompt[field_context]['sizeId']\n volume['size'] = nil #volume.delete('size')\n end\n else\n # might need different logic here ? =o\n # volume['size'] = plan_size\n # volume['sizeId'] = nil #volume.delete('sizeId')\n end\n \n if datastore_options.empty? 
&& storage_type['hasDatastore'] != false\n begin\n datastore_res = datastores_interface.list({'poolId' => current_root_volume['resourcePoolId'], 'resourcePoolId' => current_root_volume['resourcePoolId'], 'zoneId' => options['zoneId'], 'siteId' => options['siteId']})['datastores']\n datastore_res.each do |opt|\n datastore_options << {'name' => opt['name'], 'value' => opt['id']}\n end\n rescue\n datastore_options = []\n end\n end\n if !datastore_options.empty?\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'defaultValue' => current_root_volume['datastoreId'],'fieldContext' => field_context, 'fieldName' => 'datastoreId', 'type' => 'select', 'fieldLabel' => \"Disk #{volume_index} Datastore\", 'selectOptions' => datastore_options, 'required' => true, 'description' => 'Choose a datastore.'}], options[:options])\n volume['datastoreId'] = v_prompt[field_context]['datastoreId']\n end\n\n volumes << volume\n\n # todo: should maxDisk check consider the root volume too?\n if plan_info['maxDisk'] && volume_index >= plan_info['maxDisk']\n add_another_volume = false\n else\n volume_index += 1\n has_another_volume = options[:options] && options[:options][\"dataVolume#{volume_index}\"]\n add_another_volume = has_another_volume || (!no_prompt && Morpheus::Cli::OptionTypes.confirm(\"Add another data volume?\"))\n end\n\n end\n\n end\n\n return volumes\n end", "def config_lv_define_box2(vm, conf)\n vm.define conf['hostname_box2'] do |box2|\n box2.vm.hostname = conf['hostname_box2']\n box2.vm.box = conf['imagename_box2']\n box2.vm.network :private_network,\n :libvirt__network_name => \"mgmt\",\n :mac => conf['libvirt_mgmt_mac_box2'],\n :ip => conf['libvirt_mgmt_ip_box2'],\n :libvirt__netmask => conf['libvirt_mgmt_netmask_box2'],\n :libvirt__dhcp_enabled => false,\n :libvirt__forward_mode => \"none\",\n :autostart => true\n box2.vm.network :public_network,\n :network_name => \"ext\",\n :ip => conf['libvirt_ext_ip_box2'],\n :netmask => conf['libvirt_ext_netmask_box2'],\n :gateway => conf['libvirt_ext_gateway_box2'],\n :mac => conf['libvirt_ext_mac_box2'],\n :dev => conf['libvirt_dev'],\n :type => conf['libvirt_type'],\n :mode => conf['libvirt_mode']\n box2.vm.network :private_network,\n :libvirt__network_name => \"ceph\",\n :mac => conf['libvirt_ceph_mac_box2'],\n :ip => conf['libvirt_ceph_ip_box2'],\n :libvirt__netmask => conf['libvirt_ceph_netmask_box2'],\n :libvirt__dhcp_enabled => false,\n :libvirt__forward_mode => \"none\",\n :autostart => true\n box2.vm.network :private_network,\n :libvirt__network_name => \"vm_tunnel\",\n :mac => conf['libvirt_tunnel_mac_box2'],\n :ip => conf['libvirt_tunnel_ip_box2'],\n :libvirt__netmask => conf['libvirt_tunnel_netmask_box2'],\n :libvirt__dhcp_enabled => false,\n :libvirt__forward_mode => \"none\",\n :autostart => true\n box2.vm.provider :libvirt do |domain|\n domain.memory = conf['memory_box2']\n domain.cpus = conf['cpus_box2']\n domain.management_network_name = 'vagrantmgmt'\n domain.management_network_address = conf['libvirt_vagrantmgmt_ip_box2']\n domain.management_network_mode = conf['libvirt_mgmt_mode']\n end\n config_provision(box2.vm, conf)\n end\nend", "def configure_disks(vb, server, hostname, name)\n vminfo = vm_info(name)\n disks = server['disks'] || {}\n unless vminfo =~ /Storage Controller Name \\(1\\): *SATA Controller/\n # puts \"Attaching SATA Controller\"\n vb.customize [\n 'storagectl', :id,\n '--name', 'SATA Controller',\n '--add', 'sata',\n '--portcount', disks.size\n ]\n # else\n # puts 'SATA Controller already attached'\n end\n\n 
disks.each_with_index do |disk, i|\n disk_name = disk.first\n disk_size = disk.last['size']\n disk_uuid = disk.last['uuid']\n real_uuid = \"00000000-0000-0000-0000-#{disk_uuid.rjust(12,'0')}\"\n if server['cluster']\n disk_filename = File.join(VAGRANT_ROOT, \"#{disk_name}_#{server['cluster']}.vdi\")\n else\n disk_filename = File.join(VAGRANT_ROOT, \"#{disk_name}.vdi\")\n end\n\n if File.file?(disk_filename)\n # puts \"Disk #{disk_filename} already created\"\n disk_hash = `VBoxManage showmediuminfo \"#{disk_filename}\"`.scan(/(.*): *(.*)/).to_h\n current_uuid = disk_hash['UUID']\n else\n # puts \"Creating disk #{disk_filename}\"\n current_uuid = '0'\n if server['cluster']\n vb.customize [\n 'createhd',\n '--filename', disk_filename,\n '--size', disk_size.to_s,\n '--variant', 'Fixed'\n ]\n vb.customize [\n 'modifyhd', disk_filename,\n '--type', 'shareable'\n ]\n else\n vb.customize [\n 'createhd',\n '--filename', disk_filename,\n '--size', disk_size.to_s,\n '--variant', 'Standard'\n ]\n end\n end\n\n # Conditional for adding disk_uuid\n if server['cluster'] && current_uuid == real_uuid\n # puts \"Attaching shareable disk #{disk_filename}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--mtype', 'shareable'\n ]\n elsif server['cluster']\n # puts \"Attaching shareable disk #{disk_filename}, adding UUID #{real_uuid}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--mtype', 'shareable',\n '--setuuid', real_uuid\n ]\n elsif current_uuid == real_uuid\n # puts \"Attaching normal disk #{disk_filename}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename\n ]\n else\n # puts \"Attaching normal disk #{disk_filename}, adding UUID #{real_uuid}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--setuuid', real_uuid\n ]\n end\n end\nend", "def getservedvolumes\n # only support Linux for now\n return {} unless Facter['kernel'] && Facter['kernel'].value == 'Linux'\n\n # Don't do anything if exports file is not there\n return {} if !File.exists?(\"/etc/exports\")\n\n served = {}\n\n IO.foreach(\"/etc/exports\") do |line|\n if line =~ /(\\S+)\\s+/\n vol = $1\n served[\"volumes[served][#{vol}][config]\"] = \"/etc/exports\"\n served[\"volumes[served][#{vol}][type]\"] = 'nfs'\n end\n end\n return served\n end", "def build_volume_list_file(volume_list, path)\n File.write(path, volume_list.join(\"\\n\"))\n end", "def setup_catalog\n [\n {product_name: 'bamba', photo_url: 'dummy_url1', barcode: 'xxx1', sku: 111, price: 2.23, producer: 'Nestle'},\n {product_name: 'apple', photo_url: 'dummy_url2', barcode: 'xxx2', sku: 555, price: 4.66, producer: 'WFM'},\n {product_name: 'tapu chepse', photo_url: 'dummy_url3', barcode: 'xxx3', sku: 1111444, price: 7.33, producer: 'Nestle'}\n ].each do |data|\n Catalog.create(data)\n end\n end", "def prepare_container_component_dirs; end", "def merge_vm_disks(host, global, vb, controller)\n # Only get the first entry from global['vm_groups'] if more than one entry present\n vb_dir=global['vm_basedir'] ? 
global['vm_basedir'] + global['vm_groups'].partition(',')[0] + \"/\" + host['vm_name'] + \"/\" : \"./.virtualbox/\"\n if global['vm_disks'] or host['vm_disks']\n merge_hash = merge_2_array_of_hashes(global['vm_disks'], host['vm_disks'])\n merge_hash.each do |key, value|\n diskname=\"#{vb_dir}#{host['vm_name']}-#{key}.vdi\"\n unless File.exist?(diskname)\n vb.customize [\"createmedium\", \"disk\", \"--filename\", diskname, \"--size\", value * 1024 , \"--format\", \"vdi\", \"--variant\", \"Standard\"]\n end\n vb.customize [\"storageattach\", :id , \"--storagectl\", controller, \"--port\", key, \"--device\", \"0\", \"--type\", \"hdd\", \"--medium\", diskname]\n end\n end\nend", "def add_tmp_volume(volume, host_spec = {:host_name => 'localhost'})\n @tmp_volumes_by_hosts[host_spec[:host_name]] << volume\n @hosts_specs[host_spec[:host_name]] ||= host_spec.to_h\n end", "def generate_ansible_configs(hosts, _datastores, _provision)\n ansible_dir = Dir.mktmpdir\n msg = \"Generating Ansible configurations into #{ansible_dir}\"\n\n OneProvisionLogger.debug(msg)\n\n # Generate 'inventory' file\n c = \"[nodes]\\n\"\n\n hosts.each do |h|\n host = Resource.object('hosts')\n host.info(h['id'])\n\n h_vars = host.one['TEMPLATE/ANSIBLE_HOST_VARS']\n\n if h_vars\n c << \"#{host.one['NAME']} #{h_vars}\\n\"\n else\n c << \"#{host.one['NAME']}\\n\"\n end\n end\n\n c << \"\\n\"\n\n c << \"[targets]\\n\"\n\n hosts.each do |h|\n host = Resource.object('hosts')\n host.info(h['id'])\n\n conn = get_host_template_conn(host.one)\n\n c << \"#{host.one['NAME']} \"\n c << 'ansible_connection=ssh '\n c << \"ansible_ssh_private_key_file=#{conn['private_key']} \"\n c << \"ansible_user=#{conn['remote_user']} \"\n c << \"ansible_port=#{conn['remote_port']}\\n\"\n end\n\n fname = \"#{ansible_dir}/inventory\"\n Driver.write_file_log(fname, c)\n @inventories = [fname]\n\n @group_vars = []\n\n # Generate \"host_vars\" directory\n Dir.mkdir(\"#{ansible_dir}/host_vars\")\n\n hosts.each do |h|\n host = Resource.object('hosts')\n host.info(h['id'])\n\n var = host.one['TEMPLATE/PROVISION_CONFIGURATION_BASE64']\n var = YAML.safe_load(Base64.decode64(var)) if var\n var ||= {}\n c = YAML.dump(var)\n fname = \"#{ansible_dir}/host_vars/#{host.one['NAME']}.yml\"\n Driver.write_file_log(fname, c)\n end\n\n host = Resource.object('hosts')\n host.info(hosts[0]['id'])\n\n if host.one['TEMPLATE/PROVISION/ANSIBLE_PLAYBOOK']\n @playbooks = host.one['TEMPLATE/PROVISION/ANSIBLE_PLAYBOOK']\n @playbooks = @playbooks.split(',')\n else\n @playbooks = [ANSIBLE_INVENTORY_DEFAULT]\n end\n\n # Generate \"ansible.cfg\" file\n # TODO: what if private_key isn't filename, but content\n # TODO: store private key / equinix\n # credentials securely in the ONE\n\n c = File.read(\"#{ANSIBLE_LOCATION}/ansible.cfg.erb\")\n c = ERBVal.render_from_hash(c, :ans_loc => ANSIBLE_LOCATION)\n\n Driver.write_file_log(\"#{ansible_dir}/ansible.cfg\", c)\n\n # TODO: site.yaml\n # logger(inventoryContent +\n # File.open(\"#{$ANSIBLE_LOCATION}/site.yml\").read(), true)\n\n ansible_dir\n end", "def add_management_devices\n %i(physical_server\n physical_chassis\n physical_storage).each do |management_device_assoc|\n\n add_collection(physical_infra, \"#{management_device_assoc}_management_devices\".to_sym) do |builder|\n builder.add_properties(\n :model_class => ::GuestDevice,\n :manager_ref => %i(device_type hardware),\n :parent_inventory_collections => [management_device_assoc.to_s.pluralize.to_sym]\n )\n end\n end\n end", "def generate_config(watchers)\n main_stanza = []\n 
events_stanza = []\n http_stanza = []\n upstream_stanza = []\n location_stanza = []\n server_stanza = []\n watchers.each do |watcher|\n # if service doesnt has nginx block, skip it\n next unless watcher.nginx \n log.info \"#{watcher.name} - #{watcher.backends} - #{watcher.haproxy} - #{watcher.nginx}\"\n @watcher_configs[watcher.name] = parse_watcher_config(watcher)\n log.info @watcher_configs\n upstream_stanza << generate_upstream_stanza(watcher, @watcher_configs[watcher.name]['upstream']) \n location_stanza << generate_location_stanza(watcher,@watcher_configs[watcher.name]['location'])\n end\n\n main_config = get_main_config\n main_stanza = generate_main_stanza(main_config)\n\n event_config = get_event_config\n events_stanza = generate_event_stanza(event_config)\n\n http_config = get_http_config\n http_stanza = generate_http_stanza(http_config)\n close_bracket = [\"}\"]\n\n base_server_config = get_server_base_config\n server_stanza = generate_server_stanza(location_stanza,base_server_config)\n if http_stanza.empty?\n final_config = main_stanza << events_stanza << http_stanza << upstream_stanza << server_stanza \n else\n final_config = main_stanza << events_stanza << http_stanza << upstream_stanza << server_stanza << close_bracket \n end\n log.info \"config array is #{final_config}\"\n\n return final_config\n end", "def initial_agent_settings(vm_id, agent_id, network_spec, ephemeral_disk,\n existing_disk, environment, blobstore, mbus)\n disk_letters = ('a'..'z').to_a\n config = { vm: { name: vm_id }, agent_id: agent_id,\n environment: environment, blobstore: blobstore,\n mbus: mbus, networks: network_spec,\n disks: { system: \"/dev/sd#{disk_letters.shift}\",\n persistent: {} } }\n if ephemeral_disk\n config[:disks][:ephemeral] = \"/dev/sd#{disk_letters.shift}\"\n end\n config\n end", "def configure(vm_config, number)\n vm_config.vm.network \"public_network\", ip: \"192.168.1.24#{number}\"\n vm_config.vm.host_name = \"level0#{number}.seoshop.net\"\n \n vm_config.vm.provision :puppet do |puppet|\n puppet.manifests_path = \"puppet\"\n puppet.module_path = \"puppet/modules\"\n puppet.manifest_file = \"site.pp\"\n end\nend", "def setup_default_structure!\n self.push_app_path(:root, Merb.root / 'slices' / self.identifier)\n \n self.push_path(:application, root_path('app'))\n self.push_app_path(:application, app_dir_for(:root) / 'app')\n \n app_components.each do |component|\n self.push_path(component, dir_for(:application) / \"#{component}s\")\n self.push_app_path(component, app_dir_for(:application) / \"#{component}s\")\n end\n \n self.push_path(:public, root_path('public'), nil)\n self.push_app_path(:public, Merb.dir_for(:public) / 'slices' / self.identifier, nil)\n \n public_components.each do |component|\n self.push_path(component, dir_for(:public) / \"#{component}s\", nil)\n self.push_app_path(component, app_dir_for(:public) / \"#{component}s\", nil)\n end\n end", "def attach_node_volume (volume_label)\n # XXX should check whether this device name is already allocated,\n # and if so throw an exception\n # Helper method, attach an arbitrary volume using an arbitrary label that must be preconfigured in nodes\n Chef::Log.info(\"In attach_node_volume with volume_label #{volume_label}\")\n mount_device = node.application_attributes[volume_label].mount_device\n volume_id = node.application_attributes[volume_label].volume_id\n\n if mount_device.nil?\n Chef::Log.fatal(\"No mount device for volume label #{volume_label}.\tMust supply a volume label configured in nodes\")\n raise\n end\n\n 
attach_volume(volume_label, volume_id, mount_device)\n end", "def add_common_configs config\n # Mount the synced folders.\n self.synced_folders do | entry |\n config.vm.synced_folder entry[\"path\"], entry[\"name\"]\n end\n # Forward the ssh port. The id ensures that this is used instead of 2222\n # rather than in addition to.\n config.vm.network :forwarded_port, host: self.ssh_port, guest: 22, id: \"ssh\"\n end", "def create_config!\n template = File.read(TEMPLATES + 'montage.yml')\n template.gsub!(/<sprites>/, @sprites_path.to_s)\n template.gsub!(/<sources>/, @sources_path.to_s)\n\n File.open(@dir + '.montage', 'w') do |config|\n config.puts template\n end\n end", "def add_subconfig(config_name)\n define_config_methods(config_name)\n self.send(\"#{config_name}=\", RubyConfig.new)\n end", "def list_volumes\n response = @connection.req(\"GET\", \"/#{@volume_path}\")\n volumes_hash = JSON.parse(response.body)[\"volumes\"]\n volumes_hash.inject([]){|res, current| res << OpenStack::Volume::Volume.new(current); res}\n end", "def generate_vagrant_config\n template = Souffle::Template.new('Vagrantfile.erb')\n temp_binding = OpenStruct.new\n temp_binding.version = Souffle::VERSION\n \n template.render(temp_binding)\n end", "def prompt_volumes(plan_info, provision_type, options={}, api_client=nil, api_params={})\n #puts \"Configure Volumes:\"\n # return [] if plan_info['noDisks']\n\n no_prompt = (options[:no_prompt] || (options[:options] && options[:options][:no_prompt]))\n volumes = []\n plan_size = nil\n if plan_info['maxStorage']\n plan_size = plan_info['maxStorage'].to_i / (1024 * 1024 * 1024)\n end\n root_storage_types = []\n if plan_info['rootStorageTypes']\n plan_info['rootStorageTypes'].sort {|x,y| x['displayOrder'] <=> y['displayOrder'] }.each do |opt|\n if !opt.nil?\n root_storage_types << {'name' => opt['name'], 'value' => opt['id']}\n end\n end\n end\n\n storage_types = []\n if plan_info['storageTypes']\n plan_info['storageTypes'].sort {|x,y| x['displayOrder'] <=> y['displayOrder'] }.each do |opt|\n if !opt.nil?\n storage_types << {'name' => opt['name'], 'value' => opt['id']}\n end\n end\n end\n\n datastore_options = []\n if plan_info['supportsAutoDatastore']\n if plan_info['autoOptions']\n plan_info['autoOptions'].each do |opt|\n if !opt.nil?\n datastore_options << {'name' => opt['name'], 'value' => opt['id']}\n end\n end\n end\n end\n if plan_info['datastores']\n plan_info['datastores'].each do |k, v|\n v.each do |opt|\n if !opt.nil?\n k = 'datastores' if k == 'store'\n k = 'clusters' if k == 'cluster'\n datastore_options << {'name' => \"#{k}: #{opt['name']}\", 'value' => opt['id']}\n end\n end\n end\n end\n # api does not always return datastores, so go get them if needed..\n if plan_info['hasDatastore'] && datastore_options.empty?\n option_results = options_interface.options_for_source('datastores', api_params)\n option_results['data'].each do |it|\n datastore_options << {\"id\" => it[\"value\"] || it[\"id\"], \"name\" => it[\"name\"], \"value\" => it[\"value\"] || it[\"id\"]}\n end\n end\n\n #puts \"Configure Root Volume\"\n\n field_context = \"rootVolume\"\n\n volume_label = options[:root_volume_name] || 'root'\n volume = {\n 'id' => -1,\n 'rootVolume' => true,\n 'name' => volume_label,\n 'size' => plan_size,\n 'sizeId' => nil,\n 'storageType' => nil,\n 'datastoreId' => nil\n }\n if options[:options] && options[:options]['volumes'] && options[:options]['volumes'][0]\n volume = options[:options]['volumes'][0]\n end\n\n if root_storage_types.empty?\n # this means there's no 
configuration, just send a single root volume to the server\n storage_type_id = nil\n storage_type = nil\n else\n default_storage_type = root_storage_types.find {|t| t['value'].to_s == volume['storageType'].to_s}\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'storageType', 'type' => 'select', 'fieldLabel' => 'Root Storage Type', 'selectOptions' => root_storage_types, 'required' => true, 'defaultFirstOption' => true, 'skipSingleOption' => true, 'description' => 'Choose a storage type.', 'defaultValue' => default_storage_type ? default_storage_type['name'] : volume['storageType']}], options[:options])\n storage_type_id = v_prompt[field_context]['storageType']\n storage_type = plan_info['storageTypes'].find {|i| i['id'] == storage_type_id.to_i }\n volume['storageType'] = storage_type_id\n end\n\n # sometimes the user chooses sizeId from a list of size options (AccountPrice) and other times it is free form\n root_custom_size_options = []\n if plan_info['rootCustomSizeOptions'] && plan_info['rootCustomSizeOptions'][storage_type_id.to_s]\n plan_info['rootCustomSizeOptions'][storage_type_id.to_s].each do |opt|\n if !opt.nil?\n root_custom_size_options << {'name' => opt['value'], 'value' => opt['key']}\n end\n end\n end\n\n if plan_info['rootDiskCustomizable'] && storage_type && storage_type['customLabel']\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'name', 'type' => 'text', 'fieldLabel' => 'Root Volume Label', 'required' => true, 'description' => 'Enter a volume label.', 'defaultValue' => volume['name']}], options[:options])\n volume['name'] = v_prompt[field_context]['name']\n end\n if plan_info['rootDiskCustomizable'] && storage_type && storage_type['customSize']\n # provision_type['rootDiskSizeKnown'] == false means size cannot be changed\n if provision_type['rootDiskSizeKnown'] == false\n # volume['size'] = plan_size if plan_size.to_i != 0\n else\n if root_custom_size_options.empty?\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'size', 'type' => 'number', 'fieldLabel' => 'Root Volume Size (GB)', 'required' => true, 'description' => 'Enter a volume size (GB).', 'defaultValue' => volume['size']}], options[:options])\n volume['size'] = v_prompt[field_context]['size']\n volume['sizeId'] = nil #volume.delete('sizeId')\n else\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'sizeId', 'type' => 'select', 'fieldLabel' => 'Root Volume Size', 'selectOptions' => root_custom_size_options, 'required' => true, 'description' => 'Choose a volume size.', 'defaultValue' => volume['sizeId']}], options[:options])\n volume['sizeId'] = v_prompt[field_context]['sizeId']\n volume['size'] = nil #volume.delete('size')\n end\n end\n else\n # might need different logic here ? =o\n #volume['size'] = plan_size\n #volume['sizeId'] = nil #volume.delete('sizeId')\n end\n \n if !datastore_options.empty?\n default_datastore = datastore_options.find {|ds| ds['value'].to_s == volume['datastoreId'].to_s}\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'datastoreId', 'type' => 'select', 'fieldLabel' => 'Root Datastore', 'selectOptions' => datastore_options, 'required' => true, 'description' => 'Choose a datastore.', 'defaultValue' => default_datastore ? 
default_datastore['name'] : volume['datastoreId']}], options[:options])\n volume['datastoreId'] = v_prompt[field_context]['datastoreId']\n end\n\n volumes << volume\n\n if plan_info['addVolumes']\n volume_index = 1\n has_another_volume = (options[:options] && options[:options][\"dataVolume#{volume_index}\"]) || (options[:options] && options[:options]['volumes'] && options[:options]['volumes'][volume_index])\n add_another_volume = has_another_volume || (!no_prompt && Morpheus::Cli::OptionTypes.confirm(\"Add data volume?\", {:default => (options[:defaultAddFirstDataVolume] == true && volume_index == 1)}))\n while add_another_volume do\n #puts \"Configure Data #{volume_index} Volume\"\n\n field_context = \"dataVolume#{volume_index}\"\n\n volume_label = (volume_index == 1 ? 'data' : \"data #{volume_index}\")\n volume = {\n #'id' => -1,\n 'rootVolume' => false,\n 'name' => volume_label,\n 'size' => plan_size,\n 'sizeId' => nil,\n 'storageType' => nil,\n 'datastoreId' => nil\n }\n if options[:options] && options[:options]['volumes'] && options[:options]['volumes'][volume_index]\n volume = options[:options]['volumes'][volume_index]\n end\n\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'storageType', 'type' => 'select', 'fieldLabel' => \"Disk #{volume_index} Storage Type\", 'selectOptions' => storage_types, 'required' => true, 'defaultFirstOption' => true, 'skipSingleOption' => true, 'description' => 'Choose a storage type.', 'defaultValue' => volume['storageType']}], options[:options])\n storage_type_id = v_prompt[field_context]['storageType']\n volume['storageType'] = storage_type_id\n storage_type = plan_info['storageTypes'].find {|i| i['id'] == storage_type_id.to_i }\n\n # sometimes the user chooses sizeId from a list of size options (AccountPrice) and other times it is free form\n custom_size_options = []\n if plan_info['customSizeOptions'] && plan_info['customSizeOptions'][storage_type_id.to_s]\n plan_info['customSizeOptions'][storage_type_id.to_s].each do |opt|\n if !opt.nil?\n custom_size_options << {'name' => opt['value'], 'value' => opt['key']}\n end\n end\n end\n\n if plan_info['customizeVolume'] && storage_type['customLabel']\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'name', 'type' => 'text', 'fieldLabel' => \"Disk #{volume_index} Volume Label\", 'required' => true, 'description' => 'Enter a volume label.', 'defaultValue' => volume['name']}], options[:options])\n volume['name'] = v_prompt[field_context]['name']\n end\n if plan_info['customizeVolume'] && storage_type['customSize']\n if custom_size_options.empty?\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'size', 'type' => 'number', 'fieldLabel' => \"Disk #{volume_index} Volume Size (GB)\", 'required' => true, 'description' => 'Enter a volume size (GB).', 'defaultValue' => volume['size']}], options[:options])\n volume['size'] = v_prompt[field_context]['size']\n volume['sizeId'] = nil #volume.delete('sizeId')\n else\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'sizeId', 'type' => 'select', 'fieldLabel' => \"Disk #{volume_index} Volume Size\", 'selectOptions' => custom_size_options, 'required' => true, 'description' => 'Choose a volume size.', 'defaultValue' => volume['sizeId']}], options[:options])\n volume['sizeId'] = v_prompt[field_context]['sizeId']\n volume['size'] = nil #volume.delete('size')\n end\n else\n # might need 
different logic here ? =o\n volume['size'] = plan_size\n volume['sizeId'] = nil #volume.delete('sizeId')\n end\n if !datastore_options.empty?\n v_prompt = Morpheus::Cli::OptionTypes.prompt([{'fieldContext' => field_context, 'fieldName' => 'datastoreId', 'type' => 'select', 'fieldLabel' => \"Disk #{volume_index} Datastore\", 'selectOptions' => datastore_options, 'required' => true, 'description' => 'Choose a datastore.', 'defaultValue' => volume['datastoreId']}], options[:options])\n volume['datastoreId'] = v_prompt[field_context]['datastoreId']\n end\n\n volumes << volume\n\n volume_index += 1\n if options[:options] && options[:options]['volumes'] && options[:options]['volumes'][volume_index]\n add_another_volume = true\n elsif plan_info['maxDisk'] && volume_index >= plan_info['maxDisk']\n # todo: should maxDisk check consider the root volume too?\n add_another_volume = false\n else\n has_another_volume = options[:options] && options[:options][\"dataVolume#{volume_index}\"]\n add_another_volume = has_another_volume || (!no_prompt && Morpheus::Cli::OptionTypes.confirm(\"Add another data volume?\", {:default => false}))\n end\n\n end\n\n end\n\n return volumes\n end", "def configure(config)\n\t\t\tvalid_configs = [:next_uid, :max_key_age, :tags, :sudo_group]\n\t\t\tto_use_config = config.select{|k,v| valid_configs.include?(k)}\n\t\t\[email protected] do |item_data|\n\t\t\t\titem_data.set(to_use_config)\n\t\t\t\tif(config.has_key?(:user_file_bucket))\n\t\t\t\t\tif(config[:user_file_bucket])\n\t\t\t\t\t\titem_data.set(:user_file_bucket => config[:user_file_bucket])\n\t\t\t\t\telse\n\t\t\t\t\t\titem_data.delete(:user_file_bucket)\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\tif(@metadata.attributes[:identity] == nil)\n\t\t\t\t\titem_data.set(:identity => 0)\n\t\t\t\telse\n\t\t\t\t\titem_data.add(:identity => 1)\n\t\t\t\tend\n\t\t\tend\n\t\tend", "def setup_volumes\n # managing planned volumes is currently only needed in Windows and only if\n # this is not a reboot scenario.\n if !RightScale::Platform.windows? || RightScale::InstanceState.reboot?\n boot\n else\n RightScale::AuditProxy.create(@agent_identity, 'Planned volume management') do |audit|\n @audit = audit\n manage_planned_volumes do\n @audit = nil\n boot\n end\n end\n end\n true\n end", "def add_physical_disks\n add_collection(physical_infra, :physical_disks) do |builder|\n builder.add_properties(\n :manager_ref => %i(physical_storage ems_ref),\n :manager_ref_allowed_nil => %i(ems_ref)\n )\n end\n end", "def volume_create(name)\n @log.info \"Creating volume #{name} from offering id #{DISK_OFFERING}...\"\n ret = @cloud_stack.create_volume(name, ZONE, DISK_OFFERING)\n id = ret[\"createvolumeresponse\"][\"jobid\"]\n wait_for_job id\n vol_id = ret[\"createvolumeresponse\"][\"id\"]\n @log.info \"Created volume id: #{vol_id}\"\n vol_id\n end", "def pull_config\n output = VirtualBox.run_command! 
['VBoxManage', '--nologo', 'showvminfo',\n '--machinereadable', uid]\n config = self.class.parse_machine_readable output\n \n self.name = config['name']\n self.uid = config['UUID']\n board.from_params config\n \n nic_count = config.keys.select { |key| /^nic\\d+$/ =~ key }.max[3..-1].to_i\n 1.upto nic_count do |index|\n if config[\"nic#{index}\"] == 'none'\n nics[index - 1] = nil\n else\n nics[index - 1] ||= VirtualBox::Vm::Nic.new\n nics[index - 1].from_params config, index\n end\n end\n\n bus_count = 1 + (config.keys.select { |key|\n /^storagecontrollername\\d+$/ =~ key\n }.max || \"storagecontrollername-1\")[21..-1].to_i\n 0.upto bus_count - 1 do |index|\n io_buses[index] ||= VirtualBox::Vm::IoBus.new\n io_buses[index].from_params config, index\n end\n \n self\n end", "def define_centos_5\n { 'name' => 'centos-5.10',\n 'driver_config' => { 'template' => 'centos-5.10-x86_64-solus',\n 'plan' => 'VPS2',\n 'node' => 'chefsolushv',\n 'privileged' => 'true',\n 'type' => 'xen',\n 'platform' => 'centos',\n 'use_sudo' => 'false',\n 'username' => 'internal' } }\n end", "def block_device_mappings # rubocop:disable all\n return @bdms if @bdms\n bdms = config[:block_device_mappings] || []\n if bdms.empty?\n if config[:ebs_volume_size] || config.fetch(:ebs_delete_on_termination, nil) ||\n config[:ebs_device_name] || config[:ebs_volume_type]\n # If the user didn't supply block_device_mappings but did supply\n # the old configs, copy them into the block_device_mappings array correctly\n # TODO: remove this logic when we remove the deprecated values\n bdms << {\n :ebs_volume_size => config[:ebs_volume_size] || 8,\n :ebs_delete_on_termination => config.fetch(:ebs_delete_on_termination, true),\n :ebs_device_name => config[:ebs_device_name] || \"/dev/sda1\",\n :ebs_volume_type => config[:ebs_volume_type] || \"standard\"\n }\n end\n end\n\n # Convert the provided keys to what AWS expects\n bdms = bdms.map do |bdm|\n b = {\n :ebs => {\n :volume_size => bdm[:ebs_volume_size],\n :delete_on_termination => bdm[:ebs_delete_on_termination]\n },\n :device_name => bdm[:ebs_device_name]\n }\n b[:ebs][:volume_type] = bdm[:ebs_volume_type] if bdm[:ebs_volume_type]\n b[:ebs][:snapshot_id] = bdm[:ebs_snapshot_id] if bdm[:ebs_snapshot_id]\n b[:virtual_name] = bdm[:ebs_virtual_name] if bdm[:ebs_virtual_name]\n b\n end\n\n debug_if_root_device(bdms)\n\n @bdms = bdms\n end", "def create\n @base_rom = BaseRom.find(params[:base_rom_id])\n @configuration = Configuration.new(params[:configuration])\n @configuration.base_rom = @base_rom\n\n # Add packages\n packages = params[:packages] || []\n packages.each do |package|\n package_id = package.first\n package_selected = package.last\n next if package_selected != '1'\n\n package = Package.find(package_id)\n @configuration.packages << package\n end\n\n # Compute APK changes\n changes = params[:apk]\n changes.each do |change|\n apk = Apk.find(change.first)\n destination = change.last\n\n puts \"#{apk} to #{destination}\"\n\n next if apk.location == destination || (destination=='remove' && apk.base_rom.nil?)\n @configuration.changes << Change.new(:apk => apk, :destination => destination)\n end\n\n respond_to do |format|\n if @configuration.save\n format.html { redirect_to(device_base_rom_configurations_path(@base_rom.device, @base_rom), :notice => 'Your configuration was successfully created. 
It should be ready for download in about 5 minutes.') }\n else\n @extra_apks = Apk.all.select {|a| a.base_rom.nil?}\n format.html { render :action => \"new\" }\n end\n end\n end", "def create_config_base\n # Create keys directory for environment\n FileUtils.cd(self.project_root) { FileUtils.mkdir_p \"config/environments/#{self.name}\" }\n FileUtils.cd(\"#{project_root}/config/environments/#{self.name}\") { FileUtils.mkdir_p %w{steps keys} }\n # Create ssh key for puppet user if environment is vagrant\n generate_puppet_user_keys('vagrant') if self.name == 'vagrant'\n end", "def add_config(name, config)\n\t\tend", "def push_config\n command = ['VBoxManage', 'modifyvm', uid]\n command.concat board.to_params\n nics.each_with_index do |nic, index|\n if nic.nil?\n command.push \"--nic#{index + 1}\", 'none'\n else\n command.concat nic.to_params(index + 1)\n end\n end\n VirtualBox.run_command! command\n \n io_buses.each { |bus| bus.add_to self }\n \n self\n end", "def create_volume(options = {})\n options[:capacity] = options[:capacity] * GB if options[:capacity] < 100000\n vol = pool.create_volume_xml(Volume.to_xml(options))\n Volume.new vol, self\n end", "def prepare!\n cleanup_hostname!\n cleanup_network!\n\n imports = ''\n\n # Find all /etc/nixos/vagrant-*.nix files\n machine.communicate.tap do |c|\n c.execute('find /etc/nixos -maxdepth 1 -type f -name \"vagrant-*.nix\"') do |type, data|\n imports << data\n end\n end\n\n # build\n conf = <<CONF\n{ config, pkgs, ... }:\n{\n imports = [\n #{imports.lines.join(\" \").strip}\n ];\nCONF\n # default NIX_PATH\n conf << <<CONF if config.NIX_PATH\n config.environment.shellInit = ''\n export NIX_PATH=#{config.NIX_PATH}:$NIX_PATH\n '';\nCONF\n conf << '}'\n # output / build the config\n\n write_config(\"vagrant.nix\", conf)\n end", "def volume_mount(local_dir, container_dir)\n local_dir = File.expand_path(local_dir, reference_dir)\n volumes << VolumeMount.new(local_dir, container_dir)\n end", "def config(name, items)\n self[:configMap] = {\n name: name,\n items: items.map do |key, path|\n { key: key, path: path }\n end\n }\n end", "def add(config_or_block)\n self.class.new @config_list + Array.wrap(config_or_block)\n end", "def gem_config(*names)\n options = names.extract_options!\n options[:type] ||= :memory\n config = {}\n names << :simple if names.empty?\n names.each{|n| config.merge!(send(\"#{n}_config\")) }\n config.merge!(send(\"#{options[:type]}_config\"))\n Storehouse.set_spec(config)\n end", "def create_config \n @config = ::Capistrano::Configuration.new\n if @cloud.debug || @cloud.verbose \n @config.logger.level = @cloud.debug ? ::Capistrano::Logger::MAX_LEVEL : ::Capistrano::Logger::INFO\n else\n @config.logger.level = ::Capistrano::Logger::IMPORTANT\n end\n \n capfile = returning Array.new do |arr|\n Dir[\"#{::File.dirname(__FILE__)}/recipies/*.rb\"].each {|a| arr << \"require '#{a}'\" }\n arr << \"ssh_options[:keys] = '#{@cloud.full_keypair_basename_path}'\"\n \n arr << set_poolparty_roles\n end.join(\"\\n\")\n \n @config.provisioner = self\n @config.cloud = @cloud\n \n @config.load(:string => capfile)\n \n @cloud.deploy_file ? 
@config.load(@cloud.deploy_file) : @config.set(:user, @cloud.user)\n end", "def create_file_config\n template \"config/smart_editor.yml\", \"config/smart_editor.yml\"\n end", "def create_volume(options={}) \n raise \"Volume nickname required\" unless options[:nickname]\n params = {:nickname => options[:nickname],:size => options[:size], :api_version => 1.0}\n params[:description] = options[:description] if options[:description]\n #STDERR.puts \"HERE IS THE URL: #{@api_url}/create_ebs_volume.js (PARAMS: #{params.inspect})\"\n body = RestClient.post @api_url+\"/create_ebs_volume.js\",params\n json = JSON.load(body)\n STDERR.puts \"CREATED_VOLUME: #{json}\"\n json\n rescue => e\n display_exception(e, \"create_volume: #{options.inspect}\")\n end", "def define_vm config, role, index, ip, memory = 512\n id = (index + 1).to_s.rjust(3, '0')\n config.vm.define \"#{role}_#{id}\" do |box|\n box.vm.customize [ \"modifyvm\", :id, \"--memory\", memory ]\n box.vm.box = \"centos_6_3\"\n box.vm.box_url = \"https://dl.dropbox.com/u/7225008/Vagrant/CentOS-6.3-x86_64-minimal.box\"\n box.vm.network :hostonly, \"192.168.34.#{ip}\", :netmask => \"255.255.255.0\"\n box.vm.host_name = \"#{role.downcase.gsub(/[^a-z0-9]+/, '-')}-#{id}.esi.dev\"\n #box.vm.provision :shell, :path => \"script/bootstrap-vm.sh\"\n box.vm.provision :puppet, :module_path => \"modules\" do |p|\n p.manifests_path = \"manifests\"\n p.manifest_file = \"site.pp\"\n end\n end\nend", "def volume\n help = [\n '',\n \"Use: #{me} volume [COMMAND]\",\n '',\n 'Manage cyber-dojo setup volumes',\n '',\n 'Commands:',\n minitab + 'create Creates a new volume',\n minitab + 'rm Removes a volume',\n minitab + 'ls Lists the names of all volumes',\n minitab + 'inspect Displays details of a volume',\n minitab + \"pull Pulls the docker images inside a volume's manifest.json files\",\n '',\n \"Run '#{me} volume COMMAND --help' for more information on a command\",\n ]\n case ARGV[1]\n when 'create' then volume_create\n when 'rm' then volume_rm\n when 'ls' then volume_ls\n when 'inspect' then volume_inspect\n when 'pull' then volume_pull\n else show help\n end\nend", "def build_inventory\n Dir.mkdir 'inventory' unless File.directory?('inventory')\n File.open('inventory/ldap', 'w') do |f|\n f.write(\"controller\\tansible_connection=local\\n\")\n write_machines(f)\n groups = build_groups\n write_groups(groups, f)\n end\n end" ]
[ "0.65570027", "0.6094821", "0.6045592", "0.5926669", "0.58990616", "0.5826286", "0.5758792", "0.5730638", "0.57096523", "0.5702036", "0.56493527", "0.5637249", "0.56333816", "0.5633276", "0.5603496", "0.55852246", "0.5582181", "0.5579442", "0.55753416", "0.5566169", "0.55239165", "0.5517904", "0.547593", "0.54562706", "0.54449123", "0.5415999", "0.54060954", "0.53754586", "0.53754586", "0.53742194", "0.5361732", "0.5347886", "0.53238034", "0.53031546", "0.5290298", "0.52681583", "0.5267258", "0.5266791", "0.5260809", "0.52586484", "0.5252073", "0.52217627", "0.52200514", "0.52200514", "0.51942915", "0.5191117", "0.5188846", "0.51845896", "0.5183675", "0.5176311", "0.51630974", "0.5158843", "0.5156913", "0.51498044", "0.5146546", "0.513286", "0.51299495", "0.5123612", "0.5118307", "0.5114978", "0.5107377", "0.5107363", "0.5107217", "0.5091864", "0.5090156", "0.5087495", "0.5086238", "0.50817955", "0.50716734", "0.50656086", "0.5065374", "0.50571674", "0.5043527", "0.50386626", "0.5028869", "0.5023778", "0.5018842", "0.5017283", "0.5016179", "0.50122577", "0.5009372", "0.5007077", "0.49977657", "0.49973297", "0.49963632", "0.49909577", "0.4990298", "0.49859282", "0.49840295", "0.49801058", "0.49784398", "0.49754834", "0.4970271", "0.4967784", "0.4965865", "0.49461257", "0.4942076", "0.49379352", "0.49371892", "0.4920642" ]
0.720253
0
Prepare the options passed to create the domain
def domain_options @domain_options ||= { name: domain_name, persistent: config[:persistent], cpus: domain_cpus, memory_size: domain_memory, os_type: 'hvm', arch: config[:arch], domain_type: config[:domain_type], nics: [{ type: 'network', network: config[:network_name], bridge: config[:network_bridge_name] }], volumes: domain_volumes } end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_options\n {\n 'action' => \"add#{\"Dynamic\" if domain.dynamic?}Record\",\n 'domain' => domain.host,\n 'field1' => self.host,\n 'type' => TYPE_NUMBERS[self.type],\n 'field2' => self.value,\n 'mxVal' => self.mx,\n 'ttl' => self.ttl.to_s\n }.merge({\n (domain.primary? ? 'did' : 'dynid') => domain.id.to_s\n })\n end", "def create_domain_with_http_info(create_domain_options, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DomainControllerApi.create_domain ...'\n end\n # verify the required parameter 'create_domain_options' is set\n if @api_client.config.client_side_validation && create_domain_options.nil?\n fail ArgumentError, \"Missing the required parameter 'create_domain_options' when calling DomainControllerApi.create_domain\"\n end\n # resource path\n local_var_path = '/domains'\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['*/*'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:body] || @api_client.object_to_http_body(create_domain_options) \n\n # return_type\n return_type = opts[:return_type] || 'DomainDto' \n\n # auth_names\n auth_names = opts[:auth_names] || ['API_KEY']\n\n new_options = opts.merge(\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DomainControllerApi#create_domain\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def domain_create(args)\n raise ArgumentError, \"You can't create a domain with ns records, you must do an update afterwards\" if args.key?(:ns)\n raise ArgumentError, \"You can't create a domain with ds or key records, you must do an update afterwards\" if args.key?(:dsData) || args.key?(:keyData)\n super\n end", "def domain_params\n @opts\n end", "def create_domain\n debug(\"Creating domain #{domain_name}\")\n debug(\"Using options: #{domain_options}\")\n domain = client.servers.create(domain_options)\n prepare_domain(domain)\n domain\n end", "def initialize(domain, options = {})\n @domain, @options = domain, RailsERD.options.merge(options)\n end", "def create_agent_domain\n return false unless validate_params\n puts '########## CREATING DOMAIN ##########'\n dme.create_domain(params[:domain])\n puts '########## CREATING DEFAULT RECORDS ##########'\n create_default_records\n puts '########## CREATING ADDITIONAL RECORDS ##########'\n create_additional_records\n puts '########## RENDERING DATA TO CLIENT##########'\n show_domain\n end", "def _normalize_options(options); end", "def create_domain_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DomainApi.create_domain ...'\n end\n # unbox the parameters from the hash\n service_id = opts[:'service_id']\n version_id = opts[:'version_id']\n # verify the required parameter 'service_id' is set\n if @api_client.config.client_side_validation && 
service_id.nil?\n fail ArgumentError, \"Missing the required parameter 'service_id' when calling DomainApi.create_domain\"\n end\n # verify the required parameter 'version_id' is set\n if @api_client.config.client_side_validation && version_id.nil?\n fail ArgumentError, \"Missing the required parameter 'version_id' when calling DomainApi.create_domain\"\n end\n # resource path\n local_var_path = '/service/{service_id}/version/{version_id}/domain'.sub('{' + 'service_id' + '}', CGI.escape(service_id.to_s)).sub('{' + 'version_id' + '}', CGI.escape(version_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n content_type = @api_client.select_header_content_type(['application/x-www-form-urlencoded'])\n if !content_type.nil?\n header_params['Content-Type'] = content_type\n end\n\n # form parameters\n form_params = opts[:form_params] || {}\n form_params['comment'] = opts[:'comment'] if !opts[:'comment'].nil?\n form_params['name'] = opts[:'name'] if !opts[:'name'].nil?\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'DomainResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"DomainApi.create_domain\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DomainApi#create_domain\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def _process_options(options); end", "def _process_options(options); end", "def create(opts)\n opts = check_params(opts,[:search_base_dns,:servers])\n super(opts)\n end", "def initialize(options)\n @email = options[:email]\n @remove = options[:remove]\n @domain = options[:domain]\n @username = options[:username]\n end", "def prepare_options(options)\n options = self.class.defaults.merge(self.options).merge(options)\n end", "def make_options(options)\n\t\tresult = {\n\t\t\t:order => :asc,\n\t\t\t:nulls => :first\n\t\t}.merge(options)\n\t\traise 'Invalid order given.' unless [:asc,:desc].include?(result[:order])\n\t\traise 'Invalid null order given.' unless [:first,:last].include?(result[:nulls])\n\t\traise 'Unrecognized options given.' 
unless result.size == 2\n\t\tresult\n\tend", "def domain_create(domain, fields)\n unless ([ :period, :registrant, :admin, :tech, :billing, :nservers ] - fields.keys).empty?\n raise ArgumentError, \"Required fields not found\"\n end\n query :domain_register, {\n domain: domain,\n period: (fields[:period] * 12),\n owner_c: fields[:registrant],\n admin_c: fields[:admin],\n tech_c: fields[:tech],\n billing_c: fields[:billing],\n ns_list: fields[:nservers].join(':')\n }\n end", "def create_host_only_network(options)\n end", "def create_domain(opts = {})\n data, _status_code, _headers = create_domain_with_http_info(opts)\n data\n end", "def prepare_options!\n @cli_options = ARGV.dup.uniq\n @save_options = !!@cli_options.delete('--save')\n\n if options.rails?\n sources.concat(detect_sources)\n set_rails_defaults\n end\n end", "def normalize_options(options)\n normalized_options = options.dup\n\n normalized_options\n .merge!(\n format: @format,\n api_key: @api_key,\n expire: request_expires_at(normalized_options)\n )\n .merge!(\n sig: Utils.generate_signature(normalized_options, @api_secret)\n )\n end", "def domain\n try_opt(:domain)\n end", "def domain\n @options['domain']\n end", "def set_domain\n if check_fields_google_domain? # google domain\n @domain.push(:Google)\n elsif check_fields_nas_domain? # NAS domain\n @domain.push(:NAS)\n else \n @domain.push(:Cross)\n end\n notify \"DOMAIN : #{@domain}\"\n end", "def domain_params\n params.require(:domain).permit(:name, :ext_id, :origin_server)\n end", "def init\n # Validate and parse the flags\n OptionParser.new do |o|\n# o.on('-n NAME', '--name NAME') { |n| $name = n }\n o.on('-s', '--simulate') { |b| $simulate = b }\n o.on('-h', '--help') { usage }\n o.parse!\n end\n \n # Get the e-mail address from the arguments\n begin \n $domain = ARGV.pop\n end until ( $domain == nil ) || ( $domain[0] != '-' )\n\n # If we didn't get an address, output the usage info\n usage unless $domain\n\nend", "def configuration_from_options(options); end", "def prepare_options(options)\n options[:id] ||= nil\n options[:name] ||= nil\n options[:slot] ||= nil\n options[:level] ||= 0\n options[:class] ||= nil\n\n options[:level] = options[:level].to_i if options[:level].respond_to? 
'to_i'\n\n # Tier 12 378 or 391 token; their level is correct but they have no slot\n if options[:name] =~ /^(.+) of the Fiery (.+)$/\n options[:slot] = determine_token_slot(options[:name])\n end\n\n special_weapon_options(options) if special_weapon_slot?(options[:slot])\n options[:slot] = normalize_slot(options[:slot])\n\n options\n end", "def normalize_options(options)\n options = options.inject({}) { |h, (k,v)| h[k.downcase] = v ; h }\n options[:rackup] = options[:config] if options[:config]\n options[:port] = options[:port].to_i if options[:port]\n options\n end", "def create(_options)\n raise NotImplementedError\n end", "def normalize_options(options)\n options.merge!(\n :format => @format,\n :api_key => @api_key,\n :expire => Time.now.to_i + 600 # Grant this request 10 minutes\n ).merge!(:sig => Utils.generate_signature(options, @api_secret))\n end", "def build\n {\n patient: patient,\n consultation: ConsultationPresenter.new(consultation),\n definite_article: definite_article,\n current_date: current_date,\n current_time: current_time,\n start_time: options.fetch(:start_time, ''),\n end_time: options.fetch(:end_time, ''),\n rest_time: options.fetch(:rest_time, ''),\n surgical_treatment: options.fetch(:surgical_treatment, '').upcase,\n surgery_tentative_date: options.fetch(:surgery_tentative_date, '').upcase,\n surgery_cost: options.fetch(:surgery_cost, ''),\n consultations: consultations,\n doctor: consultation.doctor,\n emergency_number: emergency_number,\n website: website\n }\n end", "def options(opts)\n options = opts.dup\n options[:platform] = PLATFORM_DETAILS\n options[:app_name] = Mongoid::Config.app_name if Mongoid::Config.app_name\n if (driver_version <=> [2, 13]) >= 0\n wrap_lib = if options[:wrapping_libraries]\n [MONGOID_WRAPPING_LIBRARY] + options[:wrapping_libraries]\n else\n [MONGOID_WRAPPING_LIBRARY]\n end\n options[:wrapping_libraries] = wrap_lib\n end\n options.reject{ |k, _v| k == :hosts }.to_hash.symbolize_keys!\n end", "def domain_create(args)\n response = send_request(domain_create_xml(args))\n\n get_result(:xml => response, :callback => :domain_create_process)\n end", "def initialize(domain); @domain = domain; end", "def create(opts)\n opts = check_params(opts,[:description,:local_policy,:remote_policy,:requested_hosts])\n super(opts)\n end", "def initialize(options={})\n @options = {:host => nil,\n :vendor => \"Dell\",\n :model => nil,\n :version => \"2c\",\n :community => \"public\"\n }\n @options.merge!(options) if options.is_a?(Hash)\n @debug = false\n end", "def build_options(options)\n validate_options(options)\n merge_default_options_into(options)\n end", "def create(opts)\n opts = check_params(opts,[:servers])\n super(opts)\n end", "def setup_options(options = T.unsafe(nil)); end", "def build_request_options!(datum)\n datadog_configuration(datum[:host]).options_hash.merge(@default_options)\n end", "def run\n super\n\n # Set the dns_record to the appropriate suffix\n dns_record = @entity.name\n\n # Handle cases of *.test.com (pretty common when grabbing\n # DNSRecords from SSLCertificates)\n if dns_record[0..1] == \"*.\"\n dns_record = dns_record[2..-1]\n end\n \n if @options[:subdomain_list]\n subdomain_list = @options['subdomain_list']\n else\n # use the deepmagic list\n subdomain_list = IO.readlines(\"#{Rails.root}/data/dns_sub.list\")\n # Add a builtin domain list \n #subdomain_list = [\"mx\", \"mx1\", \"mx2\", \"www\", \"ww2\", \"ns1\", \"ns2\", \"ns3\", \"test\", \"mail\", \"owa\", \"vpn\", \"admin\",\n # \"gateway\", \"secure\", \"admin\", 
\"service\", \"tools\", \"doc\", \"docs\", \"network\", \"help\", \"en\", \"sharepoint\", \"portal\",\n # \"public\", \"private\", \"pub\", \"zeus\", \"mickey\", \"time\", \"web\", \"it\", \"my\", \"photos\", \"safe\", \"download\", \"dl\", \n # \"search\", \"staging\"]\n end\n\n @task_logger.good \"Using subdomain list: #{subdomain_list}\"\n\n begin\n # Check for wildcard DNS, modify behavior appropriately. (Only create entities\n # when we know there's a new host associated)\n if Resolv.new.getaddress(\"noforkingway#{rand(100000)}.#{dns_record}\")\n wildcard_domain = true \n @task_logger.error \"WARNING! Wildcard domain detected, only saving validated domains/hosts.\"\n end\n rescue Resolv::ResolvError\n @task_logger.good \"Looks like no wildcard dns. Moving on.\"\n end\n\n subdomain_list.each do |sub|\n sub = sub.chomp\n begin\n # Calculate the domain name\n if @options[:mashed_domains]\n # blatently stolen from HDM's webinar on password stealing, try without a dot to see\n # if this domain has been hijacked by someone - great for finding phishing attempts\n domain = \"#{sub}#{dns_record}\"\n else \n domain = \"#{sub}.#{dns_record}\"\n end\n\n # Try to resolve\n resolved_address = Resolv.new.getaddress(domain)\n @task_logger.good \"Resolved Address #{resolved_address} for #{domain}\" if resolved_address\n \n # If we resolved, create the right entities\n if resolved_address\n unless wildcard_domain\n @task_logger.good \"Creating domain and host entities...\"\n # create new host and domain entitys\n d = create_entity(Entities::DnsRecord, {:name => domain })\n h = create_entity(Entities::Host, {:name => resolved_address})\n else\n # Check to make sure we don't already have this host, if we don't \n # we probably want to save the domain as a new entity (and the host)\n if Entities::Host.where(:name => resolved_address).count == 0\n d = create_entity(Entities::DnsRecord, {:name => domain })\n h = create_entity(Entities::Host, {:name => resolved_address})\n end\n end\n end\n rescue Exception => e\n @task_logger.error \"Hit exception: #{e}\"\n end\n end\n end", "def initialize(options={})\n options = Subtledata.options.merge(options)\n Configuration::VALID_OPTIONS_KEYS.each do |key|\n send(\"#{key}=\", options[key])\n end\n end", "def options\n\t\treturn {\n\t\t\tdomain: self.domain,\n\t\t\tpath: self.path,\n\t\t\tsecure: self.secure?,\n\t\t\thttponly: self.httponly?,\n\t\t\texpires: self.expires,\n\t\t\tmax_age: self.max_age,\n\t\t\tversion: self.version,\n\t\t}\n\tend", "def options() end", "def initialize(options={})\n options = Slideshare.options.merge(options)\n Configuration::VALID_OPTIONS_KEYS.each do |key|\n send(\"#{key}=\", options[key])\n end\n end", "def create_domain(domain_name)\n Adapter.create_domain(domain_name)\n self[domain_name]\n end", "def options=(options = {}); @address_impl.setOptions(convert_options(options)); end", "def process_options\n \n end", "def process_options\n \n end", "def process_options\n \n end", "def initialize(options)\n options.symbolize_keys.each do |key, value|\n raise ArgumentError, \"Invalid key '#{key}'\" unless VALID_KEYS.include?(key)\n instance_variable_set(\"@#{key}\", value)\n end\n\n %i[name active_directory_authority resource_manager_url].each do |key|\n unless instance_variable_get(\"@#{key}\")\n raise ArgumentError, \"Mandatory argument '#{key}' not set\"\n end\n end\n end", "def prepare(options = {})\n raise \"prepare not implemented\"\n end", "def setup(options); end", "def setup(options); end", "def 
create_domain(create_domain_options, opts = {})\n data, _status_code, _headers = create_domain_with_http_info(create_domain_options, opts)\n data\n end", "def build_domain \n unless self.domain\n self.domain = URI.parse(self.url).host \n self.save\n end\n end", "def initialize(options = {})\n @options = DEFAULT_OPTIONS.merge(options)\n parse_domain_name\n REQUIRED_OPTIONS.each { |key| raise \"No #{key} specified\" unless @options.key?(key) }\n @password = @options.delete(:password)\n @session_id = @options[:session_id]\n @api_path = @options[:path]\n @ca_certs = @options[:ca_certs]\n\n # The temporary file to store the CA certificates will be created when needed:\n @ca_file = nil\n end", "def initialize(options)\n options.symbolize_keys!\n\n if options.has_key?(:ldap_uid)\n @ldap_uid = options.fetch(:ldap_uid)\n elsif options.has_key?(:ldap_person)\n @ldap_person = options.fetch(:ldap_person)\n else\n raise InvalidOption, \"Expects :ldap_uid or :ldap_person as options\"\n end\n end", "def initialize(_options = {}); end", "def initialize(options, args)\n defaults = {\n interval: 1\n }\n @options = defaults.merge((options || {}).each { |k, v| { k => v } })\n return unless args.first\n\n resolve_addressing args.first\n normalise_options\n end", "def allowed_options\n []\n\n # :subdomain_list => list of subdomains to brute\n # :mashed_domains => try domain names w/o a dot, see if anyone's hijacked a common \"subdomain\"\n # :mashed_domains - blatently stolen from @hdmoore's webinar on password stealing, try without a dot to see\n # if this domain has been hijacked by someone - great for finding phishing attempts\n \n\nend", "def initialize(options={})\n puts \"initiliaze entry\"\n @options = OPTIONS.merge(options) \n end", "def initialize(options); @options = options end", "def set_domain( domain )\n if StringOption.domain_ok?( domain )\n @domain = domain\n else\n @domain = nil\n raise \"invalid domain: #{ domain.to_s }\"\n end \n end", "def initialize(options = {})\n # merge default options with options hash\n o = {\n :domain => 'http://www.dev.usys.org',\n :key => '',\n :user_agent => 'FsCommunicator/0.1 (Ruby)', # should be overridden by options user_agent\n :session => nil,\n :handle_throttling => false,\n :logger => nil,\n :timeout => nil\n }.merge(options)\n @domain = o[:domain]\n @key = o[:key]\n @user_agent = o[:user_agent]\n @session = o[:session]\n @handle_throttling = o[:handle_throttling]\n @logger = o[:logger]\n @timeout = o[:timeout]\n end", "def parse_options(options)\n parsed_options = \"\"\n parsed_options << translate_option_key(:admin_email, \"ServerAdmin\", options)\n parsed_options << translate_option_key(:document_root, \"DocumentRoot\", options)\n parsed_options << translate_option_key(:aliases, \"ServerAlias\", options)\n parsed_options << translate_option_key(:directory_index,\"DirectoryIndex\", options)\n parsed_options << translate_option_key(:error_log, \"ErrorLog\", options)\n parsed_options << translate_option_key(:custom_log, \"CustomLog\", options)\n end", "def option_params\n params.require(:option).permit(:auto_complete, :validate_address, :pobox_warning, :streetnum_warning, :domain)\n end", "def revive options\n setup options\n end", "def configure(*args)\n @options = args[0] || {}\n @fetcher_opts = @options.dup\n @fetcher_opts.delete(:backends_file)\n @fetcher_opts.delete(:domainmap_file)\n @fetcher_opts.delete(:interval)\n end", "def full_domain(options= {})\n # assume that if port is used in domain config, it should\n # be added to the end of the full 
domain for links to work\n # This concerns usually mostly testing and development\n default_host, default_port = APP_CONFIG.domain.split(':')\n port_string = options[:port] || default_port\n if options[:testmode] == true \n default_host = 'taxi-hawk.com'\n else \n default_host = 'yelo.red' \n end \n if domain.present? && use_domain? # custom domain\n dom = domain\n else # just a subdomain specified\n dom = \"#{self.ident}.#{default_host}\"\n dom += \":#{port_string}\" unless port_string.blank?\n end\n\n if options[:with_protocol] \n #dom = \"#{(APP_CONFIG.always_use_ssl.to_s == \"true\" ? \"https://\" : \"http://\")}#{dom}\"\n dom = \"#{\"https://\"}#{dom}\"\n end\n\n if options[:admin]\n\n dom = \"#{dom}/en/admin/getting_started_guide\"\n puts \" the domain link will be : \"\n\n end\n\n return dom\n\n end", "def process_options\n \n end", "def prepare_options\n normalize_options\n String.allow_color = !options[:no_color]\n end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end", "def options; end" ]
[ "0.6602272", "0.6290079", "0.62878805", "0.6255625", "0.6244514", "0.6240076", "0.60917586", "0.5976988", "0.5966741", "0.593528", "0.593528", "0.58982664", "0.5893893", "0.5780929", "0.5748723", "0.5740976", "0.57197964", "0.5717144", "0.5716074", "0.5715341", "0.5670532", "0.5667497", "0.5666545", "0.56593823", "0.5648598", "0.56477106", "0.5645987", "0.56309897", "0.56257313", "0.5608192", "0.560243", "0.55998665", "0.5585311", "0.55798596", "0.5574327", "0.55714536", "0.5561913", "0.5561422", "0.55427265", "0.55400944", "0.55397034", "0.5537979", "0.55276644", "0.55196106", "0.5508633", "0.5488219", "0.5461686", "0.54445237", "0.54445237", "0.54445237", "0.54336274", "0.54146963", "0.53987324", "0.53987324", "0.5396275", "0.5385971", "0.5383849", "0.5382611", "0.5379175", "0.53739804", "0.5373493", "0.53702563", "0.53651386", "0.53516424", "0.5350764", "0.5344251", "0.53348", "0.53315854", "0.53242373", "0.5318232", "0.5310941", "0.53055733", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706", "0.53052706" ]
0.69818443
0
Returns the domain memory size in KBs
def domain_memory (config[:memory] || 512) * 1024 end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def memory\n domain_info[:memory]\n end", "def memory\n domain_info[:memory]\n end", "def memsize\n end", "def max_memory\n domain_info[:maxMem]\n end", "def max_memory\n domain_info[:maxMem]\n end", "def size_mb \n return size / 1048576 # (1024 * 1024) \n end", "def memsize; RAtlas::memsize(@storage);end", "def host_memory\n case RbConfig::CONFIG['host_os']\n when /darwin/\n Integer(`sysctl -n hw.memsize`.to_i / 1024 / 1024)\n when /linux/\n Integer(`grep 'MemTotal' /proc/meminfo | sed -e 's/MemTotal://' -e 's/ kB//'`.to_i / 1024)\n else\n Integer(`wmic ComputerSystem get TotalPhysicalMemory`.split(\"\\n\")[2].to_i / 1024 / 1024)\n end\nend", "def size\n @real_time_memory.size + @user_memory.size\n end", "def size\n vm.hardware_profile.vm_size\n end", "def size\n\t\t7500\n\tend", "def d_size\n @desc[:size].to_i\n end", "def size_used\n info[\"size-used\"]\n end", "def get_physical_memory\n # only support Linux and FreeBSD right now\n os = Facter['kernel']\n return if os.nil? or (os.value != 'Linux' and os.value != 'FreeBSD')\n\n physical_memory = 0\n dmi_data = get_dmi_data\n\n return if dmi_data.nil? or dmi_data['Memory Device'].nil?\n\n dmi_data['Memory Device'].each do |mem_dev|\n\n size = mem_dev['Size']\n form_factor = mem_dev['Form Factor']\n locator = mem_dev['Locator']\n # Some systems report little chunks of memory other than\n # main system memory as Memory Devices, the 'DIMM' as\n # form factor seems to indicate main system memory.\n # Unfortunately some DIMMs are reported with a form\n # factor of '<OUT OF SPEC>'. In that case fall back to\n # checking for signs of it being a DIMM in the locator\n # field.\n if (size != 'No Module Installed' &&\n ((form_factor == 'DIMM' || form_factor == 'FB-DIMM' || form_factor == 'SODIMM') ||\n (form_factor == '<OUT OF SPEC>' && locator =~ /DIMM/)))\n megs, units = size.split(' ')\n\n next if units != 'MB'\n physical_memory += megs.to_i;\n end\n end\n physical_memory\n end", "def size\n stats[:dataSize]\n end", "def size\n stats[:dataSize]\n end", "def computeMemory()\n mem = `grep 'MemTotal' /proc/meminfo | sed -e 's/MemTotal://' -e 's/ kB//'`.to_i\n mem / 1024 / 4\n end", "def get_size\n\t\tend", "def memory(in_gb=false)\n line = ssh_cmd 'cat /proc/meminfo | grep MemTotal'\n matches = line.match /(?<size>\\d+)\\s+(?<unit>kB|mB|gB|B)/\n size = matches[:size].to_i\n multipliers = {kB: 1024, mB: 1024**2, gB: 1024**3, B: 1}\n size *= multipliers[matches[:unit].to_sym]\n in_gb ? 
size / 1024**3 : size\n end", "def to_mb\n self / 1048576\n end", "def size\n @size \n end", "def size\n @size \n end", "def disk_usage_mb \n return disk_usage \n end", "def d_size\n @diskObj.info[:capacity]\n end", "def size\n @info[:size]\n end", "def file_total_size_gb\n ((total_ram_mb.to_f * 2).to_f / 1024).ceil\n end", "def get_size\n\t\treturn @size\n\tend", "def d_size\n @capacity / @blockSize\n end", "def memory_usage \n\t`ps -o rss= -p #{Process.pid}`.to_i # in kilobytes \nend", "def size\n @size.size\n end", "def size\n info[\"size\"]\n end", "def size_in_bytes\n to_i description['SizeInBytes']\n end", "def kilobytes\n self * KILOBYTE\n end", "def getSize() \n @obj.getSize()\n end", "def memory_total\n File.foreach('/proc/meminfo').each do |line|\n return line.split(' ')[1].to_i if line =~ /MemTotal:/\n end\n end", "def size\n\t\treturn @storage.size\n\tend", "def size\n return instance_get(:size)\n end", "def mem_usage\n ScoutApm::Instruments::Process::ProcessMemory.new(@agent_context).rss\n end", "def byte_size()\n @value.length * 4\n end", "def byte_size()\n @value.length * 4\n end", "def byte_size()\n if @record and RECORD_INFO[@record.type].size > 0 then\n RECORD_INFO[@record.type].size * @value.length\n else\n sum = 0\n @value.each do |val|\n sum += (val.length % 2 == 0) ? val.length : val.length + 1\n end\n sum\n end\n end", "def memused\n @memtotal - @memfree\n end", "def size_in_byte\n return @size_in_byte\n end", "def memory_fraction(fraction = 4)\n os_memory / 1024 / fraction\n end", "def d_size\n uint64(header, 'size') / @blockSize\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def size\n @size\n end", "def to_gb\n self / 1073741824\n end", "def memory_usage\n return File.read('/proc/self/status').match(/VmRSS:\\s+(\\d+)/)[1].to_i * 1024\nend", "def size\n @gapi[\"size\"]\n end", "def size\n\t\tstat[:size]\n\tend", "def size\n\t\tstat[:size]\n\tend", "def max_allocated_storage\n data[:max_allocated_storage]\n end", "def size\n size = popen(%W(du -s), full_path).first.strip.to_i\n (size.to_f / 1024).round(2)\n end", "def size\n Float::MAX.to_i\n end", "def size_mum_record\n 8\n end", "def redis_memory_usage\n redis_info[\"used_memory_human\"]\n end", "def size\n execute_request(:get, '/size').body.to_i\n end", "def C_GetObjectSize()\n @pk.C_GetObjectSize(@sess, @obj)\n end", "def get_system_used_memory_mb\n # (`free -ml | grep 'Mem:' | awk -F' ' '{ print $3 }'`.strip.to_i rescue 0).round(MEMORY_PRECISION)\n get_system_memory_info_mb[:used_memory]\n end", "def md_size\n self[:md_size]\n end", "def disk_current_size\n\t\treturn 1024\n\tend", "def _size(domain, key)\n dmid = get_dmid(domain)\n\n sql = <<-EOS\n SELECT length FROM file\n WHERE dmid = #{dmid} AND dkey = '#{@my.quote(key)}'\n LIMIT 1\n EOS\n\n res = query(sql).fetch_row\n return res[0].to_i if res && res[0]\n raise MogileFS::Backend::UnknownKeyError\n end", "def size\n\t\t\[email protected]\n\t\tend", "def sizeX()\n return @device.sizeX() ;\n end", "def 
size\n\n DataMapper.repository(@dm_repository) { DmExpression.count }\n end", "def part_size_in_bytes\n data.part_size_in_bytes\n end", "def size\n return @size\n end", "def size\n return @size\n end", "def size\n @info.size\n end", "def size\n @info.size\n end", "def size\n @info.size\n end", "def size ; data['size'] ; end", "def guest_memory\n begin\n host_memory / 2\n rescue\n 8192\n end\nend", "def byte_size; size.y * line_byte_size; end", "def megabytes\n self * MEGABYTE\n end", "def size\n @size\n end", "def size\n @size\n end" ]
[ "0.76297045", "0.76297045", "0.75725186", "0.7562596", "0.7562596", "0.7260716", "0.712562", "0.7067322", "0.6961244", "0.6940051", "0.6827923", "0.68090016", "0.6799701", "0.6757472", "0.6748452", "0.6748452", "0.6739316", "0.6707153", "0.6704731", "0.6701376", "0.669562", "0.669562", "0.6678137", "0.6674894", "0.6674114", "0.66725004", "0.6629024", "0.657502", "0.65663", "0.65447766", "0.65138763", "0.65009415", "0.64793617", "0.6463178", "0.6462983", "0.6454287", "0.6447442", "0.6444946", "0.6421796", "0.6421796", "0.64168984", "0.64026785", "0.64014345", "0.63929266", "0.63851726", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6375674", "0.6371188", "0.6370143", "0.6365237", "0.636004", "0.636004", "0.6357155", "0.63565695", "0.63530105", "0.63504726", "0.634421", "0.63397294", "0.6339162", "0.6320419", "0.63038623", "0.6300607", "0.6297798", "0.6293421", "0.62932533", "0.62921965", "0.62882185", "0.62868935", "0.62868935", "0.6283375", "0.6283375", "0.6283375", "0.62690896", "0.6266057", "0.62657464", "0.6255447", "0.6253604", "0.6253604" ]
0.7438267
5
Return domain cpu count
def domain_cpus config[:cpus] || 1 end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cpu_time_used\n domain_info[:cpuTime]\n end", "def cpu_time_used\n domain_info[:cpuTime]\n end", "def virtual_cpus\n domain_info[:nrVirtCpu]\n end", "def virtual_cpus\n domain_info[:nrVirtCpu]\n end", "def count\n\t\t\t\t# CPU count can change during the program runtime\n\t\t\t\tcpuinfo.count { |x| x.start_with?('processor') }\n\t\t\tend", "def cpu_count\n File.read(\"/proc/cpuinfo\").split(\"\\n\").select { |s| s.start_with?(\"processor\\t:\") }.size\n end", "def physical_processor_count\n @physical_processor_count.value\n end", "def get_number_of_cpu\n return `cat /proc/cpuinfo | grep processor | wc -l`.to_i\n end", "def cpu_count\n File.read(\"/proc/cpuinfo\").split(\"\\n\").count { |s| s.start_with?(\"processor\\t:\") }\n end", "def get_cpu_core_count\n # only support Linux for now\n os = Facter['kernel'].value\n physicalid = nil\n coreid = nil\n corecount = nil\n cores = {}\n if os == 'Linux'\n IO.foreach(\"/proc/cpuinfo\") do |line|\n if line =~ /^processor\\s*: (\\d+)/\n physicalid = nil\n coreid = nil\n elsif line =~ /^physical id\\s*: (\\d+)/\n physicalid = $1\n elsif line =~ /^core id\\s*: (\\d+)/\n coreid = $1;\n end\n if physicalid && coreid\n cores[\"#{physicalid}:#{coreid}\"] = 1;\n end\n end # IO.foreach\n corecount = cores.size\n end # if statement\n return corecount\n end", "def ncpus\n case os\n when 'darwin'\n Integer(`hwprefs cpu_count`)\n when 'linux'\n cores = File.read(\"/proc/cpuinfo\").scan(/core id\\s+: \\d+/).uniq.size\n cores > 0 ? cores : 1\n else raise \"don't know how to determine CPU count on #{os}\"\n end\n end", "def physical_processor_count\n @physical_processor_count ||= begin\n ppc =\n case RbConfig::CONFIG[\"target_os\"]\n when /darwin[12]/\n IO.popen(\"/usr/sbin/sysctl -n hw.physicalcpu\").read.to_i\n when /linux/\n cores = {} # unique physical ID / core ID combinations\n phy = 0\n File.read(\"/proc/cpuinfo\").scan(/^physical id.*|^core id.*/) do |ln|\n if ln.start_with?(\"physical\")\n phy = ln[/\\d+/]\n elsif ln.start_with?(\"core\")\n cid = \"#{phy}:#{ln[/\\d+/]}\"\n cores[cid] = true unless cores[cid]\n end\n end\n cores.count\n when /mswin|mingw/\n require 'win32ole'\n result_set = WIN32OLE.connect(\"winmgmts://\").ExecQuery(\n \"select NumberOfCores from Win32_Processor\"\n )\n result_set.to_enum.collect(&:NumberOfCores).reduce(:+)\n else\n processor_count\n end\n # fall back to logical count if physical info is invalid\n ppc > 0 ? ppc : processor_count\n end\n end", "def processor_count\n case RbConfig::CONFIG['host_os']\n when /darwin9/\n `hwprefs cpu_count`.to_i\n when /darwin/\n (`which hwprefs` != '' ? 
`hwprefs thread_count` : `sysctl -n hw.ncpu`).to_i\n when /linux|cygwin/\n `grep -c ^processor /proc/cpuinfo`.to_i\n when /(net|open|free)bsd/\n `sysctl -n hw.ncpu`.to_i\n when /mswin|mingw/\n require 'win32ole'\n wmi = WIN32OLE.connect(\"winmgmts://\")\n cpu = wmi.ExecQuery(\"select NumberOfLogicalProcessors from Win32_Processor\")\n cpu.to_enum.first.NumberOfLogicalProcessors\n when /solaris2/\n `psrinfo -p`.to_i # this is physical cpus afaik\n else\n $stderr.puts \"Unknown architecture ( #{RbConfig::CONFIG[\"host_os\"]} ) assuming one processor.\"\n 1\n end\nend", "def cpu_cores\n `cat /proc/cpuinfo | grep bogomips | wc -l`.strip\n end", "def number_of_cpu_cores\n case RbConfig::CONFIG['host_os']\n when /linux/\n Dir.glob('/sys/devices/system/cpu/cpu[0-9]*').count\n when /darwin|bsd/\n Integer(`sysctl -n hw.ncpu`)\n when /solaris/\n Integer(`kstat -m cpu_info | grep -w core_id | uniq | wc -l`)\n else\n 5\n end\n end", "def maxcpucount\n case RbConfig::CONFIG['host_os']\n when /darwin9/\n `hwprefs cpu_count`.to_i\n when /darwin/\n ((`which hwprefs` != '') ? `hwprefs thread_count` : `sysctl -n hw.ncpu`).to_i\n when /linux/\n `cat /proc/cpuinfo | grep processor | wc -l`.to_i\n when /freebsd/\n `sysctl -n hw.ncpu`.to_i\n when /mswin|mingw/\n require 'win32ole'\n wmi = WIN32OLE.connect(\"winmgmts://\")\n cpu = wmi.ExecQuery(\"select NumberOfLogicalProcessors from Win32_Processor\")\n cpu.to_enum.first.NumberOfLogicalProcessors\n end\nend", "def numvcpus\n begin\n os_cpu_cores\n rescue\n 4\n end\nend", "def processor_count\n @processor_count.value\n end", "def os_cpu_cores\n case RbConfig::CONFIG['host_os']\n when /darwin/\n Integer(`sysctl -n hw.ncpu`)\n when /linux/\n Integer(`cat /proc/cpuinfo | grep processor | wc -l`)\n else\n raise StandardError, \"Unsupported platform\"\n end\nend", "def cpu_metrics\n super\n end", "def processor_count; end", "def processor_count; end", "def percent_cpu\n (metrics['CPU'] / 12).round\n end", "def cpu\n cpus = []\n procfs_file(\"stat\") do |file|\n file.read.scan(CPU_DATA) do |i, user, nice, system, idle|\n cpus << Cpu.new(i.to_i, user.to_i, system.to_i, nice.to_i, idle.to_i)\n end\n end\n cpus\n end", "def guest_cpus\n begin\n host_cpus / 2\n rescue\n 4\n end\nend", "def processor_count # rubocop:disable all\n @processor_count ||=\n begin\n if Overcommit::OS.windows?\n require 'win32ole'\n result = WIN32OLE.connect('winmgmts://').ExecQuery(\n 'select NumberOfLogicalProcessors from Win32_Processor'\n )\n result.to_enum.collect(&:NumberOfLogicalProcessors).reduce(:+)\n elsif File.readable?('/proc/cpuinfo')\n IO.read('/proc/cpuinfo').scan(/^processor/).size\n elsif File.executable?('/usr/bin/hwprefs')\n IO.popen('/usr/bin/hwprefs thread_count').read.to_i\n elsif File.executable?('/usr/sbin/psrinfo')\n IO.popen('/usr/sbin/psrinfo').read.scan(/^.*on-*line/).size\n elsif File.executable?('/usr/sbin/ioscan')\n IO.popen('/usr/sbin/ioscan -kC processor') do |out|\n out.read.scan(/^.*processor/).size\n end\n elsif File.executable?('/usr/sbin/pmcycles')\n IO.popen('/usr/sbin/pmcycles -m').read.count(\"\\n\")\n elsif File.executable?('/usr/sbin/lsdev')\n IO.popen('/usr/sbin/lsdev -Cc processor -S 1').read.count(\"\\n\")\n elsif File.executable?('/usr/sbin/sysctl')\n IO.popen('/usr/sbin/sysctl -n hw.ncpu').read.to_i\n elsif File.executable?('/sbin/sysctl')\n IO.popen('/sbin/sysctl -n hw.ncpu').read.to_i\n else\n # Unknown platform; assume 1 processor\n 1\n end\n end\n end", "def processor_count\n @processor_count ||= begin\n os_name = 
RbConfig::CONFIG[\"target_os\"]\n if os_name =~ /mingw|mswin/\n require 'win32ole'\n result = WIN32OLE.connect(\"winmgmts://\").ExecQuery(\n \"select NumberOfLogicalProcessors from Win32_Processor\")\n result.to_enum.collect(&:NumberOfLogicalProcessors).reduce(:+)\n elsif File.readable?(\"/proc/cpuinfo\")\n IO.read(\"/proc/cpuinfo\").scan(/^processor/).size\n elsif File.executable?(\"/usr/bin/hwprefs\")\n IO.popen(\"/usr/bin/hwprefs thread_count\").read.to_i\n elsif File.executable?(\"/usr/sbin/psrinfo\")\n IO.popen(\"/usr/sbin/psrinfo\").read.scan(/^.*on-*line/).size\n elsif File.executable?(\"/usr/sbin/ioscan\")\n IO.popen(\"/usr/sbin/ioscan -kC processor\") do |out|\n out.read.scan(/^.*processor/).size\n end\n elsif File.executable?(\"/usr/sbin/pmcycles\")\n IO.popen(\"/usr/sbin/pmcycles -m\").read.count(\"\\n\")\n elsif File.executable?(\"/usr/sbin/lsdev\")\n IO.popen(\"/usr/sbin/lsdev -Cc processor -S 1\").read.count(\"\\n\")\n elsif File.executable?(\"/usr/sbin/sysconf\") and os_name =~ /irix/i\n IO.popen(\"/usr/sbin/sysconf NPROC_ONLN\").read.to_i\n elsif File.executable?(\"/usr/sbin/sysctl\")\n IO.popen(\"/usr/sbin/sysctl -n hw.ncpu\").read.to_i\n elsif File.executable?(\"/sbin/sysctl\")\n IO.popen(\"/sbin/sysctl -n hw.ncpu\").read.to_i\n else\n $stderr.puts \"Unknown platform: \" + RbConfig::CONFIG[\"target_os\"]\n $stderr.puts \"Assuming 1 processor.\"\n 1\n end\n end\n end", "def cpu_usage\n containers = []\n @hosts.each do |host|\n Docker.url = \"tcp://#{host}:#{@docker_port}/\"\n containers.concat Docker::Container.all(all: true, filters: { ancestor: [@ws_image],status:['running'] }.to_json)\n end\n \n cpuPercent = 0.0\n status = containers.first.stats\n \n cpuDelta = status['cpu_stats']['cpu_usage']['total_usage'] - status['precpu_stats']['cpu_usage']['total_usage']\n systemDelta = status['cpu_stats']['system_cpu_usage'] - status['precpu_stats']['system_cpu_usage']\n\n if systemDelta > 0.0 and cpuDelta > 0.0 \n cpuPercent = (cpuDelta.round(16) / systemDelta.round(16)).round(16) * status['cpu_stats']['cpu_usage']['percpu_usage'].size * 100.0\n end\n return cpuPercent.round(2)\n end", "def load_per_cpu(_)\n cpu_per_source = {}\n @client.query(\n '(count(node_cpu{mode=\"system\"})by(instance))'\n ).each do |result|\n source = result['metric']['instance']\n cpu_per_source[source] = result['value'][1]\n end\n\n metrics = []\n @client.query('node_load5').each do |result|\n source = result['metric']['instance']\n value = result['value'][1].to_f.round(2)\n load_on_cpu = value / cpu_per_source[source].to_f\n log.debug(\n \"[load_per_cpu] value: '#{load_on_cpu}', source: '#{source}'\"\n )\n metrics << {\n 'source' => source,\n 'value' => load_on_cpu\n }\n end\n metrics\n end", "def cpu_usage_information\n super\n end", "def domain_count\n @attributes[:domain_count]\n end", "def cpus(arg=nil)\n set_or_return(:cpus, arg, :kind_of => Integer)\n end", "def get_domain_count\n query_push 'Command' => 'GetDomainCount'\n get_response\n end", "def detect_cpu_cores_nix\n # On Unix platforms trying to use etc module to determine accessible\n # number of cores\n require 'etc'\n Etc.nprocessors\n end", "def xen_capacity\n return nil unless self.is_xen_dom0? 
and !self.model.nil?\n\n cap = {}\n cap[:cpu_cap] = self.model.cpu_cores\n cap[:mem_cap] = self.model.megabytes_memory\n \n cap[:cpu_use] = 0\n cap[:mem_use] = 0\n\n self.xen_guests.each do |n|\n unless n.guest.model.nil?\n cap[:cpu_use] += n.guest.model.cpu_cores\n cap[:mem_use] += n.guest.model.megabytes_memory\n else\n cap[:cpu_use] += 2\n cap[:mem_use] += 4096\n end\n end\n\n cap\n end", "def get_cpu\n return @m_cpu\n end", "def CPU_max_number_of_cpu(value)\n if resource.class.name.ends_with?(\"Container\")\n numcpus = resource.vim_performance_states.last.state_data[:numvcpus]\n else\n numcpus = if resource.methods.include?(:cpu_total_cores) then resource.cpu_total_cores else 0 end\n end\n [value, numcpus].max.to_i\n end", "def calc_total_gpus\n @total_gpus = 0\n #if @cluster_title.eql?(\"Ruby\")\n # # For the Ruby cluster, pbsnodes takes into account two debug nodes with two GPUs along with the other Ruby GPU nodes. The debug nodes will not be considered in the total GPUs and unallocated GPUs calculation, as they cannot be allocated as part of a regular job request with other GPU nodes. Here np = 20 is the number of processors for a GPU node rather than a debug node (np = 16) in a Ruby cluster.\n # @total_gpus = nodes_info.scan(/np = 20/).size\n # else\n # @total_gpus = nodes_info.lines(\"\\n\\n\").size\n #end\n end", "def get_proc_count\n procs = Facter.value('processors')\n if procs.key?('count') then\n procs['count'].to_i\n else\n 1\n end\nend", "def cores\n return @cores if @cores\n count = ssh_cmd %q{cat /proc/cpuinfo|grep 'processor\\s*:' | wc -l}\n @cores = (count ? count.to_i : 1)\n end", "def getCPUUsage\n\tRandom.new.rand(0..100) \nend", "def cpu_percent(attrs)\n hertz = cpu_tck\n sec = uptime - attrs[:proc_uptime] / hertz\n if attrs[:cpu_time] > 0 && sec > 0\n cpu = (attrs[:cpu_time] * 1000 / hertz) / sec\n \"#{cpu / 10}.#{cpu % 10}\".to_f\n else\n return 0.0\n end\n end", "def get_host_utilization(host, model=nil, limit=90)\n if model\n return nil unless host_has_cpu_model? 
host, model\n end\n return nil if host.runtime.inMaintenanceMode\n return nil unless host.overallStatus == 'green'\n\n cpu_utilization = cpu_utilization_for host\n memory_utilization = memory_utilization_for host\n\n return nil if cpu_utilization > limit\n return nil if memory_utilization > limit\n\n [ cpu_utilization + memory_utilization, host ]\n end", "def total_capacity_job_nodes_cpu\n cpus = job_nodes.map {|n| n['status']['capacity']['cpu'].to_i}\n cpus.inject {|sum,n| sum + n}\n end", "def report_cpu\n self.report('cpu_report')\n end", "def concurrent_count\n debug(\"Getting puppet status\")\n\n running = 0\n\n @puppet.status do |resp|\n begin\n running += resp[:body][:data][:running].to_i\n rescue Exception => e\n debug(\"Failed to get node status: #{e}, continuing\")\n end\n end\n\n running\nend", "def determine_cpu\n cpu = @info[:cpu] = {}\n\n lscpu = @shell.query('LSCPU', 'lscpu')\n if lscpu.empty?\n cpuinfo = @shell.query('cat /proc/cpuinfo')\n count = cpuinfo.lines.select { |l| l =~ /^processor\\s*: [0-9]/}\n speed = cpuinfo.lines.select { |l| l =~ /MHz/ }\n cpu[:count] = count.size\n cpu[:speed] = speed[0].to_s.gsub(/.* /, '')\n else\n cpu[:count] = lscpu.select { |l| l =~ /CPU\\(s\\)/ }.gsub(/.* /, '')\n cpu[:speed] = lscpu.select { |l| l =~ /MHz/ }.gsub(/.* /, '')\n end\n end", "def processors_count\n @processors.length\n end", "def processor_percents\n reply = `sar -P ALL #{@sleep} 1`\n lines = reply.split(\"\\n\")\n header = lines.shift\n num_cpus = header[/\\((\\d+) CPU\\)$/, 1].to_i\n # read a cycle\n 3.times { lines.shift }\n @cpus = num_cpus.times.map do\n line = lines.shift\n line.chomp!\n # the total usage (100% - idle)\n 100.0 - line[/\\s+([\\d\\.]+)$/,1].to_f\n end\n end", "def cpu_options\n data[:cpu_options]\n end", "def total_allocatable_job_nodes_cpu\n cpus = job_nodes.map {|n| n['status']['allocatable']['cpu'].to_i}\n cpus.inject {|sum,n| sum + n}\n end", "def cpu_time\n @cpu_time_finish - @cpu_time_start\n end", "def get_cpu(snmp = nil)\n snmp = snmp_manager unless snmp\n\n get_routing_engines(snmp) if @routing_engines.empty?\n res = gather_snmp_metrics_by_name(\"CPU\", @routing_engines, ROUTING_ENGINE_CPU, snmp)\n NewRelic::PlatformLogger.debug(\"Device: Got #{res.size}/#{@routing_engines.size} CPU metrics\")\n return res\n end", "def cpu_usage\r\n # all usage\r\n total_used_cpu = IO.popen(\"ps aux\").read.split(\"\\n\").inject 0 do |a, i|\r\n a += i.split(' ')[2].to_f\r\n end\r\n # mysqld usage\r\n tread_desc = IO.popen(\"ps aux|grep mysqld\").read.split(\"\\n\")[0].split \" \" #get total info about mysql-process\r\n #form json\r\n return {\r\n rows: [{\r\n type: \"CPU free\",\r\n total: 100\r\n },{\r\n type: \"Mysql used\",\r\n total: tread_desc[2].to_f\r\n },{\r\n type: 'Other',\r\n total: total_used_cpu - tread_desc[2].to_f\r\n }]\r\n }.to_json\r\nend", "def cpus\n flavor[2]\n end", "def cpu_loadavg\n `cat /proc/loadavg`.split\n end", "def get_cpu_info(workstation_id); end", "def cores\n @cores ||= (@node[\"cores\"] || []) + @node.find_all(\"core\")\n end", "def percent_resources_used\n guests = self.xen_guests.map{|xm| m = xm.guest.machine; [m.ram, m.cpu_cores ]}.transpose.map{|c| c.sum }\n return [ 0, 0] if guests.empty?\n my_resources = [machine.ram,machine.cpu ]\n\n if my_resources.first == 0 or my_resources.last == 0 then\n return [ 0, 0] \n end\n \n# => [17152, 23]\n#>> [xen.model.ram, xen.model.cpu ]\n#=> [32768, 8]\n#>> (17152.to_f / 32768.to_f * 100) .to_i\n#=> 52\n#>> [ [17152, 23], [32768, 8]].transpose.map{|r| (r.first.to_f / r.last.to_f * 
100).to_i }\n [ guests , my_resources ].transpose.map{|r| (r.first.to_f / r.last.to_f * 100).to_i }\n end", "def number_of_processors\n if RUBY_PLATFORM =~ /linux/\n return `cat /proc/cpuinfo | grep processor | wc -l`.to_i\n elsif RUBY_PLATFORM =~ /darwin/\n return `sysctl -n hw.logicalcpu`.to_i\n elsif RUBY_PLATFORM =~ /win32/\n # this works for windows 2000 or greater\n require 'win32ole'\n wmi = WIN32OLE.connect(\"winmgmts://\")\n wmi.ExecQuery(\"select * from Win32_ComputerSystem\").each do |system| \n begin\n processors = system.NumberOfLogicalProcessors\n rescue\n processors = 0\n end\n return [system.NumberOfProcessors, processors].max\n end\n end\n raise \"can't determine 'number_of_processors' for '#{RUBY_PLATFORM}'\"\nend", "def vcpus\n OpenStruct.new(count: max_vcpus)\n end", "def get_cpu_usage_for_user(username)\n return(@cpuusage[username])\n end", "def get_cpu_limit\n cpus = Parallel.processor_count\n env_cpus = ENV.fetch( 'SIMP_RAKE_LIMIT_CPUS', '-1' ).strip.to_i\n\n env_cpus = 1 if env_cpus == 0\n env_cpus += cpus if env_cpus < 0\n # sanitize huge numbers\n env_cpus = (cpus - 1) if env_cpus >= cpus\n env_cpus = 1 if env_cpus < 0\n\n env_cpus\n end", "def thread_count\n @worker_threads_count.value\n end", "def device_count\n return @device_count\n end", "def device_count\n return @device_count\n end", "def device_count\n return @device_count\n end", "def device_count\n return @device_count\n end", "def detect_cpu_cores(platform, default_cores_count = 4)\n case platform\n when :windows\n detect_cpu_cores_win\n when :linux, :mac\n detect_cpu_cores_nix\n end\n rescue LoadError\n warn \"Can't detect number of CPUs for sure. \" \\\n \"Using default: #{default_cores_count}\"\n default_cores_count\n end", "def co_managed_device_count\n return @co_managed_device_count\n end", "def global_cpu_usage_extended_information\n super\n end", "def fork_count\n ENV['OPAL_PREFORK_THREADS']&.to_i || (Etc.nprocessors * 3 / 4.0).ceil\n end", "def cpu_percent_available?\n true\n end", "def non_compliant_device_count\n return @non_compliant_device_count\n end", "def get_cpu_usage_1m(snmp = nil)\n snmp = @snmp_manager unless snmp\n\n get_names(snmp) if @names.empty?\n res = gather_snmp_metrics_by_name(\"Virtual Servers/CPU Usage/1m\", @names, OID_LTM_VIRTUAL_SERV_STAT_VS_USAGE_RATIO_1M, snmp)\n NewRelic::PlatformLogger.debug(\"Virtual Servers: Got #{res.size}/#{@names.size} CPU metrics\")\n\n unless res.nil?\n sorted_report = res.sort_by { |k,v| v }.reverse\n sorted_report.each_with_index do |row, index|\n @f5_agent.report_metric row[0], \"%\", row[1]\n break if index >= (MAX_RESULTS - 1)\n end\n end\n end", "def procs_percent\n (procs_used.to_f / procs_avail.to_f) * 100\n end", "def total_device_count\n return @total_device_count\n end", "def all_cpu_usage_extended_information\n super\n end", "def using_cpu?\n return @status[ 'CPU' ][ 'Active' ]\n end", "def devices_count\n return @devices_count\n end", "def count_used_instances\n count = 0\n return count\n end", "def get_cpu_price(workstation_id) # ...\n 120\n end", "def num_threads\n logger.debug { \"#{self.class}##{__method__}\" }\n if @num_threads.nil?\n @num_threads = 0\n\n threadgroups_threads_count_properties.each do |property_name|\n value = properties_map[property_name]\n logger.debug(\"#{property_name} -> #{value}\")\n\n if serialize_threadgroups?\n @num_threads = value if value > @num_threads\n else\n @num_threads += value\n end\n end\n end\n @num_threads\n end", "def cpu_temp\n\t\t`vcgencmd measure_temp`.chomp\n\tend", "def 
get_host_utilization(host, model = nil, limit = 90)\n limit = @config[:config]['utilization_limit'] if @config[:config].key?('utilization_limit')\n return nil if model && !host_has_cpu_model?(host, model)\n return nil if host.runtime.inMaintenanceMode\n return nil unless host.overallStatus == 'green'\n return nil unless host.configIssue.empty?\n\n cpu_utilization = cpu_utilization_for host\n memory_utilization = memory_utilization_for host\n\n return nil if cpu_utilization.nil?\n return nil if cpu_utilization.to_d == 0.0.to_d\n return nil if memory_utilization.nil?\n return nil if memory_utilization.to_d == 0.0.to_d\n\n return nil if cpu_utilization > limit\n return nil if memory_utilization > limit\n\n [cpu_utilization, host]\n end", "def usage_by_ps\n memory = cmd(\"ps -o rsz #{process}\").split(\"\\n\")[1].to_f / 1.kilobyte\n return nil if memory <= 0\n\n memory\n end", "def active_device_count\n return @active_device_count\n end", "def enrolled_device_count\n return @enrolled_device_count\n end", "def compliant_device_count\n return @compliant_device_count\n end", "def default_fault_domain_count\n @azure_properties['environment'] == ENVIRONMENT_AZURESTACK ? 1 : (@use_managed_disks ? 2 : 3)\n end", "def compute_workers(ncpus)\n return 0 unless Process.respond_to?(:fork)\n\n compute_workers_from_env(LISTENER_WORKERS) ||\n compute_workers_from_env(PUMA_WORKERS) ||\n ncpus * PUMA_WORKERS_CPUMULT\n end", "def index\n @users = User.find_owners(params[:page])\n @total_domains = Domain.count\n @system_domains = Domain.count( :conditions => 'user_id IS NULL' )\n end", "def counts\n\t\tputs \"Counting number of IPs within the CIDR store:\" if @verbose\n\t\tcnt=0\n\t\t@known_cidr_blks.keys.map do |key|\n\t\t\tcnt=cnt+size(key)\n\t\tend\n\t\tputs \"Total number of trusted IPs: #{cnt}\" if @verbose\n\t\treturn cnt\n\trescue => ee\n\t\tputs \"Exception on method #{__method__}: #{ee}\" if @verbose\n\tend", "def memory_usage \n\t`ps -o rss= -p #{Process.pid}`.to_i # in kilobytes \nend", "def getconnectioncount\n coind.getconnectioncount\n end", "def oml2_nmetrics_cpu(stream)\n opts = {:name => 'CPU', :schema => [:ts, :server, :user, :sys, :nice, :idle, :wait], :max_size => 200}\n select = [:oml_ts_server, :oml_sender, :user, :sys, :nice, :idle, :wait, :total]\n tss = {}\n t = stream.capture_in_table(select, opts) do |ts, cid, user, sys, nice, idle, wait, total|\n user = user.u64; sys = sys.u64; nice = nice.u64; idle = idle.u64; wait = wait.u64; total = total.u64\n last = tss[cid]\n tss[cid] = [user, sys, nice, idle, wait, total]\n if last\n l_user, l_sys, l_nice, l_idle, l_wait, l_total = last\n f = 1.0 * (total - l_total)\n [ts, cid, (user - l_user) / f, (sys - l_sys) / f, (nice - l_nice) / f, (idle - l_idle) / f, (wait - l_wait) / f] \n else\n nil\n end\n end\n gopts = {\n :schema => t.schema,\n :mapping => {\n :x_axis => {:property => :ts},\n :y_axis => {:property => :user},\n :group_by => {:property => :server},\n :stroke_width => 4 \n },\n :margin => {:left => 80, :bottom => 40},\n :yaxis => {:ticks => 6, :min => 0},\n :ymin => 0\n }\n init_graph(t.name, t, 'line_chart', gopts)\n t\nend", "def loadcpu( num1, num2 )\n res = 0.0\n (1..1000000).each do |i|\n res += num1 / num2 * i\n end\n res\nend", "def worker_count()\n @workers.size\n end", "def worker_count()\n @workers.size\n end", "def index\n @users = User.where(:admin => false).paginate(:page => params[:page])\n @total_domains = Domain.count\n @system_domains = Domain.where('user_id IS NULL').count\n end" ]
[ "0.7719498", "0.7719498", "0.75121003", "0.75121003", "0.71274376", "0.6922787", "0.6892276", "0.6885346", "0.6882397", "0.68118423", "0.6788717", "0.6687688", "0.6669076", "0.66608906", "0.6637468", "0.66232055", "0.65831506", "0.65215737", "0.63739204", "0.63573784", "0.63326126", "0.63326126", "0.6296497", "0.6285067", "0.627526", "0.625195", "0.6209612", "0.61825204", "0.6167705", "0.612359", "0.61233795", "0.60935974", "0.60654753", "0.6051297", "0.6002102", "0.5973693", "0.5973315", "0.59431994", "0.59194636", "0.5917328", "0.59041303", "0.58895737", "0.58667785", "0.5857364", "0.58551055", "0.58288336", "0.58256906", "0.58222085", "0.57907885", "0.5785666", "0.5777863", "0.57449067", "0.57335234", "0.5732331", "0.57238823", "0.57145715", "0.56975573", "0.56951874", "0.5686443", "0.5684719", "0.56390786", "0.56123364", "0.5611213", "0.5560205", "0.5557274", "0.5557274", "0.5557274", "0.5557274", "0.5550927", "0.55457556", "0.55421776", "0.55418265", "0.5534515", "0.55237", "0.5513", "0.55085206", "0.55016315", "0.54861295", "0.5478764", "0.54740626", "0.5472748", "0.545925", "0.54419297", "0.5441673", "0.543791", "0.5423343", "0.5399086", "0.5395034", "0.5392169", "0.5390961", "0.5386702", "0.53819406", "0.5376777", "0.53651637", "0.5359889", "0.5358633", "0.5353904", "0.5349594", "0.5349594", "0.53455746" ]
0.75273395
2
Return the domain type
def domain_type config[:domain_type] end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def valid_type?\n type == \"domain\"\n end", "def valid_type?\n type == \"domain\"\n end", "def create_types\n\t[Domain]\nend", "def type\n fetch('company.type')\n end", "def allowed_types\n [Domain]\nend", "def allowed_types\n [Domain]\nend", "def allowed_types\n [Domain]\nend", "def get_type\n\n end", "def type\n @type.name\n end", "def org_type\n\t\torg_type = organisation.organisation_type.name\n\t\treturn org_type\n\tend", "def type\n entity_type.name\n end", "def type\n @type.to_s\n end", "def type\n @type\n end", "def type\n _type.split(\"::\").last.downcase\n end", "def type\n @type\n end", "def type\n @type\n end", "def type\n @type\n end", "def type\n @type\n end", "def allowed_types\n\t[Organization, Domain]\nend", "def type\n return @type\n end", "def type\n return @type\n end", "def type\n return @type\n end", "def type\n return @type\n end", "def type\n return @type\n end", "def type\n return @type\n end", "def type\n return @type\n end", "def resourceType\n 'DomainResource'\n end", "def entity_type\n return @entity_type\n end", "def domain_class\n self.class.const_get(:DSL)\n end", "def type\n return @type\n end", "def allowed_types\n\t\t[Domain,Device]\n\tend", "def type\n @type\n end", "def type\n self.class.name.split(':').last.downcase\n end", "def type\n self.class.class_name.downcase\n end", "def type\n\t\t@type\n\tend", "def type\n self.class.type\n end", "def type\n self.class.type\n end", "def type\n self.class.type\n end", "def allowed_types\n\t\t[Device, Domain]\n\tend", "def type\n munson.type\n end", "def type\n self.class.type\n end", "def type\n self.class.type\n end", "def type\n self.class.type\n end", "def type\n TYPES[@type_id]\n end", "def _type\n self.class.to_s\n end", "def get_data_type\n get_field_config['type']\n end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type_name; end", "def type\n if validator_hash[:numericality] == true ||\n validator_hash[:numericality] == { allow_nil: true }\n 'Decimal'\n elsif validator_hash.dig(:numericality, :only_integer)\n 'Integer'\n elsif validator_hash[:ingested_date]\n 'Date'\n elsif validator_hash.dig(:case_insensitive_inclusion, :in) == %w[Y N]\n 'YesNo'\n elsif inclusion_list_with_lookup_values?(validator_hash.dig(:case_insensitive_inclusion, :in))\n attr.name.tr(' ', '')\n else\n 'String'\n end\n end", "def type\n\t\tself.class.type\n\tend", "def type\n\t\tself.class.type\n\tend", "def type\n self.class.to_s.split('::').last.downcase.to_sym\n end", "def type\n end", "def type\n field[:type]\n end", "def type\n @@type\n end", "def type\n @@type\n end", "def type\n self.class.to_s.downcase\n end", "def type\n Type.new(type_param).yard_type_string\n end", "def type_name\n @type_name ||= StringHelpers.underscore(StringHelpers.demodulize(@value.class.name)).to_sym\n end", "def type\n return @type if defined? @type\n\n @type = self.to_s.gsub(/.*::/, '')\n end", "def type\n self.class.name.downcase\n end", "def os_type\n FFI::Libvirt.virDomainGetOSType(self)\n end", "def type\n @type ||= self.class.name.split('::').last\n end", "def type_to_s\n self.class.to_s.split(':').last.downcase\n end", "def type\n self.class.type\n end", "def type\n self.class.type\n end", "def type\n self.class.type\n end", "def type\n return @type if defined? 
@type\n @type = self.to_s.gsub(/.*::/, '')\n end", "def type\n @type ||= @data[:edupersonaffiliation].last\n end", "def type_name\n @type_name ||= self.name.demodulize.underscore\n end", "def type\n types.first\n end", "def get_type_name\n\t\treturn campaign_type.type_name\n\tend", "def type\n self[:type]\n end", "def type\n _type\n end", "def type\n types.first\n end", "def type\n self.class::TYPE\n end", "def type\n @type ||= \"#{as}_type\" if polymorphic?\n end", "def type\n return @type if @type != \"unknown\"\n info\n @type\n end", "def type\n self.class.type(self)\n end", "def type_name\n @type_name ||= determine_type_name(descriptor)\n end", "def type\n read_attr :type, :to_sym\n end", "def get_type(dns_query, parsed_dns)\n RECORD_TYPE[get_rdata_value(dns_query, parsed_dns, SHORT_LENGTH).to_i]\n end", "def get_field_type\n\t\tend", "def type\n self[:type]\n end", "def type_klass; end", "def type\n object.class.name\n end", "def type\n object.class.name\n end" ]
[ "0.70587915", "0.70587915", "0.69794697", "0.6878422", "0.68655974", "0.68655974", "0.68655974", "0.6860498", "0.678803", "0.67498976", "0.67173016", "0.6676866", "0.66450995", "0.66329795", "0.6616654", "0.6616654", "0.6616654", "0.66165864", "0.6593503", "0.6581494", "0.6581494", "0.6581494", "0.6581494", "0.6581494", "0.6581494", "0.6581494", "0.65807104", "0.6580366", "0.6575648", "0.65623367", "0.650116", "0.6495539", "0.64826286", "0.6480332", "0.64673865", "0.6448259", "0.6448259", "0.6448259", "0.6446354", "0.64373845", "0.64291966", "0.64291966", "0.64291966", "0.64121467", "0.64004946", "0.63832206", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.63713646", "0.6368963", "0.63654083", "0.63654083", "0.6362615", "0.6350553", "0.6340328", "0.6333972", "0.6333972", "0.63333017", "0.63321257", "0.6320217", "0.6319761", "0.6314377", "0.630973", "0.6304997", "0.629922", "0.62972564", "0.62972564", "0.62972564", "0.62962395", "0.6285057", "0.6283842", "0.6273481", "0.6268116", "0.6266832", "0.6266365", "0.62587655", "0.62567246", "0.6251637", "0.6248686", "0.6245856", "0.6239964", "0.62339383", "0.6229798", "0.6224049", "0.6215925", "0.6215555", "0.6207418", "0.6206041" ]
0.84000885
0
Return the domain name
def domain_name @domain_name ||= default_name end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def domain_name\n return @domain_name\n end", "def domain_name\n return @domain_name\n end", "def domain\n unless @domain\n if defined? ActiveSupport::CoreExtensions::String::Inflections\n @domain = name.tableize\n else\n @domain = name.downcase\n end\n end\n @domain\n end", "def domain\n @domain ||= PublicSuffix.parse(@fqdn).domain\n end", "def fqdn domain_name\n Service.fqdn domain_name, dns\n end", "def domain_name\n Faker::Internet.domain_name\n end", "def domain_name\n Faker::Internet.domain_name\n end", "def get_domain url\n uri = URI.parse url\n host = uri.host.downcase\n host.start_with?('www.') ? host[4..-1] : host\n end", "def domain\n components = rdns.map {|rdn| rdn[:dc]}.compact\n components.join('.') unless components.empty?\n end", "def get_url_domain\n uri = URI.parse(url)\n host = uri.host.downcase\n host.start_with?('www.') ? host[4..-1] : host\n end", "def get_domain()\n\t\tdomain = \"\"\n\t\tbegin\n\t\t\tsubkey = \"HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Group Policy\\\\History\"\n\t\t\tv_name = \"DCName\"\n\t\t\tdomain_dc = registry_getvaldata(subkey, v_name)\n\t\t\tdom_info = domain_dc.split('.')\n\t\t\tdomain = dom_info[1].upcase\n\t\trescue\n\t\t\tprint_error(\"This host is not part of a domain.\")\n\t\tend\n\t\treturn domain\n\tend", "def get_domain()\n\t\tdomain = \"\"\n\t\tbegin\n\t\t\tsubkey = \"HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Group Policy\\\\History\"\n\t\t\tv_name = \"DCName\"\n\t\t\tdomain_dc = registry_getvaldata(subkey, v_name)\n\t\t\tdom_info = domain_dc.split('.')\n\t\t\tdomain = dom_info[1].upcase\n\t\trescue\n\t\t\tprint_error(\"This host is not part of a domain.\")\n\t\tend\n\t\treturn domain\n\tend", "def domain\n server_name || http_host\n end", "def domain\n return nil if @domain.empty?\n @domain\n end", "def domain(tld_length = 1)\n host.split('.').last(1 + tld_length).join('.')\n end", "def domain(tld_length = 1)\n host.split(\":\").first.split(\".\").last(1 + tld_length).join(\".\")\n end", "def subdomain\n self.name.gsub('_', '.')\n end", "def default_domain_name\n return @default_domain_name\n end", "def default_domain_name\n return @default_domain_name\n end", "def domain\n try_opt(:domain)\n end", "def subdomain\n ([self.short_name] + self.class.dalianshops.domain.split('.')[1..-1]).join('.')\n end", "def get_domain\n @domain\n end", "def dns_name\n [\"public\", fqdn].join(\".\")\n end", "def subdomain\n host.split(\".\").first\n end", "def get_domain_name(host)\n domain = nil\n search = nil\n resolv_conf = if host['platform'].include?('windows')\n if host.is_cygwin?\n host.exec(Command.new(\"cat /cygdrive/c/Windows/System32/drivers/etc/hosts\")).stdout\n else\n host.exec(Command.new('type C:\\Windows\\System32\\drivers\\etc\\hosts')).stdout\n end\n else\n host.exec(Command.new(\"cat /etc/resolv.conf\")).stdout\n end\n resolv_conf.each_line do |line|\n if (match = /^\\s*domain\\s+(\\S+)/.match(line))\n domain = match[1]\n elsif (match = /^\\s*search\\s+(\\S+)/.match(line))\n search = match[1]\n end\n end\n return_value ||= domain\n return_value ||= search\n\n return unless return_value\n\n return_value.gsub(/\\.$/, '')\n end", "def domain_name(url)\n url.gsub(\"www.\",\"\").split(\"//\")[1].split(\"/\")[0].split('.')[0]\nend", "def getdomain()\n domain = \"\"\n begin\n subkey = \"HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Group Policy\\\\History\"\n v_name = \"DCName\"\n domain_dc = registry_getvaldata(subkey, v_name)\n dom_info 
= domain_dc.split('.')\n domain = dom_info[1].upcase\n rescue\n print_error(\"This host is not part of a domain.\")\n end\n return domain\n end", "def domain_name(subdomain: T.unsafe(nil), domain: T.unsafe(nil)); end", "def domain_name=(value)\n @domain_name = value\n end", "def domain_name=(value)\n @domain_name = value\n end", "def getDomain(payload)\n\tdomainName = \"\"\n\t\n\twhile true\n\n\t\t# Get length of domain name section\n\t\tlength = payload[0].unpack('c*')[0]\n\t\t#length = payload[0].to_i\n\n\t\tif(length != 0)\n\n\t\t\t# Add domain section to overall domain name string\n\t\t\tdomainName += payload[1, length] + \".\"\n\t\t\tpayload = payload[length + 1..-1]\n\t\telse\n\t\t\t# Return overall domain name string\n\t\t\treturn domainName = domainName[0, domainName.length - 1]\n\t\tend\n\tend\n\tputs \"Domain Info: \" + domainName\nend", "def domain_name(url)\n return nil unless url\n if m=url.match(/([^.\\/ ]+)\\.(com|net|info|org|name|biz|gov|\\w\\w)(\\.\\w+)?(\\/.*)*(\\?.*)*$/)\n \"#{m[1]}.#{m[2]}\"\n else\n url\n end\n end", "def domain\n URI(base_url).host.downcase\n end", "def domain\n @domain\n end", "def domain\n @domain\n end", "def domain\n @uri[:domain]\n end", "def get_server_domain\n @hostname ||= Socket.gethostname\n end", "def domain\n return @domain if defined? @domain\n\n @domain = begin\n PublicSuffix.parse(normalized_domain, default_rule: nil)\n rescue PublicSuffix::DomainInvalid, PublicSuffix::DomainNotAllowed\n nil\n end\n end", "def getDomain\n @domain\n end", "def domain_name(url)\n if url.match(/^www/)\n p url.split(\".\")[1]\n elsif url.match(/^http/)\n x = url.split(\"/\")[2]\n if x.match(/^www/)\n p x.split(\".\")[1]\n else\n p x.split(\".\")[0]\n end\n else\n p url.split(\".\")[0]\n end\nend", "def domain_name(url)\n #url.gsub(/http:|https:|www.|\\/\\/|.com.*/,'')\n url.gsub(/http:|https:|www.|\\/\\//,'').split('.').first\nend", "def parse_domain_name\n mdata = /ip domain-name ([\\w.]+)/.match(config)\n { domain_name: mdata.nil? ? '' : mdata[1] }\n end", "def get_domain(payload)\n domain_name = \"\"\n while(true)\n # Get length fields\n len = payload[0].unpack('H*')[0].to_i\n \n if len != 0 then\n domain_name += payload[1, len] + \".\"\n payload = payload[len + 1..-1]\n else\n domain_name = domain_name[0, domain_name.length - 1]\n return domain_name\n end # if len != 0 then\n end # while(true)\n end", "def fqdn\n [ hostname, domain ].join('.') unless hostname.nil? and domain.nil?\n end", "def domain_info\n @domain = normalized_email.split('@').last\n domain\n end", "def extract_domain_name(payload)\n\t\tif(payload) then\n\t\t domain_name = \"\"\n while(true)\n \t\n \t len = payload[0].unpack('H*')[0].to_i\n \t # to understand below you might need to read up on dns packets. 
they take the form of [length][string][length][string][...]0\n \t if len != 0 then \n \n domain_name += payload[1, len] + \".\" #grab the first chunk from the begining, until the length specified by the packet\n payload = payload[len + 1..-1]\n else\n domain_name = domain_name[0, domain_name.length - 1] # -1 to truncate the 0 at the end of the payload\n \t\n \n return domain_name\n \t\n end # if len != 0 then\n end\n end\n\tend", "def domain_name(str)\n str = str.split('//')\n str = str[str.size - 1].split('.')\n str.delete('www')\n str[0]\nend", "def fqdn\n [name, tag, domain].compact.join('.')\n end", "def build_domain_name(env)\n config = env[:machine].provider_config\n domain_name =\n if config.default_prefix.nil?\n env[:root_path].basename.to_s.dup.concat('_')\n elsif config.default_prefix.empty?\n # don't have any prefix, not even \"_\"\n String.new\n else\n config.default_prefix.to_s.dup\n end\n domain_name << env[:machine].name.to_s\n domain_name.gsub!(/[^-a-z0-9_\\.]/i, '')\n domain_name << \"_#{Time.now.utc.to_i}_#{SecureRandom.hex(10)}\" if config.random_hostname\n domain_name\n end", "def absolute_domain\n domain.end_with?(\".\") ? domain : \"#{domain}.\"\n end", "def domain\n email.split(\"@\")[1]\n end", "def domain\n Domain.new((address.split('@')[1] || '').strip)\n end", "def domain_name=(d)\n @domain_name = d\n end", "def get_domain\n domain = nil\n begin\n subkey = 'HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Group Policy\\History'\n v_name = 'DCName'\n domain_dc = registry_getvaldata(subkey, v_name)\n rescue\n print_error 'Could not determine if the host is part of a domain.'\n return nil\n end\n if !domain_dc.nil?\n # lets parse the information\n dom_info = domain_dc.split('.').drop(1)\n domain = dom_info.join('.')\n else\n print_status 'Host is not part of a domain.'\n end\n domain\n end", "def new_domain\n domain || tag('Domain') || local_domain_name\n end", "def base_distinguished_name\n base_name = \"\"\n AD_DOMAIN.split('.').each do |item|\n base_name+=\"dc=#{item},\"\n end\n base_name.chop\n end", "def return_email_domain \n return \"@\" + self.email.split('@')[1]\n end", "def domain_name(url)\n url.gsub(/http(s)?:\\/\\/(www.)?/, '').match(/[^.]+/)[0]\nend", "def get_domain_name_from_email_address(email)\nend", "def domain\n domain = request.host\n domain << \":#{request.port}\" unless [80, 443].include?(request.port)\n domain\n end", "def domain\n URI.parse(@config.split('<')[0].split('->')[0])\n end", "def domain(domain)\n get(\"/dns/domain/#{domain}\")\n end", "def domain_name(url)\n url.match(/(http[s]?:\\/\\/[\\\\w]{3}?\\.?)(\\w+-?\\w+)/)[-1]\nend", "def username_domain(username = nil)\n username ||= options[:username] if options\n return unless username\n username.to_s.split('@').last\n end", "def base_domain(response)\n if response.respond_to? 
:request\n host = response.request.host.sub /:\\d+$/, ''\n return if host =~ /^([\\d.]+|localhost)$/\n\n host =~ /([^.]*)\\.([^.]*|..\\...|...\\...|..\\....)$/\n \".#{$1}.#{$2}\"\n end\n end", "def host\n domain\n end", "def domain\n @attributes[:domain]\n end", "def domain\n @attributes[:domain]\n end", "def domain_name(url)\n url.match(%r{(http(s)?://)?(www.)?([a-zA-Z0-9-]*)}).to_a.last\nend", "def www\n \"www.#{domain}\"\n end", "def dns_name instance\n instance.dns_name\n end", "def domain\n settings.service.domain \n end", "def fqdn\n \"#{to_label}.example.com\"\n end", "def email_domain\n if !self[:email].blank?\n split_host = URI.parse(\"#{self[:email]}\").path.split('@')\n \"#{split_host.last}\" if 2 == split_host.size\n else\n self.organization.site_domain if self.organization\n end\n rescue URI::InvalidURIError\n nil\n end", "def short\n return '' if name == domain.name\n return '' if name.blank?\n\n File.basename(name, \".#{domain.name}\")\n end", "def subdomain\n #for debug at localhost\n return self.class.system_top_domain if self.class.system_top_domain == 'localhost'\n short_name + '.' + self.class.system_top_domain\n end", "def subdomain\n ([self.short_name] + self.class.admin_site.domain.split('.')[1..-1]).join('.')\n end", "def website_domain\n 'www.' + base_domain\n end", "def get_domain_name_from_email_address(email)\n index_no = email.index('@') + 1\n email[index_no...email.length - 4]\nend", "def canonical_hostname(domain)\n # Allow hostname overrides\n return $override_dashboard if $override_dashboard && domain == 'studio.code.org'\n return $override_pegasus if $override_pegasus && domain == 'code.org'\n\n return \"#{name}.#{domain}\" if ['console', 'hoc-levels'].include?($node_name)\n return domain if $node_env == 'production'\n\n # our HTTPS wildcard certificate only supports *.code.org\n # 'env', 'studio.code.org' over https must resolve to 'env-studio.code.org' for non-prod environments\n sep = (domain.include?('.code.org')) ? '-' : '.'\n return \"localhost#{sep}#{domain}\" if $node_env == 'development'\n return \"translate#{sep}#{domain}\" if $node_name == 'crowdin'\n \"#{$node_env}#{sep}#{domain}\"\nend", "def domain(dom)\n domain = URI.extract(dom)\n raise ArgumentError, 'The domain must be a URL.' if domain.blank?\n @domain = URI.parse(domain[0]).normalize.to_s\n end", "def canonical\n dns_host_name\n end", "def cname\n self[:cname] || domain_names&.first&.canonicalize_cname\n end", "def cname\n self[:cname] || domain_names&.first&.canonicalize_cname\n end", "def getDomainName(payload)\n domainName = \"\"\n while(true)\n len = payload[0].to_i\n if (len != 0)\n domainName += payload[1,len] + \".\"\n payload = payload[len+1..-1]\n else\n return domainName = domainName[0,domainName.length-1]\n end\n end\nend", "def normalized_domain\n if @text.empty?\n nil\n elsif parsed_domain\n parsed_domain.host\n end\n end", "def domain\n @options['domain']\n end", "def smtp_domain\n @smtp_username.split('@').last\n end", "def get_domain_name_from_email_address(email)\n\temail = '[email protected]'\n\tn = email.gsub(/.+@([^.]+).+/, '\\1')\nend", "def dns_host_name\n @dns_host_name ||= ::SimpleIDN.to_ascii(@host_name)\n end", "def dn_to_domain(dn)\n if dn.include? \"DC=\"\n return dn.gsub(',','').split('DC=')[1..-1].join('.')\n else\n return dn\n end\n end", "def get_domain\n UserNotifierMailer.get_domain(\"http://localhost:3000\")\n end", "def sf_domain(uri)\n uri = uri.to_s.split('/')\n uri.empty? ? 
'' : uri[2]\n end", "def get_domain_name_from_email_address(email)\n med = email[/[@].*[.]/]\n domain = med[1..-2]\nend", "def domain\n @domain ||= 'https://api.sirportly.com'\n end", "def fqdn(gear_name = nil)\n \"#{gear_name || canonical_name}-#{domain_namespace}.#{Rails.configuration.openshift[:domain_suffix]}\"\n end", "def domain \n @node[\"domain\"]\n end", "def request_subdomain\n request.host.split(/\\./).first\n end", "def full_domain(options= {})\n # assume that if port is used in domain config, it should\n # be added to the end of the full domain for links to work\n # This concerns usually mostly testing and development\n default_host, default_port = APP_CONFIG.domain.split(':')\n port_string = options[:port] || default_port\n\n if domain.present? && use_domain? # custom domain\n dom = domain\n else # just a subdomain specified\n dom = \"#{self.ident}.#{default_host}\"\n dom += \":#{port_string}\" unless port_string.blank?\n end\n\n if options[:with_protocol]\n dom = \"#{(APP_CONFIG.always_use_ssl.to_s == \"true\" ? \"https://\" : \"http://\")}#{dom}\"\n end\n\n return dom\n\n end", "def domain_name(url)\n regex = /(?:(http|https):\\/\\/)?(?:www\\.)?(?<domain_name>.*?)\\./\n return url.match(regex)[:domain_name]\n \n # original solution:\n # regex = /(?:(?:(?:http:\\/\\/)?(?:www\\.)?)|(?:(?:https:\\/\\/)?(?:www\\.)?))([\\w-]+)\\./\n # matches = regex.match(url)\n # return matches.to_a.last\nend" ]
[ "0.8715293", "0.8715293", "0.84037054", "0.8108712", "0.78313756", "0.7807225", "0.7807225", "0.7793504", "0.77840286", "0.7726059", "0.77096456", "0.77096456", "0.7699835", "0.76884186", "0.7661856", "0.7659795", "0.76500666", "0.76437926", "0.76437926", "0.7618178", "0.7588071", "0.75708365", "0.7554034", "0.75350714", "0.7529329", "0.7528144", "0.750936", "0.7485786", "0.748288", "0.748288", "0.7478591", "0.74694055", "0.74636024", "0.7459038", "0.7459038", "0.74563396", "0.74436295", "0.7438957", "0.74365324", "0.7435811", "0.74282944", "0.7419447", "0.7409165", "0.7371563", "0.7356605", "0.73280275", "0.7327529", "0.7308721", "0.72772217", "0.7276836", "0.7273119", "0.72697103", "0.7235321", "0.72278994", "0.7224097", "0.7217384", "0.7208398", "0.72058547", "0.719928", "0.719519", "0.7188281", "0.7182583", "0.71823984", "0.7177194", "0.7172363", "0.71695226", "0.7161523", "0.7161523", "0.7150884", "0.71456206", "0.7137028", "0.71292967", "0.711473", "0.71105886", "0.71042925", "0.7074242", "0.7073529", "0.70654976", "0.7060984", "0.704833", "0.7047972", "0.70393175", "0.7037108", "0.7037108", "0.7009744", "0.7000871", "0.69951695", "0.69541407", "0.69535995", "0.6953154", "0.6952283", "0.69492406", "0.6943922", "0.69312143", "0.6928651", "0.6915267", "0.6906923", "0.6891908", "0.6891652", "0.6882796" ]
0.858935
2
Find and return a domain by its id
def load_domain(domain_id)
  client.servers.get(domain_id)
rescue ::Libvirt::RetrieveError
  debug("Domain with id #{domain_id} was not found.")
  nil
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_domain(id)\n logger.debug \"Finding domain #{id}\" if @mydebug\n domains.each { |domain| return domain if domain.id == id }\n\n raise RHC::DomainNotFoundException.new(\"Domain #{id} does not exist\")\n end", "def find_domain(id)\n debug \"Finding domain #{id}\"\n if link = api.link_href(:SHOW_DOMAIN, ':name' => id)\n request(:url => link, :method => \"GET\")\n else\n domains.find{ |d| d.name.downcase == id.downcase }\n end or raise DomainNotFoundException.new(\"Domain #{id} not found\")\n end", "def find_by_id(id)\n nil_or_object(FFI::Libvirt.virDomainLookupByID(interface, id), Domain)\n end", "def find_by_id(id)\n domain_record = finder(\n :select => [:id, :domain, :description],\n :conditions => {:id => id})\n return nil if domain_record.nil?\n\n new(domain_record)\n end", "def find_by_id(id)\n domain_record = finder(:select => [:id, :name], :conditions => {:id => id})\n return nil unless domain_record\n\n new(domain_record)\n end", "def domain\n Domain.find_by_id(domain_id)\n end", "def domain\n @domain = Domain.find( \n params[:id], \n :user => current_user\n )\n end", "def domain\n @domain ||= self.domain_id ? Domain.find_by_id(self.domain_id) : nil\n end", "def domain\n @domain ||= self.domain_id ? Domain.find_by_id(self.domain_id) : nil\n end", "def find(id)\n result = connection.domains.find(id)\n return nil if !result\n VM.new(connection, result)\n end", "def find_by_name(name)\n domain_record = finder(\n :select => [:id, :domain, :description],\n :conditions => {:domain => name})\n return nil if domain_record.nil?\n\n new(domain_record)\n end", "def find(name)\n domains = list(100)\n index = domains.index(name)\n domains[index] unless index.nil?\n end", "def get_domain(session,id)\n result = @connection.rest_request(\n method: :get,\n path: \"/mps_setup/get_domain/#{id}\",\n session: session\n )\n xml = result.body.sub(/(?<=\\<n:get_domainResponse).*?(?=\\>)/,\"\")\n xml = xml.gsub(/(?=n:get_domainResponse).*?(?=\\>)/,\"get_domainResponse\")\n return Hash.from_xml(xml).to_json\n end", "def find_by_name(name)\n nil_or_object(FFI::Libvirt.virDomainLookupByName(interface, name), Domain)\n end", "def show\n @domain = Domain.find(params[:id])\n end", "def set_domain\n @domain = Domain.find(params[:id])\n end", "def set_domain\n @domain = Domain.find(params[:id])\n end", "def by_domain_name(domain_name)\n domain_name = domain_name.parameterize.to_s.gsub(/\\-/, '')\n self::find_by_domain_name(domain_name)\n end", "def getDomainBy(by)\n call(DOMAIN_BY_PATH + by)\n end", "def findRecord(domain)\n return CouchPotato.database.load_document domain\n end", "def find(id)\n response = get(\"/domains/#{@@parent_id}/records/#{id}\")[\"record\"]\n response or return nil\n Record.new response\n end", "def domain_lookup(domain_name)\n d = nil\n @top_level.domains.each do |domain|\n d = domain if domain.name == domain_name\n end\n d\n end", "def find_by_id(id)\n find(id)\n end", "def show\n @domain = DOMAIN.first_or_get!(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @domain }\n end\n end", "def find_by_uuid(uuid)\n nil_or_object(FFI::Libvirt.virDomainLookupByUUIDString(interface, uuid), Domain)\n end", "def find_by_name(name)\n domain_record = finder(:select => [:id, :name], :conditions => {:name => name})\n return nil if domain_record.nil?\n\n new(domain_record)\n end", "def find_by_id(id)\n find_by(:id, id)\n end", "def find_by_id(id)\n self.select { |record| record.id == id.to_s }.first\n end", "def find_by_id(id)\n self.select { 
|record| record.id == id.to_s }.first\n end", "def get_domain(id, opts = {})\n data, _status_code, _headers = get_domain_with_http_info(id, opts)\n data\n end", "def find(id)\n id = id.to_i\n contacts = Contact.all\n contact = nil\n contact = contacts[id-1] unless contacts[id-1].nil?\n end", "def find(id); end", "def find(id); end", "def find(id)\n where({'id' => \"#{id}\"}).first\n end", "def domain(domain)\n get(\"/dns/domain/#{domain}\")\n end", "def find_by_id(id)\n find_by_attributes(:id => id).first\n end", "def find(id)\n end", "def set_crm_domain\n @crm_domain = Crm::Domain.find(params[:id])\n end", "def find(id)\n first(\"Id = '#{id}'\")\n end", "def find(id)\n klass.find(id)\n end", "def find_by_id(id)\n id = id.to_i\n\n @id_hash[id]\n end", "def get_by_id(domain_id, opts = {})\n data, _status_code, _headers = get_by_id_with_http_info(domain_id, opts)\n data\n end", "def fetch_and_store_domain(user, account_id = 'urn:theplatform:auth:root')\n account_id ||= 'urn:theplatform:auth:root'\n result = fetch_domain user, account_id\n store_domain result, account_id\n domains[account_id]\n end", "def find(id)\n @objects[id]\n end", "def find(id)\n @data[id]\n end", "def set_ad_domain\n @ad_domain = AdDomain.find(params[:id])\n end", "def find_company(company_id)\n company = Company.find_by_id(company_id)\n halt 404 unless company\n\n company\n end", "def find(id)\n self.detect{|x| x.id == id.to_i}\n end", "def find_by_id(id)\n models.each do |model|\n el = model.find_by_id(id)\n return el if el\n end\n nil\n end", "def set_domain\n unless @domain = Domain.friendly.find(params[:id])\n flash[:alert] = 'Domain not found.'\n redirect_to domains_url\n end\n end", "def find_subdomain_resource(subdomain)\n resource = self.basecamper[:subdomain_class].to_s.camelize.constantize\n subdomain_field = self.basecamper[:subdomain_field]\n\n return resource.to_adapter.find_first(subdomain_field => subdomain)\n end", "def find_host_by_id(id)\n begin\n json_response = servers_interface.get(id.to_i)\n return json_response['server']\n rescue RestClient::Exception => e\n if e.response && e.response.code == 404\n print_red_alert \"Host not found by id #{id}\"\n exit 1\n else\n raise e\n end\n end\n end", "def find_by_id(goid)\n self[goid]\n end", "def find_one(id)\n response = request(:get, \"/#{resource_name}/#{id}\")\n #puts response\n construct_record_from_singular(response)\n end", "def get_by_id_with_http_info(domain_id, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DomainsApi.get_by_id ...'\n end\n # verify the required parameter 'domain_id' is set\n if @api_client.config.client_side_validation && domain_id.nil?\n fail ArgumentError, \"Missing the required parameter 'domain_id' when calling DomainsApi.get_by_id\"\n end\n # resource path\n local_var_path = '/cms/v3/domains/{domainId}'.sub('{' + 'domainId' + '}', CGI.escape(domain_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json', '*/*'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'Domain'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['oauth2']\n\n new_options = opts.merge(\n :operation => :\"DomainsApi.get_by_id\",\n :header_params => 
header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DomainsApi#get_by_id\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def find(id)\n @entries_by_id[id]\n end", "def id\n FFI::Libvirt.virDomainGetID(self)\n end", "def id\n FFI::Libvirt.virDomainGetID(self)\n end", "def get_by_domain\n render json: Url.where(\"domain LIKE :dom\", {:dom => \"%#{params[:domain]}%\"})\n end", "def find(id)\r\n find_one do |record|\r\n record.id == id\r\n end\r\n end", "def find_by_id(id)\n results = one.find_by_id_ne(id)\n results && !results['id'].blank? && new(results) || nil\n end", "def find(id)\n @collection[id.to_s]\n end", "def find_by_id(id)\n find_by_id!(id)\n rescue TopdeskAPI::Error::RecordNotFound\n nil\n end", "def find(id)\n Contact.all.select do |contact|\n contact.id == id\n end\n end", "def find_by_id(id)\n filtered_set(entities, key: 'id', value: id)\n end", "def get_dominfo_by_domain(domain)\n\t\t\t\tinfo = nil\n\t\t\t\ttraverse_text_file(domain_data_file) do |line|\n\t\t\t\t\tif line.index(domain + ':') == 0\n\t\t\t\t\t\tinfo = parse_domain_data_line(line)\n\t\t\t\t\t\tbreak\n\t\t\t\t\tend\n\t\t\t\tend\n\n\t\t\t\tinfo\n\t\t\tend", "def find(id)\n result = connection.exec_params(\"SELECT * FROM contacts WHERE id=$1::int LIMIT 1;\", [id])\n contact = result[0] # put a guard here to make sure not nil \n self.new(contact['name'], contact['email'], contact['id'])\n end", "def _fedora_object(id)\n id_segments = id.to_s.split('/')\n connection.find(id_segments[1])\n end", "def get_domain\n @domain\n end", "def whoisgem(domain)\n client = Whois::Client.new\n return client.lookup(domain) # returns the Whois::Record relevant to the domain\n end", "def find_by_id(id)\n @to_dos.each { |item| return item if item.id == id}\n end", "def find(id)\n all.find { |contact| contact.id == id }\n end", "def findid(id)\n return nil unless exist?(id)\n\n { id: id, name: @names[id], pw: @passwords[id], email: @emails[id] }\n end", "def find_for_domain(string)\n token = string\n defs = _definitions(TYPE_TLD)\n\n while token != \"\"\n if (found = defs[token])\n return factory(:tld, *found)\n else\n index = token.index(\".\")\n break if index.nil?\n\n token = token[(index + 1)..-1]\n end\n end\n\n nil\n end", "def find_by_id(id)\n configs.each do |config|\n if config.config_name.eql?(id)\n return config.new\n end\n end\n nil\n end", "def find id\n return nil if node.ids.empty?\n node.send(:orm_class).find id\n end", "def find_contact(id)\n \[email protected] {|contact| contact.id == id}\n \tend", "def find(id)\n @@conn.exec_params(\"SELECT * FROM contacts WHERE id=$1;\", [id]) do |results|\n return create_from_row(results[0])\n end \n end", "def find_by_id!(id)\n found = entities.detect { |elm| elm.id == id }\n raise Occi::Core::Errors::CollectionLookupError, \"Entity #{id.inspect} not found in the collection\" unless found\n found\n end", "def getDomain\n @domain\n end", "def find(id)\n find_by_index(id: id)\n end", "def find(id)\n # Used where so no exception will be raised if the instance\n # does not exist.\n @model.unscoped.where(@model_data['mappings']['id'].to_sym => id).first\n end", "def domain\n try_opt(:domain)\n end", "def domain\n children[1]\n 
end", "def domain\n children[1]\n end", "def get_domain(subnet)\n domains = foreman('GET', \"/api/subnets/#{subnet['id']}/domains\")\n domain = domains && domains['results'] && domains['results'].first\n domain or raise Puppet::Error.new(\"Unable to locate domain for subnet \\\"#{subnet['name']}\\\"\")\n end", "def get_entity id\n @entities.each do |e|\n return e if e.id == id\n end\n end", "def find(id)\n new.from_json(db_root.join(id).read)\n end", "def get(id)\n emsg = \"The %s argument cannot be nil, empty, or a zero length string\"\n raise(ArgumentError, emsg % ['id']) if !id.kind_of?(Integer)\n account = @accounts.select{|a| a.id == id}\n account = account.nil? || account.empty? ? nil : account[0]\n return account\n end", "def domain\n @domain\n end", "def domain\n @domain\n end", "def get_domain\n domain = nil\n begin\n subkey = 'HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Group Policy\\History'\n v_name = 'DCName'\n domain_dc = registry_getvaldata(subkey, v_name)\n rescue\n print_error 'Could not determine if the host is part of a domain.'\n return nil\n end\n if !domain_dc.nil?\n # lets parse the information\n dom_info = domain_dc.split('.').drop(1)\n domain = dom_info.join('.')\n else\n print_status 'Host is not part of a domain.'\n end\n domain\n end", "def find(id, optional = {})\n find_all([id], optional).first\n end", "def find(id)\n all.find { |obj| obj.id == id }\n end", "def find_by_slug_id(id)\n if Moped::BSON::ObjectId.legal?(id)\n Topic.find(id)\n else\n Topic.where(:slug_pretty => id.parameterize).first\n end\n end", "def get_object_by_id(class_name, id)\n obj = nil\n get_objects_of_class(class_name).each do |o|\n if o.id == id\n obj = o\n break\n end\n end\n obj\n end", "def find_by_id(id)\n unless id.class == BSON::ObjectId\n if BSON::ObjectId.legal? id\n id = BSON::ObjectId.from_string(id)\n else\n nil\n end\n end\n\n find('_id' => id).first\n end", "def find(id)\n repository.find(self, id)\n end", "def find_by_id(id)\n resp = get(\"/#{exposed_as}/#{id}\")\n case resp.response.code.to_i\n when 200\n result = MultiJson.load resp.parsed_response\n new(result)\n when 404\n nil\n else\n raise \"#{self.class.name}#try_find with ID #{id.inspect} returned unexpected response: #{resp.inspect}\"\n end\n end", "def get_by_id(class_name, id)\n @data = get_all()\n for item in @data[class_name]\n if item[\"id\"] == id\n return item\n end\n end\n raise \"#{class_name} id #{id} not found.\"\n end" ]
[ "0.85829324", "0.8505713", "0.823245", "0.8121154", "0.80650884", "0.7813009", "0.73053604", "0.7104519", "0.7104519", "0.7049735", "0.69487494", "0.6835949", "0.6674546", "0.6646798", "0.66343254", "0.65661484", "0.65661484", "0.6547493", "0.6513821", "0.64812", "0.6474236", "0.64473134", "0.6435712", "0.6433203", "0.6358658", "0.6356945", "0.63562644", "0.62980914", "0.62980914", "0.62732446", "0.62723976", "0.62508523", "0.62508523", "0.62368935", "0.62220955", "0.61900425", "0.6139459", "0.61382085", "0.61121595", "0.61076665", "0.6095538", "0.6090698", "0.6083256", "0.6065931", "0.60371363", "0.60231465", "0.60189897", "0.60152394", "0.6010795", "0.60062736", "0.6003301", "0.60011417", "0.5996892", "0.59956276", "0.5981172", "0.59587324", "0.5941083", "0.5941083", "0.5940703", "0.5940171", "0.5931877", "0.59243256", "0.59229726", "0.59216654", "0.5920861", "0.59193826", "0.59091204", "0.58899224", "0.587351", "0.5873003", "0.5869744", "0.5857007", "0.58566165", "0.5851014", "0.584053", "0.5825945", "0.58249897", "0.58236086", "0.58051705", "0.5796429", "0.57953405", "0.5793005", "0.579249", "0.579062", "0.579062", "0.57759905", "0.57710093", "0.5767472", "0.57664406", "0.5763515", "0.5763515", "0.5754703", "0.5751538", "0.5749878", "0.5738007", "0.5722589", "0.5722019", "0.5721922", "0.5720399", "0.5719706" ]
0.695631
10
Wait for the domain to receive its IP address
def wait_for_ip_address(domain, timeout = 300)
  debug('Waiting for domain network to assign an IP address')
  loop do
    break if timeout <= 0
    if domain.public_ip_address
      debug("IP address: #{domain.public_ip_address}")
      break
    else
      debug('IP address not found...')
      timeout -= 5
      sleep 5
    end
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wait_for_startup\n TCPSocket.wait_for_service_with_timeout(:host => http_ip,\n :port => http_port,\n :timeout => 10)\n end", "def wait_connection; end", "def wait_connection; end", "def prepare_domain(domain)\n domain.start unless domain.active\n wait_for_ip_address(domain)\n end", "def wait_for_ipv4(timeout, vm)\n sleep_time = 5\n print 'Waiting for ipv4 address.'\n tries = 0\n start_search_ip = true\n max_tries = timeout > sleep_time ? timeout / sleep_time : 1\n while start_search_ip && (tries += 1) <= max_tries\n print '.'\n sleep sleep_time\n vm_ip = vm.guest.ipAddress if vm_guest_ip?(vm)\n start_search_ip = false if @vm_helper.open_port?(vm_ip, @vm_helper.port, 1)\n end\n raise 'Timed out waiting for ipv4 address!' if tries > max_tries\n puts 'Found ipv4 address!'\n true\n end", "def wait_for_irc\n while @mockirc.ready?\n sleep 0.05\n end\n\n # For safety, we need to wait yet again to be sure YAIL has processed the data it read.\n # This is hacky, but it decreases random failures quite a bit\n sleep 0.1\n end", "def got_address(_)\n self.getting = false\n connect!\n end", "def wait\n byte = @receiver.read(1)\n raise DeadWakerError, \"can't wait on a dead waker\" unless byte == PAYLOAD\n rescue IOError, RuntimeError\n raise DeadWakerError, \"can't wait on a dead waker\"\n end", "def wait\n config.ui.logger.debug { \"Waiting for socket to become available; timeout after #{config.wait} seconds.\" }\n Timeout.timeout(config.wait) do\n until ready?\n config.ui.logger.debug { \"Sleeping 1 second.\" }\n sleep(1)\n end\n end\n true\n rescue Timeout::Error => e\n config.ui.logger.warn { \"socket(#{config.host}:#{config.port}) timeout!\" }\n false\n end", "def wait_for_guest_ip vm, params={}\n raise ArgumentError.new('vm is not a VirtualMachine') unless vm.is_a? RbVmomi::VIM::VirtualMachine\n raise ArgumentError.new('Argument is not a Hash') unless params.is_a? Hash\n params = {:maxtime => 300, :delay => 10}.merge(params)\n start_time = Time.now.to_f\n ip = nil\n while (Time.now.to_f - start_time) < params[:maxtime]\n if vm.guest.guestState == 'running' && vm.guest.ipAddress != ''\n ip = vm.guest.ipAddress\n break\n end\n sleep params[:delay]\n end\n raise \"Timeout while wait_for_guest_ip\" unless ip\n ip\n end", "def listen()\n\t\t\n\t\tputs \"Listening for dns traffic...\"\n\t\t#setup the filter to only grab dns traffic from the victim\n\t\tfilter = \"udp and port 53 and src \" + @victim_ip\n\n\t\t# Start packet sniffing\n cap = PacketFu::Capture.new(:iface => @ifname, :start => true,\n :promisc => true, :filter => filter, :save => true)\n cap.stream.each do |pkt|\n\n \t if PacketFu::UDPPacket.can_parse?(pkt) then\n packet = PacketFu::Packet.parse(pkt)\n\n dns_type = packet.payload[2].unpack('h*')[0].chr + \\\n packet.payload[3].unpack('h*')[0].chr\n\n\t\t\t\t\tif dns_type == '10' #not really ten, rather 1-0 (binnary) flag\n\n\t\t\t\t\t\tdomain_name = extract_domain_name(packet.payload[12..-1])\t\n\n\t\t\t\t\t # Check if domain name field is empty\n if domain_name.nil? then\n puts \"Empty domain name field\"\n next\n end # domain_name.nil?\n\n send_response(packet, domain_name)\n end\n end # UDPPacket.can_parse?\n end #end packet capturing\n end", "def wait_connection=(_arg0); end", "def wait\n \n # If the command has not been sent, or a response was already received. a\n # call to this method doesn't make any sense.\n raise Exception.new(\"Can't wait for response. 
The command was not send yet, or a response was already received.\") if @response_received == nil\n \n # Wait until a response was received from the server.\n synchronize do\n @response_received.wait_until { response != nil }\n end\n \n end", "def receive_ping\n done\n end", "def wait_for_domain(bootstrap_options, vm, machine_spec, action_handler)\n return unless bootstrap_options[:customization_spec]\n\n domain = if bootstrap_options[:customization_spec].is_a?(String) && is_windows?(vm)\n spec = vsphere_helper.find_customization_spec(bootstrap_options[:customization_spec])\n spec.identity.identification.joinDomain\n elsif bootstrap_options[:customization_spec].is_a?(String) && !is_windows?(vm)\n spec = vsphere_helper.find_customization_spec(bootstrap_options[:customization_spec])\n spec.identity.domain\n else\n bootstrap_options[:customization_spec][:domain]\n end\n\n return unless domain\n\n if is_windows?(vm) && domain != 'local'\n start = Time.now.utc\n trimmed_name = machine_spec.name.byteslice(0, 15)\n expected_name = \"#{trimmed_name}.#{domain}\"\n action_handler.report_progress(\n \"waiting to domain join and be named #{expected_name}\"\n )\n until (Time.now.utc - start) > 30 ||\n (vm.guest.hostName == expected_name)\n print '.'\n sleep 5\n end\n end\n end", "def ssh_ip\n 5.times do\n ip = read_guest_ip_dhcp\n return ip unless ip.empty?\n\n ip = read_guest_ip_prlctl\n return ip unless ip.empty?\n\n sleep 2\n end\n\n # We didn't manage to determine IP - return nil and\n # expect SSH client to do a retry\n return nil\n end", "def wait\n @lock.synchronize do\n @wait_count += 1\n if @wait_count == hosts.size\n @wait_count = 0\n @cond.broadcast\n else\n @cond.wait(@lock)\n end\n end\n end", "def wait_until_not_full; end", "def listening\n make_promise(@core.web3.JS[:net], 'getListening')\n end", "def dns\n if @dns.nil?\n begin\n @dns = Timeout::timeout(TIMEOUT) do\n without_warnings do\n Net::DNS::Resolver.start(absolute_domain).answer if domain\n end\n end\n @dns ||= false\n rescue Exception\n @dns = false\n end\n end\n @dns || nil\n end", "def getNameserverIPs(domain, addrtype = Resolv::DNS::Resource::IN::A)\n myresolv = Resolv::DNS.new()\n\n nameserver_addresses=Array.new\n myresolv.each_resource(domain, Resolv::DNS::Resource::IN::NS) do |nsrsc|\n nameserver_addresses.push(myresolv.getresource(nsrsc.name, addrtype).address)\n end\n\n myresolv.close()\n\n return nameserver_addresses\nend", "def wait_for_host(seconds=5)\n platform.wait_for_host(seconds)\n end", "def wait_connection_attempt_result(timeout: nil)\n connection_future.wait(timeout)\n connection_future.complete?\n end", "def handle_getaddr\n @node.connections.sample.send_getaddr\n {:state => \"Sending getaddr...\"}\n end", "def test_wait_for_public_ip\n assert inst.wait_for_public_ip\n end", "def wait\n self.listener.wait if self.listener\n end", "def wait_for_up!\n instance.wait_for_up!('eth1')\n end", "def receive_ehlo_domain domain\n true\n end", "def receive_ehlo_domain domain\n true\n end", "def receive_ehlo_domain domain\n true\n end", "def wait_for_network\n\t\t\n\t\t# keep retrying until the\n\t\t# network comes up (if ever)\n\t\tuntil csq = signal_strength\n\t\t\tsleep 1\n\t\tend\n\t\t\n\t\t# return the last\n\t\t# signal strength\n\t\treturn csq\n\tend", "def wait_readable\n if evented?\n Celluloid.current_actor.wait_readable(@socket)\n else\n Kernel.select([@socket])\n end\n end", "def success_confirm_address_hp\n\tsleep 5\n\t page.has_content?('Su dirección ha sido guardada exitosamente')\nend", "def 
wait_until_ready\n # this method may be left unimplemented if that is applicable\n end", "def whoisQuery(domain)\n\n\t#- Begin routine\n\tbegin\n\t\t#- Attempt \n\t\tr = Whois.whois(domain)\n\t#- Failsafe triggered\n\trescue \n\t\t#- Silence: Nothing to do\n\t#- End routine\n\tend\n\t\n\t#- Connection was successful, port was open\n\tif r\n\t\t#- Let user know port was open\n\t\tputs r\n\t#- End connection state routine\n\tend\n\t\n#- End whoisQuery method\nend", "def ping()\n\n ip = Resolv.getaddress(@host)\n puts ('ip: ' + ip.inspect).debug if @debug\n valid = pingecho(ip)\n puts ('valid: ' + valid.inspect).debug if @debug \n \n @results[:ping] = if valid then\n a = [valid]\n 4.times {sleep 0.01; a << pingecho(ip)}\n (a.min * 1000).round(3)\n else\n nil\n end\n\n end", "def test_IP_IPOK\r\n msg_out = \"IP #{$dominio_registrado}\"\r\n msg_esperada = \"IPOK #{$ip_registrado}\"\r\n sock = UDPSocket.new\r\n sock.connect($central_ip, $central_porta)\r\n sock.print(msg_out)\r\n msg_in = sock.recvfrom(20)[0]\r\n assert_equal(msg_esperada, msg_in)\r\n end", "def wait_link_alive\n wait_link_alive_sync.synchronize do\n while !disconnected? && !link_alive?\n Distributed.warn \"#{self}: link lost, waiting\"\n wait_link_alive_cond.wait(wait_link_alive_sync)\n end\n socket\n end\n end", "def check_ip; end", "def serverup?(ip, port)\n http = Net::HTTP.start(ip, port, {open_timeout:3, read_timeout:3})\n response = http.send_request('GET', '/')\n JSON.parse(response.body)\nrescue Timeout::Error, SocketError, Errno::ECONNREFUSED\n nil\nend", "def on_getaddr\n addrs = @node.config[:announce] ? [@node.addr] : []\n addrs += @node.addrs.select{|a| a.time > Time.now.to_i - 10800 }.shuffle[0..250]\n log.debug { \"<< addr (#{addrs.size})\" }\n send_data P::Addr.pkt(*addrs)\n end", "def wait_for_message\n listen_for_messages\n loop { publish_message(@current_user, gets.strip) }\n end", "def fetch_pg2(cid)\n params2 = {\"c\"=>\"app\",\"m\"=>\"show_legal_person\", \"legal_code_id\"=>cid}\n pg2 = nil\n begin\n Timeout::timeout(5) {\n pg2 = @br.post(BASE_URL + \"/main.php\",params2,HDR)\n }\n rescue Exception => exc\n puts \"ERROR: #{exc.message} in scrape() pg2! \\nTrying again in 5 seconds.\"\n sleep 5\n begin\n #continues if connection is active\n break if Ping.pingecho(\"google.com\",10,80)\n puts \"waiting for ping google.com\"\n sleep 2\n end while(true)\n return fetch_pg2(cid)\n end\n return pg2\nend", "def ipaddr?; end", "def wait_linux_payload\r\n print_status(\"#{rhost}:#{rport} - Waiting for the victim to request the ELF payload...\")\r\n\r\n waited = 0\r\n while (not @elf_sent)\r\n select(nil, nil, nil, 1)\r\n waited += 1\r\n if (waited > datastore['HTTP_DELAY'])\r\n fail_with(Exploit::Failure::Unknown, \"#{rhost}:#{rport} - Target didn't request request the ELF payload -- Maybe it cant connect back to us?\")\r\n end\r\n end\r\n end", "def wait; end", "def wait; end", "def wait; end", "def pinger\n binding.pry\n @livehosts = []\n @ips_to_check.each do |ip|\n if(ip.split(\".\").last != \"0\" || ip.split(\".\").last != \"255\")\n if alive?(ip)\n @livehosts << ip\n end\n end\n end\n puts @livehosts\n end", "def recvfrom( length=65535, timeout=def_read_timeout )\n\t\tresult = nil\n\t\t# force a timeout on the wait for an incoming datagram\n\t\tbegin\n\t\t\tTimeout.timeout( timeout ) {\n\t\t\t\twhile( true )\n\t\t\t\t\t# wait untill we have at least one datagram in the queue\n\t\t\t\t\tif( @datagrams.empty? 
)\n\t\t\t\t\t\tRex::ThreadSafe.sleep( 0.2 )\n\t\t\t\t\t\tnext\n\t\t\t\t\tend\n\t\t\t\t\t# grab the oldest datagram we have received...\n\t\t\t\t\tresult = @datagrams.shift\n\t\t\t\t\t# break as we have a result...\n\t\t\t\t\tbreak\n\t\t\t\tend\n\t\t\t}\n\t\trescue Timeout::Error\n\t\t\tresult = nil\n\t\tend\n\t\t# if no result return nothing\n\t\tif( result == nil )\n\t\t\treturn [ '', nil, nil ]\n\t\tend\n\t\t# get the data from this datagram\n\t\tdata = result[0]\n\t\t# if its only a partial read of this datagram, slice it, loosing the remainder.\n\t\tresult[0] = data[0,length-1] if data.length > length\n\t\t# return the result in the form [ data, host, port ]\n\t\treturn result\n\tend", "def wait\n\t\t\t\[email protected]\n\t\t\tend", "def wait(timeout = nil)\n\t\t\tbegin\n\t\t\t\[email protected](timeout)\n\t\t\trescue DRb::DRbConnError\n\t\t\tend\n\t\tend", "def domain_check(name)\n options = { \"domains\" => [ {\"dname\" => name} ] }\n request_v2(\"domain\", \"check\", options)\n if is_success?\n record = response[\"answer\"][\"domains\"].first\n record && record[\"error_code\"].nil? && record[\"result\"] == \"Available\"\n end\n end", "def wait_for_data(timeout)\n return if timeout == -1\n\n ready = false\n begin\n ready = IO.select([self], nil, [self], timeout)\n rescue IOError => exception\n logger.warn \"#read Connection failure while waiting for data: #{exception.class}: #{exception.message}\"\n close if close_on_error\n raise Net::TCPClient::ConnectionFailure.new(\"#{exception.class}: #{exception.message}\", address.to_s, exception)\n rescue Exception\n # Close the connection on any other exception since the connection\n # will now be in an inconsistent state\n close if close_on_error\n raise\n end\n\n unless ready\n close if close_on_error\n logger.warn \"#read Timeout after #{timeout} seconds\"\n raise Net::TCPClient::ReadTimeout.new(\"Timedout after #{timeout} seconds trying to read from #{address}\")\n end\n end", "def wait_linux_payload\r\n print_status(\"#{rhost}:#{rport} - Waiting for the target to request the ELF payload...\")\r\n\r\n waited = 0\r\n while (not @elf_sent)\r\n select(nil, nil, nil, 1)\r\n waited += 1\r\n if (waited > datastore['HTTP_DELAY'])\r\n fail_with(Exploit::Failure::Unknown, \"#{rhost}:#{rport} - Target didn't request request the ELF payload -- Maybe it can't connect back to us?\")\r\n end\r\n end\r\n end", "def wait_until_unhealthy!\n agent = consul.get(\"/agent/self\")[\"Member\"][\"Name\"]\n consul.get_while(\"/health/node/#{agent}\") do |data|\n status = data.detect {|x| x[\"CheckID\"] == \"service:#{name}\" }[\"Status\"]\n status == 'passing'\n end\n end", "def listen\n return @listener if @listener and @listener.alive?\n\n @listener = Thread.start do\n loop do\n response, (family, port, hostname, address) = @socket.recvfrom 1024\n\n begin\n adv = parse response\n\n info = case adv\n when Notification then adv.type\n when Response then adv.target\n when Search then adv.target\n else 'unknown'\n end\n\n response =~ /\\A(\\S+)/\n log :debug, \"SSDP recv #{$1} #{hostname}:#{port} #{info}\"\n\n @queue << adv\n rescue\n warn $!.message\n warn $!.backtrace\n end\n end\n end\n end", "def get(timeout=nil)\n data, saddr, sport = recvfrom(65535, timeout)\n return data\n end", "def wait(timeout: nil)\n if connected?\n client.wait(timeout: timeout)\n else\n wait_connection_attempt_result(timeout: timeout)\n end\n end", "def portainer?(ip, segundos=1) \n Timeout::timeout(segundos) do \n begin\n TCPSocket.new(ip, \"#{$port}\").close # Check Portainer 
port.\n true\n rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH, SocketError\n false # If Exception, do not count this host.\n end\n end\n rescue Timeout::Error # If Timeout, do not count this host.\n false\nend", "def on_getaddr\n #addrs = @node.addrs.select{|a| a.time > Time.now.to_i - 10800 }.shuffle[0..250]\n #p \"<< addr (#{addrs.size})\"\n #send_data P::Addr.pkt(*addrs)\n end", "def check_connection\n one_wait = 5\n max_wait = 5\n request = Net::HTTP::Get.new('/')\n wait = 0;\n while (wait < max_wait)\n begin\n response = Net::HTTP.start(@url.host, @url.port) {|http|\n http.request(request)\n }\n break if Net::HTTPForbidden === response\n break if Net::HTTPNotFound === response\n break if Net::HTTPSuccess === response\n # When we try to connect to a down server with an Apache proxy, \n # we'll get Net::HTTPBadGateway and get here\n rescue Errno::ECONNREFUSED\n # When we try to connect to a down server without an Apache proxy, \n # such as a dev instance, we'll get here\n end\n sleep one_wait;\n wait += one_wait\n end\n if (wait == max_wait)\n puts(\"-- ERROR: couldn't connect to test host on \" + @url.host.to_s)\n return false\n end\n puts(\"-- SUCCESS: test host is alive !\\n\")\n return true\nend", "def read_wait(timeout: nil)\n !!IO.select([io], [], [], timeout)\n end", "def wait_for_host(entry)\n ssh_options = { auth_methods: ['password'],\n config: false,\n password: cobbler_root_password,\n user_known_hosts_file: '/dev/null' }\n prompts = { number_of_password_prompts: 0 }\n options = {}\n config = { log_level: :warn }\n\n ssh_transport =\n Chef::Provisioning::Transport::SSH.new(entry[:ip_address],\n 'ubuntu',\n ssh_options.merge(prompts),\n options,\n config)\n\n #\n # If it takes more than half an hour for the node to respond,\n # something is really broken.\n #\n # This will make 60 attempts with a 1 minute sleep between attempts,\n # or timeout after 61 minutes.\n #\n Timeout.timeout(3720) do\n max = 60\n 1.upto(max) do |idx|\n break if ssh_transport.available?\n\n puts \"Waiting for #{entry[:hostname]} to respond to SSH \" \\\n \"on #{entry[:ip_address]} (attempt #{idx}/#{max})\"\n sleep 60\n end\n end\n\n if ssh_transport.available?\n puts \"Reached #{entry[:hostname]} via SSH, continuing\"\n true\n else\n raise \"Failed to reach #{entry[:hostname]} via SSH!\"\n end\nend", "def wait\n @notifier.wait if @notifier\n end", "def wait\n @notifier.wait if @notifier\n end", "def wait_end()\n begin\n loop do\n sleep(TICK/1000.0) while (self.connected?() rescue nil)\n break\n end\n rescue Exception => e\n end\n end", "def resolvable?(domain)\n Resolv.getaddress domain\n true\n rescue Resolv::ResolvError => _e\n false\n end", "def resolvable?(domain)\n Resolv.getaddress domain\n true\n rescue Resolv::ResolvError => _e\n false\n end", "def wait_on_access\n if @server.running?\n @running_threads -= 1\n #puts \"waiting\"\n @condvar.wait(@mutex)\n #puts \"woken\"\n @running_threads += 1\n end\n end", "def resolve(node)\n begin\n Timeout::timeout(@timeout) do\n Resolv.each_address(host) do |ip|\n if ip =~ Resolv::IPv4::Regex\n @ip ||= ip\n break\n end\n end\n raise Resolv::ResolvError unless @ip\n end\n @resolved ||= \"#{ip}:#{port}\"\n rescue Timeout::Error, Resolv::ResolvError\n Loggable.warn(\" MOPED:\", \"Could not resolve IP for: #{original}\", \"n/a\")\n node.down! 
and false\n end\n end", "def checkConnect(dns, sshport)\n connectionTimeout = Time.now \n connectionTimeout = connectionTimeout + (10 * 10)\n connection = false\n\n while ((connection == false) && ( Time.now < connectionTimeout )) do \n if dns.nil? || dns.empty? then\n log(\"ERROR\", \"Invalid dns\" + dns)\n return false\n end\n log(\"INFO\", \"Checking connectivity to: \" + dns)\n\n connection = ssh2_connect(dns, sshport)\n if connection == false then\n sleep(10)\n end\n end\n\n if connection == false then\n log(\"ERROR\", \"Failed to connect to \" + dns )\n return false\n else\n return true\n end\n end", "def startup_wait!\n begin\n Timeout::timeout(startup_wait) do\n sleep 1 until (Felixwrapper.is_port_in_use? self.port and Felixwrapper.is_responding? self.port)\n end \n rescue Timeout::Error\n logger.warn \"Waited #{startup_wait} seconds for felix to start, but it is not yet listening on port #{self.port}. Continuing anyway.\"\n end\n end", "def connection_completed\n puts \"The connection for #{@my_address} has been successfully completed.\"\n end", "def wait_until_ready!\n Timeout.timeout(timeout) do\n begin\n Chef::Log.debug \"trying to open #{endpoint}\"\n open(endpoint)\n rescue SocketError,\n Errno::ECONNREFUSED,\n Errno::ECONNRESET,\n Errno::ENETUNREACH,\n OpenURI::HTTPError => e\n # If authentication has been enabled, the server will return an HTTP\n # 403. This is \"OK\", since it means that the server is actually\n # ready to accept requests.\n return if e.message =~ /^403/\n Chef::Log.debug(\"Redmine is not accepting requests - #{e.message}\")\n sleep(0.5)\n retry\n end\n end\n rescue Timeout::Error\n raise RedmineNotReady.new(endpoint, timeout)\n end", "def wait_for_unrouted_messages_processing(timeout:)\n sleep(0.05) # gives exchange some time to receive retuned message\n\n return unless @unrouted_message_processing\n\n logger.warn { \"Waiting up to #{timeout} seconds for unrouted messages handling\" }\n\n Timeout.timeout(timeout) { sleep 0.01 while @unrouted_message_processing }\n rescue Timeout::Error\n logger.warn { 'Some unrouted messages are lost on process exit!' }\n end", "def listen_wait\n\t\t\tif block_given?\n\t\t\t\[email protected] { yield }\n\t\t\telse\n\t\t\t\[email protected]\n\t\t\tend\n\t\tend", "def wait_for_ip(vm, machine_options, machine_spec, action_handler)\n bootstrap_options = machine_options[:bootstrap_options]\n ip_to_bootstrap(bootstrap_options, vm)\n ready_timeout = machine_options[:ready_timeout] || 300\n msg1 = \"waiting up to #{ready_timeout} seconds for customization\"\n msg2 = \" and find #{machine_spec.location['ipaddress']}\" unless machine_spec.location['ipaddress'].nil? # unless vm_ip == vm.guest.ipAddress # RuntimeError: can't modify frozen String\n msg = [msg1, msg2].join\n action_handler.report_progress msg\n\n vm_ip = ip_to_bootstrap(bootstrap_options, vm) || vm.guest.ipAddress\n machine_spec.location['ipaddress'] = vm_ip\n until transport_for(\n machine_spec,\n machine_options[:bootstrap_options][:ssh],\n vm_ip\n ).available? 
|| remaining_wait_time(machine_spec, machine_options) < 0\n action_handler.report_progress(\n \"IP addresses found: #{all_ips_for(vm)}\"\n )\n vm_ip = ip_to_bootstrap(bootstrap_options, vm) || vm.guest.ipAddress\n machine_spec.location['ipaddress'] = vm_ip\n if has_ip?(vm_ip, vm)\n transport_for(\n machine_spec,\n machine_options[:bootstrap_options][:ssh],\n vm_ip\n ).available?\n end\n sleep 5\n end\n end", "def negotiate!(socket, timeout); end", "def wait\n true\n end", "def wait\n\t\t\t\[email protected]\n\t\t\tend", "def wait_for_TIME_WAIT_to_clear(threshold=20)\n while true\n count = `netstat -a inet -n|grep TIME_WAIT|wc -l`.to_i\n return if count < threshold\n puts \"TIME_WAIT count: #{count}\"\n sleep 1\n end\nend", "def wait_for_url(url)\n browser.wait_until(timeout: 5, message: \"URL did not become `#{url}`\") {\n browser.url.include? url\n }\n end", "def waiting?\n Puppet::SSL::CertificateRequest.search(\"*\").collect { |r| r.name }\n end", "def wait\n\tend", "def remote_host\n # NOTE: Celluloid::IO does not yet support non-blocking reverse DNS\n @socket.peeraddr(true)[2]\n end", "def waiting; end", "def waiting; end", "def ip\n orig, Socket.do_not_reverse_lookup = Socket.do_not_reverse_lookup, true # turn off reverse DNS resolution temporarily\n UDPSocket.open do |s|\n s.connect '64.233.187.99', 1\n s.addr.last\n end\n ensure\n Socket.do_not_reverse_lookup = orig\n end", "def resolve_host(host)\n sleep_time = 5\n timeout_at = Time.now + 60\n msg = \"Waiting to resolve hostname '#{host}'; next attempt in #{sleep_time} seconds until #{timeout_at}\"\n resolved_host = \"\"\n wait_until(msg, timeout_at.to_i, sleep_time, {}) do\n resolved_host = `dig +short #{host} | head -n1`.rstrip\n !resolved_host.empty?\n end\n resolved_host\n end", "def recvfrom_nonblock(maxlen)\n Socketry::UDP::Datagram.new(*@socket.recvfrom_nonblock(maxlen))\n rescue ::IO::WaitReadable\n :wait_readable\n rescue => ex\n # TODO: more specific exceptions\n raise Socketry::Error, ex.message, ex.backtrace\n end", "def post_init\n get_ip_address_and_port_or_close_connection\n end", "def resolve!\n Resolv.each_address(host) do |address|\n return @ip = address if address =~ pattern\n end\n end", "def check_dns_available(vm_name)\n begin\n dns_ip = Resolv.getaddress(vm_name)\n rescue Resolv::ResolvError\n # this is the expected case, swallow the error\n # eg \"no address for blah-daisy.example.com\"\n return ['', true]\n end\n [dns_ip, false]\n end", "def wait_port(port, timeout=5)\n deadline = Time.now + timeout\n begin # Wait for the port to be connectible\n TCPSocket.open(\"\", $port).close\n rescue Errno::ECONNREFUSED\n if Time.now > deadline then\n raise TestError, \"timed out waiting for port #{port}\"\n end\n sleep(0.1)\n retry\n end\nend", "def httpu_get(ip, port, data)\r\n socket = UDPSocket.new\r\n debug(\"Sending httpu data to #{ip} #{port}\")\r\n socket.send(data, 0, ip, port)\r\n sleep(0.25)\r\n max_times = 3\r\n times = 1\r\n begin\r\n debug(\"Attempt to receive data n°#{times}\")\r\n return_data = socket.recvfrom_nonblock(4096)\r\n return return_data\r\n rescue IO::WaitReadable\r\n if times < max_times\r\n times += 1\r\n IO.select([socket], nil, nil, 0.5)\r\n debug(\"Resending data httpu data\")\r\n socket.send(data, 0, ip, port)\r\n retry\r\n end\r\n end\r\n return nil\r\n end", "def recvfrom(maxlen, timeout: @read_timeout)\n set_timeout(timeout)\n\n begin\n while (result = recvfrom_nonblock(maxlen)) == :wait_readable\n next if @socket.wait_readable(time_remaining(timeout))\n\n raise 
Socketry::TimeoutError, \"recvfrom timed out after #{timeout} seconds\"\n end\n ensure\n clear_timeout(timeout)\n end\n\n result\n end", "def get_ip_address\n items = `ifconfig | grep \"inet addr\"`.split\n addresses = []\n items.each do |item|\n addresses << item if item =~ /addr:/\n end\n ip = \"\"\n addresses.each do |address|\n ip = address.split(':')[1]\n if ip != '127.0.0.1'\n break\n end\n end\n ip\nend", "def readWithTimeout(socket, length, timeout)\n rc = IO.select([socket], nil, nil, timeout)\n if ! rc\n raise \"Waiting for response from UDP tracker #{@host}:#{@trackerPort} timed out after #{@timeout} seconds\"\n elsif rc[0].size > 0\n socket.recvfrom(length)[0]\n else\n raise \"Error receiving response from UDP tracker #{@host}:#{@trackerPort}\"\n end\n end", "def wait_until_running\n until @running\n ControllableThread.sleep POLL_INTERVAL\n check_key\n end\n end" ]
[ "0.5925808", "0.5883973", "0.5883973", "0.58535105", "0.57947624", "0.5675791", "0.5657998", "0.5605223", "0.55885917", "0.5574352", "0.5573449", "0.54210705", "0.53711146", "0.5324268", "0.53106207", "0.5306715", "0.5298207", "0.52940786", "0.52822006", "0.5257564", "0.5251236", "0.5250785", "0.52149665", "0.52136743", "0.5203861", "0.52021635", "0.5168322", "0.51596075", "0.51596075", "0.51596075", "0.5157225", "0.51534003", "0.5147544", "0.51346433", "0.5126954", "0.5126566", "0.51221263", "0.51156354", "0.50876683", "0.5087109", "0.5074371", "0.50687826", "0.5068064", "0.506403", "0.50607866", "0.5035427", "0.5035427", "0.5035427", "0.5031786", "0.5025745", "0.50159454", "0.5009054", "0.5008363", "0.50062835", "0.5004925", "0.5004737", "0.5002835", "0.49932665", "0.49850243", "0.49842262", "0.49833524", "0.49788764", "0.4978412", "0.49693802", "0.49639082", "0.49639082", "0.49599528", "0.4945389", "0.4945389", "0.49443886", "0.49437353", "0.49418056", "0.49395564", "0.49377888", "0.49375182", "0.49355498", "0.49338523", "0.49312982", "0.49296343", "0.492711", "0.4926709", "0.49191", "0.4917127", "0.49156561", "0.4913229", "0.49119622", "0.49107787", "0.49107787", "0.49085915", "0.49063033", "0.49006614", "0.48999068", "0.48980388", "0.48868352", "0.48836735", "0.48823926", "0.48724812", "0.4872442", "0.48714545", "0.48700204" ]
0.7786815
0
PUT /resource We need to use a copy of the resource because we don't want to change the current user in place.
def update
  @admin = Admin.find(params[:admin][:id])
  self.resource = @admin
  @address = resource.address
  @address = resource.build_address if !@address
  prev_unconfirmed_email = resource.unconfirmed_email if resource.respond_to?(:unconfirmed_email)
  resource_updated = update_resource(resource, account_update_params)
  yield resource if block_given?
  if resource_updated
    AdminRoles.create!(:role_id => params[:role_id], :admin_id => resource.id) if params[:role_id]
    if params[:role_id]
      AdminRoles.where(:admin_id => resource.id).destroy_all
      AdminRoles.create!(:role_id => params[:role_id], :admin_id => resource.id)
    end
    if params[:admin][:attachment]
      resource.attachments.destroy_all if resource.attachments
      resource.attachments.create(:attachment => params[:admin][:attachment])
    end
    resource.address.update_attributes(:street => params[:admin][:address][:street], :suit => params[:admin][:address][:suit], :city => params[:admin][:address][:city], :state => params[:admin][:address][:state], :country => params[:admin][:address][:country], :zip_code => params[:admin][:address][:zip_code]) if params[:admin][:address]
    #end
    if is_flashing_format?
      flash_key = update_needs_confirmation?(resource, prev_unconfirmed_email) ? :update_needs_confirmation : :updated
      set_flash_message :notice, flash_key
    end
    respond_with resource, location: after_update_path_for(resource)
  else
    clean_up_passwords resource
    respond_with resource
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def put\n if(resource.collection?)\n Forbidden\n elsif(!resource.parent_exists? || !resource.parent_collection?)\n Conflict\n else\n resource.lock_check if resource.supports_locking?\n status = resource.put(request, response)\n response['Location'] = \"#{scheme}://#{host}:#{port}#{url_format(resource)}\" if status == Created\n response.body = response['Location']\n status\n end\n end", "def set_resource\n @user = current_user\n @resource = Resource.find(params[:id])\n end", "def put(resource, body = \"\", headers = {})\n prepare_request(:put, resource, body, headers)\n end", "def update_resource(resource, params)\n resource.role_id = 1\n resource.update_without_password(params)\n end", "def _http_put resource, path\n uri = ::URI.parse(resource.auth_uri)\n path = _path uri, path\n request = Net::HTTP::Put.new(path)\n _build_request resource, request\nend", "def _http_put resource, path\n uri = ::URI.parse(resource.auth_uri)\n path = _path uri, path\n request = Net::HTTP::Put.new(path)\n _build_request resource, request\nend", "def put(resource_path, body:, headers: {}, prefix: API_PREFIX)\n request(method: :put, resource_path: resource_path, headers: headers, body: body, prefix: prefix)\n end", "def update_resource(resource, attributes)\n resource.attributes = attributes\n resource.save\n resource\n end", "def put(resource, **params)\n\n execute(Net::HTTP::Put, 'PUT', resource, **params)\n\n end", "def update_resource(resource, params)\n resource.update_without_password(params)\n end", "def update_resource(resource, params)\n resource.update_without_password(params)\n end", "def update_resource(resource, params)\n resource.update_without_password(params)\n end", "def update_resource(resource, params)\n resource.update_without_password(params)\n end", "def update_resource(resource, params)\n resource.update_without_password(params)\n end", "def update_resource(resource, params)\n resource.update_without_password(params)\n end", "def update_resource(resource, params)\n resource.update_without_password(params)\n end", "def update_resource(resource, params)\n resource.update_without_password(params)\n end", "def update_resource(resource, params)\n resource.update_without_password(params)\n end", "def update_resource(resource, params)\n resource.update_without_password(params)\n end", "def update\n resource.update_attributes params[params_key], as: current_role\n respond_with resource\n end", "def update\n @user_resource = UserResource.find(params[:id])\n\n respond_to do |format|\n if @user_resource.update_attributes(params[:user_resource])\n format.html { redirect_to user_preferences_path(@user), notice: 'User resource was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user_resource.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_resource(resource, params)\n check_id_uniformity(params)\n resource.assign_attributes(json_api_attributes(params))\n authorize resource, :update?\n resource.save!\n resource\n end", "def update_resource(resource, params)\n resource.update_without_password(params)\n end", "def update\n if @resource.update(resource_params)\n flash[:notice] = notification_message('edit_success')\n render json: { redirect_url: request.referrer }, status: :created\n else\n render json: { message: notification_message('edit_failure') }, status: :unprocessable_entity\n end\n end", "def manage_resource(resource)\n unless resource.is_a?(OMF::SFA::Model::Resource)\n raise \"Resource '#{resource}' needs to be of type 'Resource', but is '#{resource.class}'\"\n end\n\n resource.account_id = _get_nil_account.id\n resource.save\n resource\n end", "def update\n unless User.admin_by_token?(request.cookies[\"token\"])\n render json: { error: \"invalid_token\" }, status: :unauthorized\n return\n end\n\n if @resource.update(resource_params)\n render json: @resource, status: :ok\n else\n render json: @resource.errors, status: :unprocessable_entity\n end\n end", "def put(path, request_options = {}, resource_options = {})\n response(:put, resource(resource_options)[path], request_options)\n end", "def update_resource(resource, params)\n resource.update_with_password(params)\n end", "def update_resource(resource, params)\n resource.update_with_password(params)\n end", "def update_resource(resource, params)\n resource.update_with_password(params)\n end", "def update_resource(resource, params)\n resource.update_with_password(params)\n end", "def update_resource(resource, params)\n resource.update_with_password(params)\n end", "def update_resource(resource, params)\n resource.update_with_password(params)\n end", "def update_resource(resource, params)\n resource.update_with_password(params)\n end", "def update\n respond_to do |format|\n if @resource.update(resource_params)\n @resource.saved_by(current_admin)\n format.html { redirect_to @resource, notice: 'Resource was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @resource.errors, status: :unprocessable_entity }\n end\n end\n end", "def update(resource, attributes = {})\n resource.client = self\n resource.update(attributes)\n end", "def update_resource object, attributes\n object.update attributes\n end", "def update_resource(resource_desc, resource_type, authorizer, new_attributes)\n resource = find_resource(resource_desc, resource_type, authorizer)\n authorizer.can_modify_resource?(resource, resource_type)\n resource.update(new_attributes)\n resource\n end", "def update_resource(object, attrs)\n object.update_with_password(*attrs)\n end", "def update_resource(resource, params)\n # abort params.inspect\n resource.update_without_password(params)\n end", "def set_resource(resource = nil)\n resource ||= resource_class.find(params[:id])\n check_action_whitelisted!(params[:action])\n authorize! params[:action].to_sym, resource\n instance_variable_set(\"@#{resource_name}\", resource)\n end", "def update\n authorize! :update, resource\n current_model_service.update resource, params\n yield if block_given? 
# after_update\n respond_with resource, location: helpers.show_path(resource)\n end", "def update(resource, id, format=@default_format)\n options = { resource: resource.class, id: id, format: format }\n reply = put resource_url(options), resource, fhir_headers(options)\n reply.resource = parse_reply(resource.class, format, reply)\n reply.resource_class = resource.class\n reply\n end", "def update\n @resource = Resource.find(params[:id])\n\n respond_to do |format|\n if @resource.update_attributes(params[:resource])\n format.html { redirect_to @resource, notice: 'Resource was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @resource.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @resource = Resource.find(params[:id])\n\n respond_to do |format|\n if @resource.update_attributes(params[:resource])\n format.html { redirect_to @resource, notice: 'Resource was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @resource.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @resource = Resource.find(params[:id])\n \n respond_to do |format|\n if @resource.update_attributes(params[:resource])\n flash[:notice] = 'Resource was successfully updated.'\n format.html { redirect_to(@resource) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @resource.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update_resource(resource, params)\n if [\"facebook\", \"github\", \"google_oauth2\"].include? current_user.provider\n params.delete(\"email\")\n params.delete(\"avatar\")\n resource.update_without_password(params)\n else\n resource.update_with_password(params)\n end\n end", "def update!(**args)\n @resource_id = args[:resource_id] if args.key?(:resource_id)\n end", "def update\n @resource = Resource.find(params[:id])\n\n if @resource.update_attributes(params[:resource])\n flash[:notice] = 'Resource was successfully updated.'\n redirect_to @resource\n else\n render :action => \"edit\"\n end\n end", "def new\n super\n @resource.user = current_user\n end", "def send_put(resource, data)\n\n url = URI.parse(primavera_path(resource))\n req = Net::HTTP::Put.new(url.to_s, initheader = {'Content-Type' => 'application/json'})\n req.body = data\n\n puts 'Sending PUT request to ' + url.to_s\n\n send_request(url, req)\n end", "def put\n conn = @client.authorized_connection(url: @client.object_api_url)\n res = conn.put do |req|\n req.headers['Content-Type'] = \"application/json\"\n req.url resource_uri\n req.body = raw.to_json\n end\n if res.success?\n data = JSON.parse(res.body)\n self.class.new(data, @client)\n else\n nil\n end\n end", "def update\n begin\n @resource = Entity.find params[:id]\n @resource.update_attributes! 
params[:entity]\n render :response => :PUT\n rescue Exception => e\n @error = process_exception(e)\n render :response => :error\n end\n end", "def update_profile_resource(resource, params)\n resource.update_without_password(params)\n end", "def update \n begin\n @resource = Account.find(params[:id])\n @resource.update_attributes!(params[:account])\n render :response => :PUT\n rescue Exception => e\n @error = process_exception(e)\n render :response => :error\n end\n end", "def resource=(new_resource)\n @resource = @resource.merge(new_resource)\n end", "def update\n @resource = Resource.find(params[:id])\n\n respond_to do |format|\n if @resource.update_attributes(params[:resource])\n flash[:notice] = 'Resource was successfully updated.'\n format.html { redirect_to(@resource) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @resource.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @resource = Resource.find(params[:id])\n\n respond_to do |format|\n if @resource.update_attributes(params[:resource])\n flash[:notice] = 'Resource was successfully updated.'\n format.html { redirect_to(@resource) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @resource.errors, :status => :unprocessable_entity }\n end\n end\n end", "def put(url, resource_name, options = {})\n build_response(resource_name) do\n connection.put do |req|\n req.url url\n req.body = options.to_json\n end\n end\n end", "def update_resource(resource, params)\n # if params['email'] != current_user.email || params['password'].present?\n # resource.update_with_password(params)\n # else\n resource.update_without_password(params.except('password', 'password_confirmation', 'current_password'))\n # end\n end", "def update\n user = self.resource = User.to_adapter.get!(send(:\"current_user\").to_key)\n\n if user.update_with_password(resource_params)\n set_flash_message :notice, :updated\n sign_in resource_name, user, :bypass => true\n respond_with user, :location => after_update_path_for(user)\n else\n clean_up_passwords user\n respond_with user\n end\n end", "def update\n @resource = Resource.find(params[:id])\n\n respond_to do |format|\n if @resource.update_attributes(params[:resource])\n flash[:success] = 'Resource was successfully updated.'\n format.html { redirect_to admin_resource_path(@resource.id) }\n format.json { head :ok }\n else\n flash[:error] = @resource.errors.full_messages.join('')\n format.html { render action: \"edit\" }\n format.json { render json: @resource.errors.full_messages.join(''), status: :unprocessable_entity }\n end\n end\n end", "def put(request, response)\n @resource.put(request, response)\n end", "def update_current_logged_in_user(args = {}) \n put(\"/users.json/current\", args)\nend", "def put(header = {})\n url = \"#{ApiClient.config.path}#{self.class.resource_path}\"\n response = ApiClient::Dispatcher.put(url, self.to_hash, header)\n attributes = ApiClient::Parser.response(response, url)\n update_attributes(attributes)\n end", "def update\n @resource = Resource.find(params[:id])\n respond_to do |format|\n if @resource.update_attributes(params[:resource])\n format.html { redirect_to(edit_admin_resource_path(@resource), :notice => 'Resource was successfully updated.') }\n format.xml { head :ok }\n else\n get_resource_info\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @resource.errors, :status => :unprocessable_entity }\n end\n end\n end", 
"def base_update(resource, id, options, format = nil, headers = nil)\n headers ||= {}\n headers[:accept] = \"#{format}\" if format\n format ||= @default_format\n headers[:content_type] = \"#{format}\"\n headers[:prefer] = @return_preference if @use_return_preference\n options = {} if options.nil?\n options[:resource] = resource.class\n options[:format] = format\n options[:id] = id\n reply = put resource_url(options), resource, fhir_headers(headers)\n reply.resource = parse_reply(resource.class, format, reply) if reply.body.present?\n reply.resource_class = resource.class\n reply\n end", "def find_and_update_resource\n model = class_name.find(params[:id])\n model.tap do |m|\n m.update get_secure_params\n set_resource_ivar m\n end\n end", "def put!\n request! :put\n end", "def update_resource(object, attributes)\n object.update(*attributes)\n end", "def modify_user(user)\n query_api_object Model::User, '/rest/user', user.to_hash, 'PUT'\n end", "def update_card_resource(card_id, resource, *paths)\n paths, options = extract_options(camp(resource), *paths)\n put card_path(card_id, *paths), options\n end", "def updateUser\n options = {\n :body => params.to_json,\n :headers => {\n 'Content-Type' => 'application/json',\n 'Authorization' => request.headers['Authorization']\n }\n }\n results = HTTParty.put(\"http://192.168.99.101:4051/users/\"+@current_user[\"id\"].to_s, options)\n render json: results.parsed_response, status: results.code\n end", "def put(name,&block)\n build_resource(name, :put, &block)\n end", "def update_resource(resource, params)\n if !current_user.provider.nil?\n params.delete(\"current_password\")\n resource.update_without_password(params)\n else\n resource.update_with_password(params)\n end\n end", "def update!(**args)\n @full_resource_name = args[:full_resource_name] if args.key?(:full_resource_name)\n @permission = args[:permission] if args.key?(:permission)\n @principal = args[:principal] if args.key?(:principal)\n end", "def update!(**args)\n @full_resource_name = args[:full_resource_name] if args.key?(:full_resource_name)\n @permission = args[:permission] if args.key?(:permission)\n @principal = args[:principal] if args.key?(:principal)\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def set_resource\n @resource = Resource.find(params[:id])\n end", "def update(resource,identifier,json)\n raise 'Not Yet Implemented'\n end", "def update\n # make sure user is authorized\n unless @resource\n return render json: {\n success: false,\n errors: ['Unauthorized']\n }, status: 401\n end\n\n # ensure that password params were sent\n unless password_resource_params[:password] and password_resource_params[:password_confirmation]\n return render json: {\n success: false,\n errors: ['You must fill out the fields labeled \"password\" and \"password confirmation\".']\n }, status: 422\n end\n\n if 
@resource.update_attributes(password_resource_params)\n return render json: {\n success: true,\n data: {\n user: @resource,\n message: \"Your password has been successfully updated.\"\n }\n }\n else\n return render json: {\n success: false,\n errors: @resource.errors\n }, status: 422\n end\n end", "def update!(**args)\n @granted = args[:granted] if args.key?(:granted)\n @permission = args[:permission] if args.key?(:permission)\n @resource = args[:resource] if args.key?(:resource)\n @resource_attributes = args[:resource_attributes] if args.key?(:resource_attributes)\n end", "def save_resource\n resource.save\n end", "def accept_resource\n resource = resource_class.accept_invitation!(update_resource_params)\n @user = User.find(resource.invited_by_id)\n resource.company_id = @user.company.id\n resource.save\n resource\n end", "def update!(**args)\n @resource_key = args[:resource_key] if args.key?(:resource_key)\n end", "def update_resource(resource, params)\n # Require current password if user is trying to change password.\n return super if params['password']&.present?\n\n # Allows user to update registration information without password.\n resource.update_without_password(params.except('current_password'))\n end", "def update\n respond_to do |format|\n if @resource.update(resource_params)\n format.html { redirect_to @resource, notice: 'Resource was successfully updated.' }\n format.json { render :show, status: :ok, location: @resource }\n else\n format.html { render :edit }\n format.json { render json: @resource.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @resource.update(resource_params)\n format.html { redirect_to @resource, notice: 'Resource was successfully updated.' }\n format.json { render :show, status: :ok, location: @resource }\n else\n format.html { render :edit }\n format.json { render json: @resource.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @resource.update(resource_params)\n format.html { redirect_to @resource, notice: 'Resource was successfully updated.' }\n format.json { render :show, status: :ok, location: @resource }\n else\n format.html { render :edit }\n format.json { render json: @resource.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n self.resource = resource_class.to_adapter.get!(send(:\"current_#{resource_name}\").to_key)\n\n if resource.update_attributes(params[resource_name])\n if is_navigational_format?\n if resource.respond_to?(:pending_reconfirmation?) && resource.pending_reconfirmation?\n flash_key = :update_needs_confirmation\n end\n set_flash_message :notice, flash_key || :updated\n end\n sign_in resource_name, resource, :bypass => true\n respond_with resource, :location => after_update_path_for(resource)\n else\n clean_up_passwords resource\n respond_with resource\n end\n end", "def update_resource(resource, params)\n # Require current password if user is trying to change password.\n return super if params[\"password\"]&.present?\n # Allows user to update registration information without password.\n resource.update_without_password(params.except(\"current_password\"))\n end" ]
[ "0.7224355", "0.7005823", "0.69810694", "0.6971466", "0.69555235", "0.6954134", "0.6753178", "0.6746057", "0.6743163", "0.67177105", "0.67177105", "0.6707934", "0.6707934", "0.6707934", "0.6707934", "0.6707934", "0.6707934", "0.6707934", "0.6707934", "0.6702649", "0.6697928", "0.6695045", "0.6686968", "0.668169", "0.66671896", "0.6659751", "0.664227", "0.6627111", "0.6627111", "0.6627111", "0.6627111", "0.6627111", "0.6627111", "0.6627111", "0.65771246", "0.65642464", "0.6540155", "0.6511574", "0.6510272", "0.65095145", "0.6435897", "0.6427307", "0.63887805", "0.6381756", "0.6381756", "0.63437545", "0.63271075", "0.63200504", "0.6312859", "0.6305082", "0.6287076", "0.62765217", "0.62747985", "0.6259813", "0.6257977", "0.6253746", "0.62329435", "0.62329435", "0.6231466", "0.6222577", "0.62140536", "0.6204791", "0.61853844", "0.61746037", "0.6167932", "0.6164669", "0.6158078", "0.6149717", "0.61487484", "0.61481327", "0.6142436", "0.61329854", "0.61319613", "0.6128234", "0.6127637", "0.6125567", "0.6125567", "0.61198395", "0.61198395", "0.61198395", "0.61198395", "0.61198395", "0.61198395", "0.61198395", "0.61198395", "0.61198395", "0.61198395", "0.61198395", "0.61198395", "0.6117758", "0.6112036", "0.6108507", "0.6107615", "0.6103826", "0.61006033", "0.60934764", "0.6087762", "0.6087762", "0.60876745", "0.60842854", "0.60839206" ]
0.0
-1
GET /resource/cancel Forces the session data which is usually expired after sign in to be expired now. This is useful if the user wants to cancel oauth signing in/up in the middle of the process, removing all OAuth session data.
def cancel expire_data_after_sign_in! redirect_to new_registration_path(resource_name) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cancel\n expire_session_data_after_sign_in!\n redirect_to new_registration_path(resource_name)\n end", "def cancel\n expire_session_data_after_sign_in!\n redirect_to new_registration_path(resource_name)\n end", "def cancel\n expire_session_data_after_sign_in!\n redirect_to new_registration_path(resource_name)\n end", "def cancel\n expire_session_data_after_sign_in!\n redirect_to new_registration_path(resource_name)\n end", "def cancel\n expire_session_data_after_sign_in!\n redirect_to new_registration_path(resource_name)\n end", "def cancel\n expire_session_data_after_sign_in!\n redirect_to new_registration_path(resource_name)\n end", "def cancel(oid, request, session)\n session.clear\n access_denied\n end", "def cancel\r\n expire_data_after_sign_in!\r\n redirect_to new_registration_path(resource_name)\r\n end", "def cancel\n set_session\n\n if @session.cancel\n respond_to do |format|\n format.html { redirect_back allow_other_host: false, fallback_location: batch_connect_sessions_url, notice: t(\"dashboard.batch_connect_sessions_status_blurb_cancel_success\") }\n format.json { head :no_content }\n end\n else\n respond_to do |format|\n format.html { redirect_back allow_other_host: false, fallback_location: batch_connect_sessions_url, alert: t(\"dashboard.batch_connect_sessions_status_blurb_cancel_failure\") }\n format.json { render json: @session.errors, status: :unprocessable_entity }\n end\n end\n end", "def cancel\n id = params[ :id ]\n @user = User.find( id )\n\n # We must have found a user in the database matching the ID.\n # The ID must be provided. There must be a currently logged in\n # user and their ID must match that of the cancellation request.\n # The user must not have a name yet - if they do, it implies a\n # created, active account.\n\n if ( @user.nil? or id.nil? or @current_user.nil? 
or ( id.to_i() != @current_user.id ) or @user.name )\n flash[ :error ] = \"Cancellation request not understood.\"\n else\n @user.destroy()\n flash[ :error ] = 'Sign in cancelled.'\n end\n\n redirect_to( signout_path() )\n end", "def cancel(params)\n request(Resources::RESOURCE_CANCEL, HTTP_METHOD_POST, params)\n end", "def cancel\n @service.context.post(@control_path, :action => 'cancel')\n self\n end", "def cancel\n appointment_service.put_cancel_appointment(cancel_params)\n head :no_content\n end", "def cancel(params={})\n self.request(__method__, params)\n end", "def cancel\n if @event.cancel\n render :json => @event.to_json, :status => :ok\n else\n render :nothing => true, :status => :unprocessable_entity\n end\n end", "def cancel\n __log_activity\n __debug_route\n __debug_request\n super\n rescue => error\n auth_failure_redirect(message: error)\n end", "def cancel\n flash[:notice] = \"Canceling accounts is not enabled.\"\n redirect_to root_path\n end", "def cancel!\n update(request_cancelled: true)\n end", "def cancel\n session_id = params[:id]\n @package = PackageSession.find(session_id)\n session = @package.session.all\n unless session.empty?\n session['status'] = \"cancel\"\n @package.session.update(session)\n AwsService.push_to_queue_cancel(\"get_package_service\", @package.id)\n end\n\n service_logger.note({cancel_package: session})\n redirect_to :authenticated_root\n end", "def cancel\n\t\t@notification = Notification::Cancel.new\n\t\[email protected] = @class_session\n\n\t\tunless params[:notification_cancel].blank?\n\t\t\trespond_to do |format|\n\t\t\t\[email protected]_attributes class_session_notification_params\n\t\t\t\tif @notification.save\n\t\t\t\t\[email protected]_to @class_session.subscribers\n\n\t\t\t\t\tformat.html { redirect_to @class_session, notice: 'Class session cancellation was successfully posted.' }\n\t\t\t\t\tformat.json { render action: 'show', status: :created, location: @class_session }\n\t\t\t\telse\n\t\t\t\t\tthrow\n\t\t\t\t\tformat.html { render :cancel, error: 'Class session cancellation failed.' }\n\t\t\t\t\tformat.json { render json: notification.errors, status: :unprocessable_entity }\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend", "def cancel\n @ride = Ride.find_by_id(params[:ride])\n current_user.cancel!(@ride)\n redirect_to root_path\n end", "def cancel_event\r\n event = Event.find_by(id: params[:eventid].to_i)\r\n if event.present? 
&& event.user_id == current_user.id\r\n event.update(status: 2)\r\n lt_update_event_status event, 'Canceled'\r\n render json: SuccessResponse.new(\r\n code: 200,\r\n message: 'Event cancelled.'\r\n ), adapter: :json, status: :ok\r\n else\r\n render json: ErrorResponse.new(\r\n code: 404,\r\n message: 'Event not found!'\r\n ), adapter: :json, status: :not_found\r\n end\r\n end", "def cancel()\n if current_user.state == 'requesting'\n current_user.change_state('online')\n else\n # Notify student that he is not in requesting state\n msg = I18n.t('students.errors.appointment.cancel')\n MessageBroadcastJob.perform_later(msg, 'error',\n student_id: current_user.id)\n end\n end", "def cancel_and_redirect\r\n redirect_back\r\n end", "def cancel()\n require_relative 'message'\n Message.new(@api, @api.do_request(\"POST\", get_base_api_path() + \"/cancel\"))\n end", "def cancel()\n require_relative 'message'\n Message.new(@api, @api.do_request(\"POST\", get_base_api_path() + \"/cancel\"))\n end", "def cancel\n if !current_user.worksessions.include?(@worksession)\n respond_to do |format|\n format.html {\n redirect_to user_worksessions_path(params[:user_id]), notice: 'You cannot cancel a worksession you are not signed up for.'\n }\n format.json { render :show, status: :created, location: @worksession }\n end\n else\n @user.worksessions.delete(@worksession)\n @worksession.users.delete(@user)\n @user.save\n if (@worksession.date.wday.between?(0, 1) and @worksession.users.size < 8) or (@worksession.date.wday.between?(5, 6) and @worksession.users.size < 4)\n @worksession.free = true\n @worksession.save\n end\n redirect_to available_path(current_user)\n @worksession.save\n end\n end", "def cancel\n @error = :cancelled\n end", "def cancel\n success = current_subscriber.cancel_subscription\n render json: { success: success }\n end", "def cancel\n response = CoachClient::Request.delete(url, username: @user1.username,\n password: @user1.password)\n set_user_confirmed(response.to_h)\n self\n end", "def cancel\n # Context is already cleared in before_action\n end", "def destroy\n set_cancel_status\n @ticket.save\n Account::Tickets::TicketLogs.cancel(@ticket)\n respond_to do |format|\n format.html { redirect_to @last_page, notice: 'Ticket was successfully canceled.' 
}\n format.json { head :no_content }\n end\n end", "def cancel\n redirect_to checkid_request.cancel_url\n end", "def sign_out\n @logout = true\n authenticate_api_user\n @logout = false\n revoke_access if @current_user\n head :no_content\n end", "def cancel(id)\n http.post(\"/nfse/#{id}/cancel\") do |response|\n respond_with_entity(response, Entities::NfseStatus)\n end\n end", "def cancel\n @result = :cancel\n $game_system.se_play($data_system.cancel_se)\n end", "def revoke\n @session.do_rpc_endpoint('/revoke')\n nil\n end", "def revoke_token\n request @google + '/accounts/AuthSubRevokeToken'\n\n @session_token = false\n end", "def sso_session_revoke_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: SsoApi.sso_session_revoke ...'\n end\n # resource path\n local_var_path = '/sso/session/revoke'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n header_params['X-UltraCart-Api-Version'] = @api_client.select_header_api_version()\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['ultraCartOauth', 'ultraCartSimpleApiKey']\n data, status_code, headers = @api_client.call_api(:DELETE, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: SsoApi#sso_session_revoke\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def cancel_request\n\t\tuser = current_user\n\n\t\tassignment = Assignment.find_by(request_id: params[:request_id])\n\t\tuser.accepteds.delete(assignment)\n\t\tpending_status = RequestStatus.find_by(description: \"pending accept\")\n\t\tpending_status.assignments << assignment\n\n\t\tredirect_back fallback_location: '/home'\n\tend", "def cancel\n end", "def cancel\n end", "def destroy\n @session_resource.destroy\n\n head :no_content\n end", "def cancel\n self.class.cancel(self)\n end", "def cancel()\n\t\tagent = spike_login()['agent'] # Mechanize agent at successful login page\n\t\tgsr = spike_login()['gsr'] # Mechanize page = successful login page\n\t\t\n\t\tcancel = gsr.link_with(:text => 'Cancel')\n\t\tif (cancel.nil?)\n\t\t\traise \"Error: You have no GSR reservation to cancel.\"\n\t\telse\n\t\t\tgsr = cancel.click\n\t\tend\n\tend", "def cancel_request\n if params.key?(:tutor_id) && params.key?(:student_id) && params.key?(:tutor_subject_id)\n pending_request = PendingTutorRequest.where('tutor_id = ? AND student_id = ? 
AND tutor_subject_id = ?',\n params[:tutor_id],\n params[:student_id],\n params[:tutor_subject_id]).first\n course = Course.find(TutorSubject.find(params[:tutor_subject_id]).course_id)\n else\n pending_request = PendingTutorRequest.find(params[:request_id])\n # Look into see if there is another way to do this.\n course = Course.find(TutorSubject.find(pending_request.tutor_subject_id).course_id)\n end\n\n pending_request.destroy\n course_code = course.course_prefix + course.course_code\n notifcation_params = { 'user_id' => params[:tutor_id],\n 'title' => 'Request Cencelled',\n 'body' => 'A request for ' + course_code + ' has been cancelled.',\n 'icon' => 'request_cancelled',\n 'color' => 'lightgrey',\n 'type' => 'cancel' }\n Notifications.send_notification(notifcation_params)\n\n head :ok\n end", "def disconnect(token)\n # You could reset the state at this point, but as-is it will still stay unique\n # to this user and we're avoiding resetting the client state.\n # session.delete(:state)\n session.delete(:token)\n\n # Send the revocation request and return the result.\n revokePath = 'https://accounts.google.com/o/oauth2/revoke?token=' + token\n uri = URI.parse(revokePath)\n request = Net::HTTP.new(uri.host, uri.port)\n request.use_ssl = true\n status request.get(uri.request_uri).code\nend", "def session_destroy(input={}, raw=false)\n response = get('mw/Session.Destroy', input, raw)\n end", "def revoke\n oauth_access_token.revoke\n head :ok\n end", "def cancel\n redirect_to root_url, flash[:alert] = \"Something went wrong.\"\n end", "def cancel!\n state_guard { modify_call 'Status' => 'cancelled' }\n end", "def sign_out\n request.session.delete(:authorized)\n end", "def cancel\n self.update_status :cancelled\n end", "def destroy\n appointment_request = current_user.pending_requests\n .find(params[:request_id])\n if appointment_request.cancel!\n redirect_to root_path\n else\n render status: 500\n end\n end", "def signed_out_other_scope(resource)\n ActiveRecord::SessionStore::Session.all(:conditions => ['user_id = ?', resource.id]).compact.each do |s|\n begin\n s.destroy\n rescue\n next\n end\n end\n end", "def sub_cancel\n (@shift = find_params_id(Shift)) || return\n #only user can cancel his own sub request or admin can cancel anybody's sub request\n if request.delete? and (from_admin? or (@shift.user == get_user))\n #somehow @shift.sub.destroy shortcut does not work properly\n s = @shift.sub\n s.destroy\n @shift.save\n redirect_with_flash \"Sub request cancelled.\", :action => :index, :date => @shift.shift_date, :anchor => @shift.shift_date\n else\n redirect_with_flash 'Illegal URL call'\n end\n end", "def cancel\n @delegation = current_user.managed_delegations.find_by(token: params[:delegation_id])\n if @delegation.nil?\n flash[:alert] = 'You are not authorised to access this page'\n redirect_to delegations_path and return\n else\n @delegation.deactivate('manager')\n flash[:notice] = 'Confirmed! 
You no longer control the account of ' + @delegation.employee.full_identity\n redirect_to delegations_path and return\n end\n end", "def cancel\n @confirmation_header = \"confirm cancellation\"\n @confirmation_body = \"Are you Sure to cancel this subscription?\"\n @cancel = \"No, Thank you\"\n @submit = \"Confirm cancellation\"\n end", "def cancel\r\n # @todo Emit a warning for attempts to cancel an action after it's been\r\n # executed\r\n @cancelled = true\r\n end", "def uncancel \n @project.update_attribute(:canceled, false)\n redirect_to project_path(@project)\n end", "def destroy\n if params[:cancel]\n flash[:notice] = t('account.cuenta_no_eliminada', :email => Settings.email_addresses[:contact])\n redirect_to account_path\n else\n @user = User.find(current_user.id)\n if @user.deactivate_account\n self.current_user.forget_me if logged_in?\n cookies.delete :auth_token\n reset_session\n flash[:notice] = t('account.cuenta_eliminada')\n redirect_to root_path\n else\n flash[:error] = t('account.cuenta_no_eliminada_razon')\n redirect_to account_path\n end\n end\n end", "def force_logout\n sign_out(resource) if user_signed_in?\n end", "def destroy\n sign_out(resource_name)\n doorkeeper_token.revoke\n\n render_meta message: I18n.t('devise.sessions.signed_out')\n end", "def cancel\n redirect_to( default_path ) if params[:commit] == 'cancel'\n end", "def cancel\n # Define this later\n end", "def cancel_trip\n label = request_label(:cancel, trip_id)\n \n @http_request_bundler.add(\n label, \n @url + \"/cancel_trip\", \n :delete,\n head: headers,\n query: { trip_id: trip_id, customer_id: customer_id, customer_token: customer_token }\n ).response!(label)\n end", "def revoke_session_with_http_info(revoke_session, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DefaultApi.revoke_session ...'\n end\n # verify the required parameter 'revoke_session' is set\n if @api_client.config.client_side_validation && revoke_session.nil?\n fail ArgumentError, \"Missing the required parameter 'revoke_session' when calling DefaultApi.revoke_session\"\n end\n # resource path\n local_var_path = '/api/kratos/public/sessions'\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body] || @api_client.object_to_http_body(revoke_session)\n\n # return_type\n return_type = opts[:debug_return_type]\n\n # auth_names\n auth_names = opts[:debug_auth_names] || []\n\n new_options = opts.merge(\n :operation => :\"DefaultApi.revoke_session\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DefaultApi#revoke_session\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def cancel\n throw(:abort)\n end", "def logout\n request.env[\"keycard.authentication\"] = notary.reject\n 
reset_session\n end", "def destroy\n __log_activity(\"LOGOUT #{current_user}\")\n __debug_route\n __debug_request\n user = current_user&.account&.dup\n opt = BS_AUTH ? { no_revoke: true?(params[:no_revoke]) } : {}\n delete_auth_data(**opt)\n super\n api_clear(user: user)\n set_flash_notice(user: user, clear: true)\n rescue => error\n auth_failure_redirect(message: error)\n end", "def logout\n response = @session.delete\n @auth_token = nil\n @rest.default_headers = { 'Content-Type' => 'application/json' }\n response\n end", "def cancel\n # renders static page\n end", "def user_logout\n res = http_delete(:uri=>\"/session\", :fields=>x_cookie)\n return res.code\n end", "def disable_my_other_sessions_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: FrontendApi.disable_my_other_sessions ...'\n end\n # resource path\n local_var_path = '/sessions'\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n header_params[:'X-Session-Token'] = opts[:'x_session_token'] if !opts[:'x_session_token'].nil?\n header_params[:'Cookie'] = opts[:'cookie'] if !opts[:'cookie'].nil?\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'DeleteMySessionsCount'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || []\n\n new_options = opts.merge(\n :operation => :\"FrontendApi.disable_my_other_sessions\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: FrontendApi#disable_my_other_sessions\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def logout\n handler = Proc.new do |request|\n if response.code == 204\n clear_session\n else\n case response.code\n when 401 then\n raise Jiralicious::NotLoggedIn.new(\"Not logged in\")\n else\n # Give Net::HTTP reason\n raise Jiralicious::JiraError.new(response)\n end\n end\n end\n\n request(:delete, '/rest/auth/latest/session', :handler => handler)\n end", "def paypal_cancel\r\n user_id = params[:user_id]\r\n @user = User.find(user_id)\r\n session[:user_id] = @user[:id]\r\n session[:user_email] = @user[:email]\r\n \r\n cancel_purchase(params[:id])\r\n flash[:notice] = I18n.t 'event.purchase.pur_can'\r\n redirect_to(:controller => 'home', :action=>'index') \r\n end", "def cancelEnv(cancel)\n $Logger.debug \"Cancelling requests for environment #{cancel.service} #{cancel.zone} #{cancel.envid}\"\n match = @reqList.select{|r| r.zone == cancel.zone && r.service == cancel.service && r.envid == cancel.envid}\n if match == nil or match[0] == nil\n # This can happen if oneenvd cancelling environment which didn't\n # request any resources\n $Logger.debug \"No matching requests for client #{cancel.service} #{cancel.zone} #{cancel.envid}\"\n return\n end\n match.each do |r|\n if r.status == \"ALLOCATED\"\n freeup(r)\n end\n r.status = \"CANCELLED\"\n $eventProcessor.updateRequest(r)\n $eventProcessor.expireRequest(r)\n 
@reqList.delete(r)\n end\n end", "def cancel\n order = current_user.customer.orders.find(params[:id])\n order.update(status: 9)\n render json: {is_success: true}, status: :ok\n end", "def cancel\n cookies.delete :payment_in_process\n redirect_to fines_path, flash: { error: (t 'mylibrary.fine_payment.cancel_html') }\n end", "def cancel\n set_params\n show_translation\n end", "def cancel_account\n @account.active = 0\n @account.endtime = Time.now\n @account.save\n \n redirect_to :controller => 'subs' and return false\n end", "def sign_out(resource_or_scope); end", "def sign_out(resource_or_scope); end", "def canceladd\n @session['groupcluster']=nil\n @session['groupshop']=nil\n @session['groupkey']=nil\n render :update do |page|\n page.redirect_to url_for(:controller=>'machines', :action=>'list')\n end\n end", "def logout\n params = {\n 'method' => :delete,\n 'command' => '/session'\n }\n\n response, headers = send_request(params)\n # reset auth key to nil\n @auth_key = nil\n end", "def __cancel__(what, &blk)\n req = Request.new\n req.verb = Request::Verb::CANCEL\n req.id = what.tag\n\n # Hold on to the tag as unavaiable for reuse until the cancel succeeds.\n @cbx[what.tag] = nil\n\n send(req) do |res|\n # Do not send any more responses from the server to this request.\n @cbx.delete(what.tag)\n blk.call(res) if blk\n end\n end", "def cancel(id); end", "def close\n @repo.request_http(:post, path('session/close'),\n :expected_status_code => 204)\n end", "def logout\n params = {\n 'method' => :delete,\n 'command' => '/session'\n }\n\n response, headers = send_request(params)\n # reset auth key to nil\n @auth_key = nil\n end", "def httpdigest_logout\n session.delete(SESSION_NONCE)\n session.delete(SESSION_OPAQUE)\n end", "def cancel_certificate\n @booking = Booking.where(user_id: current_user.id).first\n if @booking.certificate == true\n @booking.toggle!(:certificate)\n redirect_to account_users_path, flash: {notice: \"Successfully cancelled request!\"}\n end\n end", "def destroy\n id = shift_argument ||\n raise(Heroku::Command::CommandFailed, \"Usage: sessions:destroy [ID]\")\n session = request do\n api.request(\n :expects => 200,\n :headers => headers,\n :method => :delete,\n :path => \"/oauth/sessions/#{CGI.escape(id)}\"\n ).body\n end\n puts %{Destroyed \"#{session[\"description\"]}\".}\n end", "def revoke_access_token\n\t\t\tif session[:token]\n\t\t\t\t# Use either the refresh or access token to revoke if present.\n\t\t\t\ttoken = session[:token].to_hash[:refresh_token]\n\t\t\t\ttoken = session[:token].to_hash[:access_token] unless token\n\n\t\t\t\t# You could reset the state at this point, but as-is it will still stay unique\n\t\t\t\t# to this user and we're avoiding resetting the client state.\n\t\t\t\tsession.delete(:state)\n\t\t\t\tsession.delete(:token)\n\n\t\t\t\t# Send the revocation request and return the result.\n\t\t\t\trevokePath = 'https://accounts.google.com/o/oauth2/revoke?token=' + token\n\t\t\t\turi = URI.parse revokePath\n\t\t\t\trequest = Net::HTTP.new uri.host, uri.port\n\t\t\t\trequest.use_ssl = true\n\t\t\t\trequest.get uri.request_uri\n\t\t\tend\n\t\tend", "def destroy\n current_user.authentication_token = nil\n signed_out = (Devise.sign_out_all_scopes ? sign_out : sign_out(resource_name))\n render json: {status:0, data: nil}\n end" ]
[ "0.7635437", "0.7635437", "0.7635437", "0.7635437", "0.7635437", "0.7635437", "0.7367191", "0.7279831", "0.7144859", "0.6624382", "0.6509149", "0.65029466", "0.64647144", "0.63581675", "0.63340735", "0.6330951", "0.6324643", "0.63008875", "0.6219802", "0.6115184", "0.60845864", "0.6032165", "0.60305774", "0.60266834", "0.6023613", "0.6023613", "0.6006313", "0.59805304", "0.59180915", "0.58904314", "0.5884278", "0.5880454", "0.5867116", "0.5863791", "0.58475715", "0.583932", "0.5838967", "0.5830014", "0.58295995", "0.5818478", "0.58052546", "0.58052546", "0.5804786", "0.58043784", "0.5791716", "0.57818675", "0.57765937", "0.5751396", "0.57491004", "0.5739085", "0.57381094", "0.5726058", "0.5723481", "0.5710225", "0.57082826", "0.5697683", "0.5696683", "0.56922966", "0.56895715", "0.5683472", "0.56607914", "0.5660029", "0.56521285", "0.56431156", "0.56423557", "0.5638161", "0.5636373", "0.5629459", "0.56292075", "0.5624549", "0.56051606", "0.55996084", "0.55921537", "0.55916864", "0.5581809", "0.55796397", "0.5566879", "0.555522", "0.55538154", "0.5547243", "0.55286425", "0.5524136", "0.5524136", "0.55113894", "0.5511153", "0.5499427", "0.54968244", "0.54966", "0.5495922", "0.54890585", "0.54889756", "0.5480289", "0.5461845", "0.54615474" ]
0.73111695
13
By default we want to require a password check on update. You can overwrite this method in your own RegistrationsController.
def update_resource(resource, params) resource.update_with_password(params) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def should_validate_password?\n updating_password || new_record?\n end", "def should_validate_password?\n updating_password || new_record?\n end", "def need_change_password!\n return unless password_expiration_enabled?\n\n need_change_password\n save(validate: false)\n end", "def should_validate_password?\n \tupdating_password || new_record?\n end", "def update_resource(resource, params)\n # Require current password if user is trying to change password.\n return super if params[\"password\"]&.present?\n # Allows user to update registration information without password.\n resource.update_without_password(params.except(\"current_password\"))\n end", "def send_password_change_notification; end", "def update_resource(resource, params)\n # Require current password if user is trying to change password.\n return super if params['password']&.present?\n\n # Allows user to update registration information without password.\n resource.update_without_password(params.except('current_password'))\n end", "def send_password_change_notification\n # do not notify the admins for now\n end", "def password_required?; end", "def need_change_password?\n password_change_requested? || password_too_old?\n end", "def skip_password_change_notification!; end", "def password_change_new\n\n end", "def update\n super do |resource|\n # TODO (rspeicher): In Devise master (> 3.4.1), we can set\n # `Devise.sign_in_after_reset_password = false` and avoid this mess.\n if resource.errors.empty? && resource.try(:otp_required_for_login?)\n resource.unlock_access! if unlockable?(resource)\n\n # Since we are not signing this user in, we use the :updated_not_active\n # message which only contains \"Your password was changed successfully.\"\n set_flash_message(:notice, :updated_not_active) if is_flashing_format?\n\n # Redirect to sign in so they can enter 2FA code\n respond_with(resource, location: new_session_path(resource)) and return\n end\n end\n end", "def password_required?\n super\n end", "def password_required?\n super\n end", "def update_resource(resource, params)\n if params[\"password\"]&.present? or params[\"email\"]&.present?\n return super\n else\n resource.update_without_password(params.except(\"current_password\"))\n end\n end", "def update\n if params[:user][:password].present? || params[:user][:current_password].present?\n super\n else\n @user = User.find(current_user.id)\n if @user.update_without_password(params[:user])\n redirect_to after_update_path_for(@user), :notice => I18n.t(\"devise.registrations.updated\")\n else\n render \"edit\"\n end\n end\n end", "def require_password_change\n @attributes[:require_password_change]\n end", "def edit_password; end", "def password_required?; false; end", "def password_required?\n super && self.is_password_set\n end", "def password_needed?\n resource.authentications.empty? \\\n ? 
resource.update_with_password(params[resource_name])\\\n : resource.update_attributes(params[resource_name])\n end", "def check_password_change_required?\n password_change_required || check_password_expired?\n end", "def password_required?\n super if confirmed?\nend", "def password_required?\n super if confirmed?\n end", "def password_required?\n super if confirmed?\n end", "def password_required?\n super if confirmed?\n end", "def password_required?\n super if confirmed?\n end", "def password_required?\n super if confirmed?\n end", "def password_required?\n super if confirmed?\n end", "def check_password_changed\n self.temp_password = nil if ( changed.include?('encrypted_password') && !(changed.include?('temp_password')))\n end", "def change_password\r\n \r\n end", "def validate_on_update\n unless plain_password.blank?\n unless plain_password == password_confirmation\n errors.add_to_base(\"Passwords don't match\")\n end\n # Validate password criteria\n unless plain_password.length >= 6\n errors.add_to_base(\"Password must be at least 6 characters long.\")\n end\n end\n end", "def password_required?\n new_record? ? super : false\n end", "def password_required?\n new_record? ? super : false\n end", "def password_required?\n return false if self.guest? || self.customer?\n super\n end", "def validate_on_update\n unless plain_password.blank?\n unless plain_password == password_confirmation\n errors.add_to_base(\"Passwords don't match\")\n end\n # Validate password criteria\n unless plain_password.length >= 6\n errors.add_to_base(\"Password must be at least 6 characters long.\")\n end\n end\n end", "def allow_users_to_change_passwords?\n @policy.allow_users_to_change_password\n end", "def password_required?\n if new_record? && oauth_account\n false\n else\n super\n end\n end", "def password_required?\n new_record? ? false : super\n end", "def password_required?\n new_record? ? false : super\n end", "def password_required?\n new_record? ? false : super\n end", "def need_change_password\n return unless password_expiration_enabled?\n\n self.password_changed_at = nil\n end", "def password_required?\n confirmed? ? super : false\n end", "def password_required?\n false\n end", "def password_required?\n false\n end", "def password_required?\n false\n end", "def password_required?\n false\n end", "def password_required?\n false\n end", "def change_password\n #check if user is new or being updated\n if self.encrypted_password.present?\n #verifies password\n if self.password_check\n self.encrypt_password\n else\n raise \"error\"\n end\n else\n raise \"error\"\n end\n end", "def password_required?\n super if confirmed?\n end", "def password_required?\n super if confirmed?\n end", "def password_required?\n super if confirmed?\n end", "def password_required?\n # new_record? ? false : super\n false\n end", "def password_required? \n false \n end", "def save\r\n if valid_password?\r\n super\r\n else\r\n false\r\n end\r\n end", "def password_required?\n return false if !persisted? and provider.present?\n super\n end", "def change_password\n set_breadcrumbs(\"change_password\") \n if request.post? || request.patch? \n admin = Admin.find(current_admin.id)\n @check = params[:admin][:password] == params[:admin][:password_confirmation] && params[:admin][:password].present? && params[:admin][:password_confirmation].present?\n if admin.present? 
&& admin.valid_password?(params[:admin][:old_password])\n if @check \n if admin.update_attribute(:password, params[:admin][:password])\n sign_in admin, :bypass => true\n flash[:notice] = I18n.t('change_password.update.success')\n redirect_to admin_root_path\n else\n flash[:error] = I18n.t('common.error') \n end\n else\n flash[:error] = I18n.t('change_password.failure.password_is_not_match')\n end\n else\n flash[:error] = I18n.t('change_password.failure.invalid_old_password')\n end\n end\n end", "def admin_pwd_update\n @user = User.find_by_id(params[:user_id])\n @user.validate_pwd = true\n if @user.update(email: params[:user][:email], password: params[:user][:password], password_confirmation: params[:user][:password_confirmation])\n # if an admin is updating her own password, we need to get around Devise's automatic sign out\n if @user.id == current_user.id\n sign_in(@user, :bypass => true)\n end\n flash.keep[:notice] = 'The password for \"' + params[:user][:email] + '\" was successfully updated.'\n redirect_to '/users'\n else\n render :admin_pwd\n end\n end", "def update_with_password(params, *options)\n if encrypted_password.blank?\n update_attributes(params, *options)\n else\n super\n end\n end", "def update_with_password(params, *options)\n if encrypted_password.blank?\n update_attributes(params, *options)\n else\n super\n end\n end", "def update_with_password(params, *options)\n if encrypted_password.blank?\n update_attributes(params, *options)\n else\n super\n end\n end", "def update_with_password(params, *options)\n if encrypted_password.blank?\n update_attributes(params, *options)\n else\n super\n end\n end", "def update_with_password(params, *options)\n if encrypted_password.blank?\n update_attributes(params, *options)\n else\n super\n end\n end", "def update_with_password(params, *options)\n if encrypted_password.blank?\n update_attributes(params, *options)\n else\n super\n end\n end", "def update_with_password(params, *options)\n if encrypted_password.blank?\n update_attributes(params, *options)\n else\n super\n end\n end", "def update_with_password(params, *options)\n if encrypted_password.blank?\n update_attributes(params, *options)\n else\n super\n end\n end", "def update_with_password(params, *options)\n if encrypted_password.blank?\n update_attributes(params, *options)\n else\n super\n end\n end", "def update_with_password(params, *options)\n if encrypted_password.blank?\n update_attributes(params, *options)\n else\n super\n end\n end", "def require_password?(action)\n action == \"new\" ? true : false\n end", "def valid_password?; end", "def password_required?\n confirmed? ? super : false\n end", "def password_changed?\n !@new_password.blank?\n end", "def password_changed?\n !@new_password.blank?\n end", "def password_changed?\n !password.blank?\n end", "def password_required?\n allow_blank_password ? false : super\n end", "def password_required?\n allow_blank_password ? false : super\n end", "def needs_update_password? user, params\n user.email != params[:user][:email] ||\n params[:user][:password].present? 
||\n params[:user][:password_confirmation].present?\n end", "def password_changeable?\n @password_changeable\n end", "def require_password?\n if self.new_record?\n APP_CONFIG[:auto_activate] or self.password\n else\n true and !self.password.nil?\n end\n end", "def password\n if @user.update_with_password(password_params)\n render_update_success @user\n else\n render_failure @user\n end\n end", "def can_edit_password?\n encrypted_password.present?\n end", "def update_with_password(params, *options)\n\t\tif encrypted_password.blank?\n\t\t\tupdate_attributes(params, *options)\n\t\telse\n\t\t\tsuper\n\t\tend\n\tend", "def needs_password_change_email?\n encrypted_password_changed? && persisted?\n end", "def update_resource(resource, params)\n return super if params[\"password\"]&.present?\n resource.update_without_password(params.except(\"current_password\"))\n end", "def password_required?\n false\n end", "def password_required?\n false\n end", "def password_required?\n provider.present? ? false : super\n end", "def change_password\n @user = User.shod(params[:id])\n authorize! :update, @user\n end", "def check_password!\n if self.password != password_confirmation\n false\n end \n end", "def update_with_password params, *options\n if encrypted_password.blank?\n update_attributes(params, *options)\n else\n super\n end\n end", "def update_password\n @user = User.find_by_uuid(params[:id])\n if @user.update(user_params_with_password)\n if @user.send_password.present? && @user.send_password.to_i == 1\n @user.send_welcome_email\n end\n flash[:success] = t('messages.default_success')\n redirect_to users_path\n else\n flash[:error] = t('messages.default_error')\n render :edit_password\n end\n end", "def change_password_allowed?\n return true if auth_source_id.blank?\n return auth_source.allow_password_changes?\n end", "def check_admin_password\n if current_user&.has_role?(:admin) && current_user.email == \"[email protected]\" &&\n current_user&.greenlight_account? && current_user&.authenticate(Rails.configuration.admin_password_default)\n\n flash.now[:alert] = I18n.t(\"default_admin\",\n edit_link: change_password_path(user_uid: current_user.uid)).html_safe\n end\n end", "def valid_password?(password_input)\n if using_old_validation?\n Devise.secure_compare(cakephp_password_digest(password_input), self.old_password).tap do |validated|\n if validated\n self.password = password_input\n self.old_password = nil\n self.save(:validate => false)\n end\n end\n else\n super(password_input)\n end\n end", "def reset_password(*args)\n self.legacy_password = false\n super\n end", "def needs_password_update?\n password_hash.empty? or not password.empty?\n end", "def need_change_password!\n if self.expire_password_after.is_a? Fixnum or self.expire_password_after.is_a? Float\n need_change_password\n self.save(:validate => false)\n end\n end", "def update_with_password(params, *options)\n if authentications.present?\n update_attributes(params, *options)\n else\n super\n end\n end", "def password_required?\n if new_record?\n !(password.blank? && password_confirmation.blank?)\n else\n super\n end\n end", "def password_required?\n provider.blank? && super\n end" ]
[ "0.7431037", "0.7416546", "0.7395955", "0.7375806", "0.73549265", "0.73029804", "0.7261664", "0.72445995", "0.72270805", "0.7188048", "0.7187129", "0.71696925", "0.7148469", "0.7136121", "0.7136121", "0.71323127", "0.71269363", "0.7123784", "0.70972013", "0.708736", "0.70805496", "0.7072636", "0.7072002", "0.70668024", "0.7052217", "0.7052217", "0.7052217", "0.7052217", "0.7052217", "0.7052217", "0.7047877", "0.70466334", "0.70292693", "0.70135146", "0.70135146", "0.7011574", "0.7007701", "0.70044756", "0.69947696", "0.6994007", "0.6994007", "0.6994007", "0.6993444", "0.69862616", "0.6956303", "0.6956303", "0.6956303", "0.6956303", "0.6956303", "0.69537", "0.694802", "0.694802", "0.694802", "0.6938323", "0.6934343", "0.6921025", "0.69044495", "0.6899617", "0.68995917", "0.68942267", "0.68942267", "0.68942267", "0.68942267", "0.68942267", "0.68942267", "0.68942267", "0.68942267", "0.68942267", "0.68942267", "0.68891996", "0.6871034", "0.6867166", "0.6865124", "0.6865124", "0.68299", "0.6827014", "0.6827014", "0.68264455", "0.6822529", "0.681744", "0.68163997", "0.6809278", "0.6803374", "0.6801848", "0.6793145", "0.6792781", "0.6792781", "0.67844605", "0.6771464", "0.67714274", "0.67637193", "0.6758359", "0.67537856", "0.6726798", "0.67250186", "0.6720997", "0.67195994", "0.6718775", "0.671578", "0.671473", "0.67114174" ]
0.0
-1
Build a devise resource passing in the session. Useful to move temporary session data to the newly created user.
def build_resource(hash=nil) self.resource = resource_class.new_with_session(hash || {}, session) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def build_resource(hash=nil)\n self.resource = resource_class.new_with_session(hash || {}, session)\n attrs = session[\"devise.user_attributes\"]\n self.resource.first_name = attrs[\"first_name\"]\n self.resource.last_name = attrs[\"last_name\"]\n self.resource.picture = attrs[\"picture\"]\n end", "def build_resource(hash = nil)\n self.resource = resource_class.new_with_session(hash || {}, session)\n auth = session['devise.google_data']\n self.resource = User.fill_from_omniauth(resource, auth) if auth\n end", "def new_with_session(params, session)\n if session[\"devise.user_attributes\"]\n # we already trust this isn't mass assignment attack, so use without protection\n new(session[\"devise.user_attributes\"], without_protection: true) do |user|\n user.attributes = params\n user.identities.build(session[\"identity_attributes\"]) if session[\"identity_attributes\"]\n # sets user.errors, which will be present when we return the user to the form with validation errors\n user.valid?\n end\n else\n # create and return new User instance\n super\n end \n end", "def build_resource(*args)\n # Call the parent function to build the User model.\n super\n \n # If we have :omniauth in our session cookie we will build\n # an authentication for the new user from that and then \n # check to make sure the user is valid (i.e. get the password\n # from someone who created an account via LinkedIn or Facebook).\n if session[:omniauth]\n @user.build_authentication(session[:omniauth])\n @user.valid?\n end\n end", "def new_with_session(params, session)\n if session['devise.google_oauth2_data']\n super.tap do |user|\n if data = session[\"devise.google_oauth2_data\"] && session[\"devise.google_oauth2_data\"][\"info\"]\n user.email = data[\"email\"] if user.email.blank?\n user.first_name = data['first_name']\n user.last_name = data['last_name']\n user.identity.first_or_initialize(\n provider: session[\"devise.google_oauth2_data\"]['provider'],\n uid: session[\"devise.google_oauth2_data\"]['uid']\n )\n end\n end\n\n elsif session['devise.facebook_data']\n super.tap do |user|\n if data = session[\"devise.facebook_data\"] && session[\"devise.facebook_data\"][\"extra\"][\"raw_info\"]\n user.email = data[\"email\"] if user.email.blank?\n user.first_name = data['first_name']\n user.last_name = data['last_name']\n user.identity.first_or_initialize(\n provider: session[\"devise.facebook_data\"]['provider'],\n uid: session[\"devise.facebook_data\"]['uid']\n )\n end\n end\n else\n new(params)\n end\n end", "def build_resource(permitted_params = {})\n session[:department_id] ||= params[:department_id]\n permitted_params.merge!(department_id: params[:department_id] || session[:department_id])\n\n self.resource = RegistrationForm.new(permitted_params)\n end", "def build_resource(hash=nil)\n\t\tsuper\n\t\t\n\t\t# session[:claiming_profile] might have been set with the token value by lib/custom_authentication_failure_app.rb.\n\t\tresource.claiming_profile! session[:claiming_profile] if session[:claiming_profile].present?\n\t\t\n\t\t# During private alpha, registrants are screened. Don't send them the confirmation link until they've passed screening.\n\t\t# The exception is someone we've invited to claim their profile.\n\t\tresource.skip_confirmation_notification! if running_as_private_site? && !resource.profile_to_claim\n\t\t\n\t\t# If the blog parameter is present, set resource.signed_up_from_blog to true, otherwise preserve its current value.\n\t\tif params[:blog].present? 
&& resource.new_record?\n\t\t\tresource.signed_up_from_blog = true\n\t\t\tsession[:signed_up_from_blog] = true # Needed for pre-confirmation phase because we're not signed in.\n\t\t\tif params[:rp].present?\n\t\t\t\tclean_path = params[:rp].sub(%r|https?://[^/]+/|, '').sub(%r|[/?].*|, '')\n\t\t\t\tsession[:after_confirmation_url] = blog_url + clean_path\n\t\t\telse\n\t\t\t\tsession[:after_confirmation_url] = blog_url\n\t\t\tend\n\t\tend\n\t\t\n\t\t# If the newsletter subscription parameter is present, set resource.signed_up_for_mailing_lists to true, otherwise preserve its current value.\n\t\tif params[:nlsub].present? && resource.new_record?\n\t\t\tresource.signed_up_for_mailing_lists = true\n\t\tend\n\t\t\n\t\t# Flags that tell the views whether this registration is primarily a newsletter or blog sign-up from the user's perspective.\n\t\t@signing_up_from_blog = resource.signed_up_from_blog\n\t\t@signing_up_for_newsletter = resource.signed_up_for_mailing_lists\n\t\tflash[:after_sign_in_path_override] = edit_subscriptions_path if @signing_up_for_newsletter\n\tend", "def create_guest_user!\n # bah, this may not be entirely guaranteed to be unique\n # but it would be hard for it to collide, good enough. Actually\n # if the rails session id isn't unique, it's gonna cause problems\n # for more than just us, we should be good with just that even.\n unique_token = \"#{request.session_options[:id]}_#{(Time.now.to_f * 1000.0).to_i}_#{Process.pid}\"\n\n new_user = User.new.tap do |u|\n u.login = \"GUEST_USER_#{unique_token}\"\n u.guest = true\n u.save!\n end\n end", "def build_session\n # If it's empty assume user doesn't need session attributes.\n @session_attributes = Hash.new if @session_attributes.nil?\n @session = { :sessionAttributes => @session_attributes }\n @session\n end", "def create_session\n @user = User.new(nickname: User.temp_user_name)\n @user.save\n session[:user_id] = @user.id\n @user\n end", "def create_from_form\n if stored_anonymous_user?\n anonymous_user = current_user\n sign_out(current_user)\n end\n\n self.resource = warden.authenticate(auth_options)\n if self.resource.nil?\n begin\n self.resource = warden.authenticate!(auth_options)\n rescue Exception => e\n sign_in(anonymous_user)\n raise\n end\n else\n end\n\n set_flash_message(:notice, :signed_in) if is_navigational_format?\n\n sign_in(resource_name, resource)\n fandom_play_login(resource)\n \n redirect_after_successful_login()\n end", "def build_resource(hash = {})\n self.resource = resource_class.new_with_session(hash, session)\n end", "def build_resource(hash = {})\n self.resource = resource_class.new_with_session(hash, session)\n end", "def make_user(params = {})\n self.user= account.users.build_with_fields params.reverse_merge(:email => email, :invitation => self)\n end", "def create\n build_resource(sign_up_params)\n if resource.save\n expire_session_data_after_sign_in!\n respond_with resource, :location => after_inactive_sign_up_path_for(resource)\n else\n clean_up_passwords resource\n if resource.errors[:email].include?(I18n.t(\"errors.messages.taken\"))\n user = User.where(email: resource.email).first\n if user.active_for_authentication?\n # Send mail that already registered and ready to log in\n user.already_active_user_mail\n elsif user.invitation_token.nil?\n # Is not invited yet? tell him that he is already on the\n # waiting list\n user.already_waiting_mail\n else\n # Probably didn't get the invitation mail. 
Resend!\n user.re_invite!(:selfrequest)\n end\n resource.errors.clear\n end\n respond_with resource, :location => after_inactive_sign_up_path_for(resource)\n end\n end", "def new\n @user_session = UserSession.new(:email => session[:user_real_account_email])\n end", "def create_user_session\n password = '12345678'\n user = User.make!(\n password: password,\n password_confirmation: password\n )\n UserSession.create!(\n email: user.email,\n password: password\n )\n end", "def create\n build_resource\n @users = User.find(:all, :include => :roles) \n debugger\n if resource.save\n if resource.active_for_authentication?\n sign_in(resource_name, resource)\n (render(:partial => 'newuser', :locals => {:user => resource.id}, :layout => false) && return) if request.xhr?\n respond_with resource, :location => after_sign_up_path_for(resource)\n else\n expire_session_data_after_sign_in!\n (render(:partial => 'newuser', :locals => {:user => resource.id}, :layout => false) && return) if request.xhr?\n respond_with resource, :location => after_inactive_sign_up_path_for(resource)\n end\n else\n clean_up_passwords resource\n render :action => :new, :layout => !request.xhr?\n end\n end", "def new\n @session = User::Session.new\n end", "def user\n build :user, :using => data_for(:user)\n end", "def create_guest_user\n u = User.new(:name => \"guest\", :email => \"guest_#{Time.now.to_i}#{rand(100)}@example.com\", :guest => true)\n u.save!(:validate => false)\n session[:guest_user_id] = u.id\n u\n end", "def create\n build_resource\n if resource.save\n log_event(\"signed up\")\n if resource.active_for_authentication?\n set_flash_message :notice, :signed_up if is_navigational_format?\n sign_in(resource_name, resource)\n respond_with resource, :location => after_sign_up_path_for(resource)\n else\n set_flash_message :notice, :\"signed_up_but_#{resource.inactive_message}\" if is_navigational_format?\n expire_session_data_after_sign_in!\n respond_with resource, :location => after_inactive_sign_up_path_for(resource)\n end\n else\n clean_up_passwords resource\n respond_with resource\n end\n end", "def new_with_session(params, session)\n super.tap do |user|\n if (data = session['devise.facebook_data']) && session['devise.facebook_data']['extra']['raw_info']\n user.email = params[:email]\n user.alias = params[:alias]\n user.fb_userid = data['id']\n generated_password = Devise.friendly_token.first(10)\n user.password = generated_password\n user.password_confirmation = generated_password\n user.skip_confirmation!\n end\n end\n end", "def create\n build_resource(registration_params)\n\n #Find the email entered by the guest in the User table in the database \n @userTemp = User.where(email: resource.email).first\n \n #If user doesn't exist\n if @userTemp.nil?\n\n \t#Save it in the database\n\t if resource.save\n\t set_flash_message :notice, :signed_up\n\t sign_up(resource_name, resource)\n\t respond_with resource, :location => after_sign_up_path_for(resource)\n\t else\n\t #clean_up_passwords\n\t respond_with resource\n\t end\n\telse\n\t\t#If user already exists, go to registration url and show an error message\n\t\tredirect_to new_user_registration_url, :flash => { :error => \"Email already registered! 
Please Sign Up using another email!\" }\n\tend\n\n end", "def create_temporary_user\n\t\t@cur_user = User.create(:temp => true)\n\t\tcookies.permanent[:auth_token] = @cur_user.auth_token\n return @cur_user\n end", "def create\n unless session[:user_id].present?\n user = User.create_user\n session[:user_id] = user.id\n end\n end", "def app_user\n return SpUser.new unless current_user\n @sp_user ||= SpUser.find_by_ssm_id(current_user.id)\n if @sp_user.nil? && current_person.isStaff?\n @sp_user = SpGeneralStaff.create(ssm_id: current_user.id, created_by_id: current_user.id, person_id: current_person.id)\n end\n unless session[:login_stamped] || @sp_user.nil?\n @sp_user.update_attribute(:last_login, Time.now)\n session[:login_stamped] = true\n end\n @sp_user ||= SpUser.new\n end", "def build_resource(hash = nil)\n self.resource = resource_class.new_with_session(hash || {}, session)\n end", "def build_resource(hash=nil)\n self.resource = resource_class.new_with_session(hash || {}, session)\n end", "def create\n build_resource(sign_up_params)\n if resource.save\n\n save_image_attribute!(sign_up_params[:profile_image] || sign_up_params[:profile_image_name], params[:remove_profile_image])\n\n child = (auth_user && auth_user.is_a?(Child)) ? auth_user : (params[:child_id] ? User.find_by_id(params[:child_id]) : nil)\n if child.is_a?(Child)\n connect_parent_and_child!(resource, child)\n end\n if resource.active_for_authentication?\n set_flash_message :notice, :signed_up if is_navigational_format?\n sign_up(resource_name, resource)\n\n else\n set_flash_message :notice, :\"signed_up_but_#{resource.inactive_message}\" if is_navigational_format?\n expire_session_data_after_sign_in!\n end\n respond_to do |format|\n format.json { render :json => {:success => true, :user => resource.json_attributes} }\n format.html { redirect_to(user_locations_path(initial_reg: params[:initial_reg], child_id: params[:child_id] ) ) }\n end\n else\n clean_up_passwords resource\n logger.info \" Errors: #{resource.errors.full_messages}\"\n respond_to do |format|\n format.json { render :json => {:success => false, :errors => resource.errors.messages} }\n format.html { params[:initial_reg] ? 
redirect_to(user_locations_url) : redirect_to(edit_user_registration_path) }\n end\n end\n end", "def create\n # super\n self.resource = warden.authenticate!(auth_options)\n set_flash_message!(:notice, :signed_in)\n \n if resource\n user = resource\n sign_in(resource_name, resource)\n else\n user = User.find_by_email(params[:user][:email])\n sign_in(user, scope: :user) \n end\n \n profile = Profile.new\n profile.app_name = ''\n profile.desc = ''\n profile.tel = ''\n profile.app_email = ''\n user.profile = profile\n profile.save\n\n yield resource if block_given?\n render status: :ok, json: {\"user\": user, \"token\": user.token(user.id)}\n # respond_with resource, location: after_sign_in_path_for(resource)\n end", "def devise_create_new_rails_user\n build_resource(sign_up_params)\n\n resource.save\n\n yield resource if block_given?\n if resource.persisted?\n if resource.active_for_authentication?\n set_flash_message :notice, :signed_up if is_flashing_format?\n sign_up(resource_name, resource)\n respond_with resource, location: after_sign_up_path_for(resource)\n else\n set_flash_message :notice, :\"signed_up_but_#{resource.inactive_message}\" if is_flashing_format?\n expire_data_after_sign_in!\n respond_with resource, location: after_inactive_sign_up_path_for(resource)\n end\n else\n clean_up_passwords resource\n set_minimum_password_length\n respond_with resource\n end\n end", "def create\n \n build_resource(sign_up_params)\n\n #this is a private function to check params\n if check_some_validations\n redirect_to new_user_registration_path\n else #the next part taken form Devise on github\n resource.save\n yield resource if block_given?\n\n if resource.persisted?\n if resource.active_for_authentication?\n set_flash_message! :notice, :signed_up\n sign_up(resource_name, resource)\n respond_with resource, location: after_sign_up_path_for(resource)\n else\n set_flash_message! 
:notice, :\"signed_up_but_#{resource.inactive_message}\"\n expire_data_after_sign_in!\n respond_with resource, location: after_inactive_sign_up_path_for(resource)\n end\n else\n clean_up_passwords resource\n set_minimum_password_length\n respond_with resource\n end\n end\n end", "def build_from_current_user!(user)\n self.nombres = user.name\n self.email = user.info[\"email\"]\n end", "def create\n build_resource sign_up_params\n\n if resource.save\n Notifier.notify_superusers_of_access_request(resource).deliver\n if resource.active_for_authentication?\n set_flash_message :notice, :signed_up if is_navigational_format?\n sign_in(resource_name, resource)\n respond_with resource, :location => after_sign_up_path_for(resource)\n else\n set_flash_message :notice, :\"signed_up_but_#{resource.inactive_message}\" if is_navigational_format?\n expire_session_data_after_sign_in!\n respond_with resource, :location => after_inactive_sign_up_path_for(resource)\n end\n else\n clean_up_passwords resource\n respond_with resource\n end\n end", "def create_session(ip, meta_info={})\n raw, enc = Authentication::TokenGenerator.generate(Authentication::Session, 'authentication_token')\n Authentication::Session.create({\n 'authentication_token' => enc,\n 'meta_info' => meta_info,\n 'ip' => ip,\n 'account_id' => self.id\n })\n return raw, enc\n end", "def create\n @user = User.find_or_create_by(uid: auth['uid']) do |u|\n u.name = auth['info']['name']\n u.email = auth['info']['email']\n end\n\n session[:user_id] = @user.id\n\n render 'welcome/home'\n end", "def create_guest_user\n logger.debug \"ApplicationController:create_guest_user >>> creating new guest_user record\"\n guest = User.new_guest_user\n session[:guest_user_id] = guest.id\n guest\n end", "def constructed_user\n OpenStruct.new(email: current_user.email)\n end", "def create_guest_user\n email = \"guest_#{Time.now.to_i}#{rand(99)}@example.com\"\n u = User.create(:email => email)\n u.save!(:validate => false)\n session[:guest_user_id] = u.id\n u\n end", "def generate_session(user)\n AuthController.clear_user_info(session, nil)\n session[:original_user] = @original_user\n session[:impersonate] = true\n session[:user] = user\n end", "def build_resource(*args)\n super\n if session[\"devise.omni\"]\n @user.apply_omniauth(session[\"devise.omni\"])\n @user.valid?\n end\n end", "def from_session\n self.current_user = session['current_user'] if session['current_user']\n end", "def build_resource(hash=nil)\n self.resource = resource_class.new_with_session(hash || {}, session)\n self.resource.role = params[:r] if hash.blank?\n self.resource.build_profile if self.resource.profile.blank?\n end", "def create_new_user\n session[:user_id] = nil\n user = User.create_new(:remote_ip => request.remote_ip)\n user.language_abbr= session[:language] if session[:language] # Recalls language from previous session.\n user\n end", "def create\n\n\t\t# grab the authentication return\n\t\tauth = request.env[\"omniauth.auth\"]\n\n\t\t# now create a temporary user with the auth element etc\n\t\tuser = User.omniauth_create auth\n\n\t\t# now set the session_id \n\t\tsession[:id] = user.id\n\n\t\t# redirect back to the root which can successfully switch the pages of the application etc\n\t\tredirect_to root_url, :notice => \"Successful Authentication\"\t\n\tend", "def create\n user = warden.authenticate!(auth_options)\n Tiddle.expire_token(user, request) if request.headers['X-USER-EMAIL'] && request.headers['X-USER-TOKEN']\n Tiddle.purge_old_tokens(user)\n token = 
Tiddle.create_and_return_token(user, request, expires_in: 3.days)\n render json: { user: user.as_json, authentication_token: token, message: t('devise.sessions.signed_in') }\n end", "def create\n @user = User.find_or_create_from_auth_hash(auth_hash)\n # self.current_user = @user\n session[:user] = @user.id\n redirect_to '/'\n end", "def new_with_session(params, session)\n super.tap do |user|\n if data = session[\"devise.twitter_data\"] && session[\"devise.twitter_data\"][\"extra\"][\"raw_info\"]\n user.email = data[\"email\"] if user.email.blank?\n elsif data = session[\"devise.google_data\"] && session[\"devise.google_data\"][\"extra\"][\"raw_info\"]\n user.email = data[\"email\"] if user.email.blank?\n end\n end\n end", "def create_dev_session\n\t\t# redirect to root if not in dev environment\n\t\tredirect_to root_path unless Rails.env == 'development'\n\n\t\t# Get user name from .env file\n\t\tusername = ENV['DEV_USER']\n\n\t\t# if user with username doesn't exist, create one\n\t\tuser = User.find_by(name: username)\n\t\tunless user.present?\n\t\t\tuser = User.new(name: username)\n\t\t\tuser.save!\n\t\tend\n\n\t\t# sign in user and redirect to notes\n\t\tsession[:user_id] = user.id\n\t\tredirect_to user_notes_path(current_user)\n\tend", "def create\n user = build_resource\n\n if user.save\n set_flash_message :notice, :signed_up\n sign_in('user', user)\n respond_with user, :location => after_sign_up_path_for(user)\n else\n clean_up_passwords user\n respond_with user\n end\n end", "def create\n # logout_keeping_session!\n full_logout\n \n if using_open_id?\n open_id_authentication\n else\n password_authentication\n end\n save_sponsor_user \n end", "def create\n user = User.from_omniauth(env[\"omniauth.auth\"])\n session[:user_id] = user.id\n me=User.find(user.id)\n me.loggedin=true\n me.tutoring=false\n me.request=Request.new(class_name:\"3365f80a5cccb3e76443df3b736b26e8\")\n me.save\n render erb: :'sessions#create'\nend", "def new\n @user = env['omniauth.identity'] ||= User.new\n\n #if signed_in?\n #@user = User.new\n #else\n #auth = session['auth']\n #@user = User.create_with_omniauth(auth)\n #end\n end", "def new\n @user_session = UserSession.new\n end", "def new\n @user_session = UserSession.new\n end", "def new\n @user_session = UserSession.new\n end", "def new\n @user_session = UserSession.new\n end", "def new\n @user_session = UserSession.new\n end", "def create\n super do\n resource.authentication_token = Devise.friendly_token\n resource.save\n end\n end", "def build_user\n User.new(first_name: 'John', \n last_name: 'Doe', \n email: '[email protected]') \n end", "def current_user\n # super = use the devise current_user method || create a new open struct with 1 attr of full name\n super || OpenStruct.new(full_name: 'Guest')\n end", "def create\n user = User.find_by_email(params[:session][:email])\n respond_to do |format|\n if user && user.authenticate(params[:session][:password])\n sign_in user # create a cookie\n format.json { render json: user.active_model_serializer.new(user, {}) }\n format.html { redirect_back_or user }\n else\n #raise SessionsController::InvalidAuth\n render 'new'\n end\n end\n end", "def build_resource(*args)\n super\n if session[:omniauth]\n @user.apply_oauth_data(session[:omniauth])\n @user.valid?\n end\n end", "def create\n self.resource = User.find_by(email: sign_in_params[:email])\n\n if resource.persisted?\n if resource.valid_password?(sign_in_params[:password])\n sign_in(resource_name, resource) # Update sign in record with Devise method.\n\n payload = 
{\n sub: resource.id.to_str,\n iat: Time.now.to_i,\n exp: Time.now.to_i + 24 * 60 * 60,\n }\n\n token = JwtService.encode(payload: payload)\n\n response['Authorization'] = token\n render json: { current_user: resource }\n else\n # binding.pry\n render json: { errors: \"Incorrect username or password.\" }\n end\n else\n # binding.pry\n render json: { errors: resource.errors.messages }\n end\n\n end", "def generate_user(package_dir)\n system \"rails generate devise:install\"\n\n replace('config/environments/development.rb',\n \"config.action_mailer.raise_delivery_errors \\= false\",\n \"config.action_mailer.raise_delivery_errors = false\\n\\tconfig.action_mailer.default_url_options = { host: 'localhost:3000' }\")\n system \"rails generate devise User\"\n\n # make the authentication return a json\n dest = \"app/controllers/sessions_controller.rb\"\n File.new(dest, \"w\")\n FileUtils.cp \"#{package_dir}/template/#{dest}\", dest\n\n replace('config/routes.rb', 'devise_for :users', \"devise_for :users, :controllers => { sessions: 'sessions' } \")\n\n system \"git add -A\"\n system \"git commit -m 'Add devise and configure it'\"\nend", "def create\n @session_resource = SessionResource.new(session_resource_params)\n\n if @session_resource.save\n render json: @session_resource, status: :created, location: @session_resource\n else\n render json: @session_resource.errors, status: :unprocessable_entity\n end\n end", "def create\n reset_session\n params[:user] ||= {}\n username = params[:user][:username].to_s\n password = params[:user][:password].to_s\n user = User.where('username = ? and crypted_password = ?', username, User.encrypt(password)).first\n\n params[:client_uid] = 'Web Platform' if request.format.html?\n \n if user && params[:client_uid]\n session_obj = Session.create(user_id:user.id, client_uid:params[:client_uid])\n session[:app_session_id] = session_obj.id\n session[:user_id] = user.id\n\n if request.format.html?\n redirect_to controller: 'main'\n elsif request.format.json?\n render json: {success: true, session: session_obj.to_json}\n end\n else\n if request.format.html?\n flash[:alert] = \"Cannot login, please try again\"\n render action: 'new'\n elsif request.format.json?\n render json: {success: false, message: 'Cannot login, please try again'}\n end\n end\n end", "def current_user\n return nil unless valid_session?\n\n OpenStruct.new(\n id: session['uid'],\n email: session['email']\n )\n end", "def create\n build_resource(sign_up_params)\n resource_saved = resource.save\n yield resource if block_given?\n if resource_saved\n if resource.active_for_authentication?\n set_flash_message :notice, :signed_up\n sign_in(resource_name, resource)\n else\n set_flash_message :notice, :\"signed_up_but_#{resource.inactive_message}\"\n expire_data_after_sign_in!\n end\n\n @redirect_to_path = request.referer\n \n respond_to do |format|\n format.js { render 'registrations/success_registration' }\n end\n else\n # Errors occurred while registration\n clean_up_passwords resource\n @validatable = devise_mapping.validatable?\n \n @minimum_password_length = resource_class.password_length.min if @validatable\n\n respond_to do |format|\n format.js { render 'registrations/error_registration' }\n end\n end\n end", "def create\n ret = super\n sign_in(resource.type.underscore, resource.type.constantize.send(:find, resource.id)) unless resource.type.nil?\n ret\n end", "def user_session\n @user_session ||= UserSession.new(session, cookies)\n end", "def identity_create\n # Potential threat of overlap\n identity = 
Identity.create(uid:rand(100000000..9999999999), provider: 'registration')\n identity.user_id = resource.id\n identity.name = params['user']['name'] #Looks very ugly\n identity.email = resource.email\n identity.save\n end", "def create_session\n session[:who_is_this] = \"admin\"\n end", "def build_resource(*args)\n super\n if session[:omniauth]\n @user.apply_omniauth(session[:omniauth])\n @user.valid?\n end\n end", "def build_profile\n Profile.create(user: self)\n end", "def prepare_user\n if session[:user]\n @user = User.find(session[:user])\n end\n end", "def create\n @user = User.new(params[:user]).decorate\n @user.save\n session[:user_id] = @user.id\n respond_with @user\n end", "def create\n u = params[:username]\n p = params[:password]\n user_session = UserSession.new(:login => u, :password => p, :remember_me => true)\n if not user_session.save then\n return render :text => \"error\", :status => 401\n end\n\n ret = { :user => User.find_by_username_or_email(user_session.login) }\n ret[:redir] = URI.parse(session.delete(:return_to)).path if session.include? :return_to\n restful ret\n end", "def create_user\n # provide the interface asking for name, destination and duration\n # then, create and store the User object\n end", "def user_session\n\t\t@user_session ||= UserSession.new(session)\n\tend", "def tmp_user\n @tmp_user ||= (session[:tmp_user].blank?) ? (session[:tmp_user] = TmpUser.create) : TmpUser.find(session[:tmp_user]['id'])\n end", "def create_new_session\n session = reviewer_access_sessions.build\n session.save!\n session\n end", "def create\n options = {:remember_me => true}.merge(params[:user_session])\n @user_session = UserSession.new(options)\n if @user_session.save\n redirect_back_or_default root_url\n else\n @failed = true\n render :action => :new\n end\n end", "def create\n auth = request.env[\"omniauth.auth\"]\n user = User.find_by_provider_and_uid(auth[\"provider\"], auth[\"uid\"]) || User.create_with_omniauth(auth)\n session[:user_id] = user.id\n redirect_to app_path\n end", "def create\n build_resource(sign_up_params)\n\n resource.save\n render_resource(resource)\n end", "def new\n super do |resource|\n resource.build_user_data\n end\n end", "def set_user_session\n if signed_in? || devise_controller?\n session[:current_user] = nil\n else\n session[:current_user] = Faker::Internet.username\n end\n end", "def new\n\t\t@user_session = UserSession.new\n\tend", "def build_user(attributes = {}, options = {})\n @provider_user.build_record(attributes, options)\n end" ]
[ "0.7091186", "0.6656826", "0.66357183", "0.6563214", "0.6234899", "0.6205804", "0.6152574", "0.61236125", "0.611247", "0.6099068", "0.6014181", "0.59724724", "0.59724724", "0.59543395", "0.59440666", "0.5900429", "0.58953875", "0.588996", "0.585942", "0.58219326", "0.58096445", "0.580787", "0.57890254", "0.5787707", "0.57769066", "0.5776256", "0.5756887", "0.5726208", "0.57243663", "0.5707262", "0.5705594", "0.5700486", "0.5693252", "0.569296", "0.56845844", "0.56475407", "0.56297934", "0.56278443", "0.5618173", "0.5617248", "0.5615903", "0.5613238", "0.5604316", "0.5600924", "0.559499", "0.55857694", "0.5578526", "0.5574386", "0.55669665", "0.5566642", "0.55664086", "0.5566168", "0.555032", "0.55428505", "0.552705", "0.552705", "0.552705", "0.552705", "0.552705", "0.55135137", "0.55057913", "0.5498396", "0.54937017", "0.5486113", "0.5476326", "0.5466261", "0.5466258", "0.54645014", "0.5462542", "0.5460321", "0.5459865", "0.545961", "0.54568356", "0.54491967", "0.54426855", "0.5429889", "0.5429332", "0.54218394", "0.54206353", "0.5419678", "0.5416585", "0.5413481", "0.5407416", "0.540504", "0.54033536", "0.5398462", "0.5387348", "0.5384198", "0.538325", "0.5374985" ]
0.5726887
36
Signs in a user on sign up. You can overwrite this method in your own RegistrationsController.
def sign_up(resource_name, resource) sign_in(resource_name, resource) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def authenticate_user_with_sign_up!\n unless user_signed_in?\n store_location_for(:user, request.fullpath)\n redirect_to new_user_registration_path, alert: t(\"create_an_account_first\")\n end\n end", "def authenticate_user_with_sign_up!\n unless user_signed_in?\n store_location_for(:user, request.fullpath)\n redirect_to new_user_registration_path, alert: \"Please create an account first\"\n end\n end", "def signup\n sign_out\n puts '----signup called.----'\n redirect_to '/users/sign_up'\n end", "def signup\n client.signup(\n params[:user],\n params[:password]\n )\n end", "def sign_up(resource_name, resource)\n # sign_in(resource_name, resource)\n end", "def signup\n end", "def signup\n end", "def sign_up\n @user = User.new(user_params)\n if @user.save\n sign_in(@user)\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def sign_up(user)\n visit '/users/new'\n # expect(page.status_code).to eq 200\n fill_in :email, with: user.email\n fill_in :username, with: user.username\n fill_in :password, with: user.password\n fill_in :password_confirmation, with: user.password_confirmation\n click_button 'Sign up'\n end", "def signup\n case request.method\n when :post\n @user = User.new(params['user'])\n \n if @user.save \n session['user'] = User.authenticate(@user.login, params['user']['password'])\n flash['notice'] = _(\"Signup successful\")\n redirect_back_or_default :action => \"welcome\" \n end\n end \n end", "def signed_up(user)\n @user = user\n \n mail to: @user.email, subject: 'Sign Up Confirmation.'\n end", "def set_sign_up_user\n @sign_up_user = SignUpUser.find(params[:id])\n end", "def sign_up\n @user = User.new\n end", "def sign_up(resource_name, resource)\n redirect_to users_url\n end", "def signup!(params)\n self.login = params[:user][:login]\n self.email = params[:user][:email]\n generate_temporary_password!\n save_without_session_maintenance\n end", "def ensure_signed_up!\n # current_user\n end", "def signup_user(options = {})\n options[:email] = options[:email] || $settings.customer_service_email\n options[:password] = options[:password] || 'foobar'\n\n # signup\n json = post_api '/api/session/signup', {\n 'email' => options[:email],\n 'password' => options[:password],\n 'name' => options[:email].split('@')[0]\n }\n assert_equal 200, json['status'], json['message']\n\n login_user(options) unless options[:skip_login]\n end", "def signed_up(user)\n @user = user\n mail to: @user.email\n end", "def signed_up(user)\n @user = user\n\n mail to: user.email\n end", "def signup\n @user = User.new(params[:user])\n if @user.save\n redirect_to :controller => 'sessions', :action => 'new'\n else\n render'index'\n flash.now[:error]= \"Sign up failure, please try again.\"\n end\n end", "def set_sign_up\n @sign_up = SignUp.find(params[:id])\n end", "def set_sign_up\n @sign_up = SignUp.find(params[:id])\n end", "def signup\n @user = User.new\n end", "def signup\n @user = User.new\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def 
sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def sign_up(resource_name, resource)\r\n sign_in(resource_name, resource)\r\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n flash[:notice] = \"Sign up successfully.\"\n end", "def create\n if Feedbunch::Application.config.signups_enabled\n super\n else\n Rails.logger.warn \"Creation of new user attempted, but signups are disabled\"\n redirect_to root_path\n end\n end", "def create\n @user = User.create(params[:user]) \n \n if @user.save\n sign_in @user\n flash[:succeess] = \"Sign up successfully\" \n redirect_to @user\n else\n render 'new'\n end\n \n end", "def signup!(params)\n self.login = params[:user][:login]\n self.email = params[:user][:email]\n save_without_session_maintenance\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource) unless current_user_is_admin?\n end", "def create #sign up button\n @user = User.new(user_params)\n if @user.save\n login(@user)\n redirect_to user_url(@user)\n else\n flash.now[:errors] = @user.errors.full_messages\n render :new\n end\n end", "def signup\n if params[:user]\n if User.exists?(id: params[:user_id])\n #check if the user already exists in database\n flash[:notice] = \"User already exists. 
Please log in\"\n redirect_to root_path()\n else\n auth_user\n end\n else\n auth_user\n end\n \n end", "def signup(user_that_just_signed_up)\n mail to: user_that_just_signed_up.email,\n subject: \"You signed up for YardSale\"\n end", "def sign_up_with(&blk)\n @signup = blk if blk\n @signup\n end", "def sign_up(resource_name, resource)\n sign_in(resource_name, resource)\n end", "def signup!(params)\n self.username = params[:user][:username]\n self.email = params[:user][:email]\n save_without_session_maintenance\n end", "def signup!(params)\n self.username = params[:user][:username]\n self.email = params[:user][:email]\n save_without_session_maintenance\n end", "def signup\n if params['user']\n params['username'] ||= params['user']['name'] if params['user']['name']\n params['password'] ||= params['user']['password'] if params['user']['password']\n params['role'] ||= params['user']['role'] || 'customer' if params['user']['role']\n\n end\n if params['username'] and params['password'] and params['role']\n # This action has been invoked by the signup form.\n @user = User.create(:role => params['role'],\n :name => params['username'],\n :password => params['password'])\n if @user.new_record?\n flash[:error] = @user.errors.messages.collect{|k,v| \"#{k} #{v.join(k.to_s)}\"}\n else\n unless session['user_attributes']\n session['user_attributes'] = @user.attributes\n session['user_attributes']['id'] = @user.id\n\n flash[:notice] = \"Welcome #{@user.name}, you have signed up as a #{@user.role}\"\n if session[:redirect_to]\n redirect_to session[:redirect_to]\n else\n redirect_to(root_url)\n end\n end\n end\n else\n # This action was not invoked by the signup form, redirect\n # to the form.\n\n if params['role'] == 'marketeer'\n # We do not allow users to create marketeer (admin) users by\n # crafting their own URLs.\n flash[:error] = 'Marketeer users may only be created through the Users management page'\n redirect_to(root_url)\n end\n\n @user = User.new(:role => params[:role])\n end\n end", "def signup\n @user = User.new(params[:user])\n return unless request.post?\n if @user.save\n flash[:notice] = \"New user added!\"\n redirect_to :action=>'show', :id=>@user\n end\n end", "def create\n @user = User.new(user_params)\n @user.save\n if @user.save\n redirect_to root_path, :notice => \"Signed up!\"\n else\n redirect_to signup_path\n end\n end", "def client_sign_up\n\n end", "def create\n @user = User.new(user_params)\n if @user.save\n\t sign_in @user unless signed_in?\n\t flash[:success] = \"Signup successful\"\n redirect_to @user\n else\n render 'new'\n end\n end", "def sign_up_helper\n username = prompt.ask(\"Enter Username\")\n while User.find_by(name: username.downcase)\n puts \"This username is already taken, please create a different username\"\n username = prompt.ask(\"Enter Username\")\n end\n password = prompt.ask(\"Enter Password\")\n self.user = User.create(name: username, password: password)\n puts \"Sign up complete.\"\n sleep(0.8)\n puts \"Let's Get Cookin #{self.user.name.upcase}!!!\"\n sleep(1.5)\n main_menu\n end", "def sign_up(resource_name, resource)\n # DO NOT sign_in(resource_name, resource)\n end", "def signup\n @user_genders = User.genders_for_select\n if request.get?\n redirect_to admin_dashboard_path if User.count != 0\n @user = User.new({:role => 'admin'})\n else\n @user = User.new(params[:user])\n if @user.save\n if params[:send_credentials]\n Notifications.admin_user_created(@user).deliver\n end\n flash[:notice] = t('Successfully signup admin user')\n redirect_to 
admin_dashboard_path\n end\n end\n rescue Errno::ECONNREFUSED => e\n flash[:error] = t(:signup_mail_delivery_error)\n redirect_to admin_dashboard_path\n end", "def create\n @user = User.new(params[:user])\n if @user.save\n # Handle a successful save.\n sign_in @user\n flash[:success] = \"Welcome to turftalk!\"\n redirect_to '/users'\n else\n @title = \"Sign up\"\n render 'new'\n end\n end", "def create\n @user = User.new(signup_params)\n if @user.save\n sign_in @user\n respond_to do |format|\n \tformat.json { render json: @user }\n end\n else\n # render :new\n end\n end", "def signup!(params)\n self.email = params[:user][:email]\n self.name = params[:user][:name]\n self.password = params[:user][:password]\n #save_without_session_maintenance\n end", "def signup\n\n\t\temail = params[:email] # Extract the email from the params of the signup form\n\t\ttimezone = params[:timezone] # Extract the timezone from the params of the signup form\n\n\t\t@url = uniqueUrlKeyGenerator # Generate a unique url key\n\t\told_user = User.find_by_email(email)\n\n\t\t# If user exists\n\t\tif !old_user.nil?\n\t\t # If user is not registered\n\t\t if !old_user.registered?\n\t\t # Send welcome email again and save him\n\t\t old_user.sendWelcomeEmail\n\t\t old_user.save\n\t\t end\n\t\tend\n\n\t\t# Find the user in the user db with the same email as extracted in the params\n\t\tcheck_users = User.find_by_email(email)\n\n\t\t#create a new PotentialUser object with the extarcted email, timezone and url key\n\t\tuser = User.new(email: email, url: @url, timezone: timezone, day: 1, registered: false)\n\n\t\t# If no such user exists\n\t\tif check_users.nil?\n\n\t\t#If the new user is valid and can be saved\n\t\t if user.save\n\t\t user.sendWelcomeEmail\n\t\t @title = \"Thank you for signing up\"\n\t\t @result = \"A confirmation email with instructions has been sent to you\"\n\t\t @result2 = \"Your unique access key is: \" + @url\n\n\t\t#If not valid\n\t\t else\n\t\t #Set @result as the error message\n\t\t @title = \"Looks like something went wrong ...\"\n\t\t @result = \"Email #{user.errors[:email][0]}.\".html_safe\n\t\t end\n\n\t\t#User by this email already exists\n\t\telse\n\n\t\t if !check_users.registered?\n\t\t\t # Result instance variable for the view\n\t\t\t @title = \"Looks like something went wrong ...\"\n\t\t\t @result = \"User by this email already exists, but we sent another confirmation email just in case\"\n\t\t\t else\n\t\t\t @title = \"Looks like something went wrong ...\"\n\t\t\t @result = \"User by this email already exists\"\n\t\t end\n\n\tend\n\n\t\t# Respond to only javascript, set for AJAX\n\t\trespond_to do |format|\n\t\t\tformat.js\n\t\tend\n\tend", "def sign_up(resource_name, resource)\r\n # Create an account for this user.\r\n account = Account.create(name: \"#{resource.email}'s account\")\r\n \r\n # Sign in the user (from the original Devise::RegistrationsController).\r\n sign_in(resource_name, resource)\r\n end", "def create\n @user = User.new(params[:user])\n if @user.save\n # we log him in and we redirect to root path\n log_user_in(@user)\n redirect_to root_path, :notice => \"You have successfully signed up.\"\n else\n flash.now[:alert] = \"Cannot sign you up. 
Sorry for that.\"\n render :new, :status => :unprocessable_entity\n end\n end", "def create\n @user = User.new(signup_params)\n if @user.save\n session[:user_email][email protected]\n render 'homepage'\n else\n render 'index'\n end\n\n end", "def attemp_signup\n\n end", "def create\n @user = User.new(sign_up_params)\n\t\[email protected] = 0\n respond_to do |format|\n if @user.save\n format.html { redirect_to url_for({controller: :users, action: :sign_in, id: 0}), notice: 'User was successfully created.' }\n format.json { render :show, status: :created, location: @user }\n else\n format.html { render :new }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end", "def signup!(user, prompt, &block)\n return save(true, &block) if openid_complete?\n return signup_as_invitee!(user, prompt, &block) if user and user[:invitation_id]\n signup_without_credentials!(user, &block)\n end", "def signup!(user, prompt, &block)\n return save(true, &block) if openid_complete?\n return signup_as_invitee!(user, prompt, &block) if user and user[:invitation_id]\n signup_without_credentials!(user, &block)\n end", "def sign_up_new_user\n user = User.make\n visit \"/\"\n find('#join_now_link').click\n click_link 'Sign up with Email'\n fill_in 'Full name', with: user.full_name\n fill_in 'Email', with: user.email\n find('#user_password').set user.password\n find('#user_password_confirmation').set user.password\n click_button 'Sign up'\n user\nend", "def signup\n @user= User.new\n end", "def signup\n return if generate_blank\n params[:user].delete('form')\n params[:user].delete('verified') # you CANNOT pass this as part of the request\n @user = User.new(params[:user])\n begin\n User.transaction(@user) do\n @user.new_password = true\n unless LoginEngine.config(:use_email_notification) and LoginEngine.config(:confirm_account)\n @user.verified = 1\n end\n if @user.save\n key = @user.generate_security_token\n url = url_for(:action => 'home', :user_id => @user.id, :key => key)\n flash[:notice] = 'Signup successful!'\n if LoginEngine.config(:use_email_notification) and LoginEngine.config(:confirm_account)\n UserNotify.deliver_signup(@user, params[:user][:password], url)\n flash[:notice] << ' Please check your registered email account to verify your account registration and continue with the login.'\n else\n flash[:notice] << ' Please log in.'\n end\n redirect_to :action => 'login'\n end\n end\n rescue Exception => e\n flash.now[:notice] = nil\n flash.now[:warning] = 'Error creating account: confirmation email not sent'\n logger.error \"Unable to send confirmation E-Mail:\"\n logger.error e\n end\n end", "def signup_with(&blk)\n @sign_up_with = blk if blk\n @sign_up_with\n end", "def sign_up(useremail, password)\n if new_user_available(useremail)\n query = \"INSERT INTO Users (useremail, password) VALUES('#{useremail}', '#{password}')\"\n @connection.exec(query)\n end\n end", "def create\n @user = User.new(user_params)\n if @user.save\n flash[:success] = \"Registration was successful!\"\n sign_in @user\n redirect_to root_path\n else\n render 'new'\n end\n end", "def signup\n return set_session_user_id!(nil, 'Введите данные.') { render :signup_new } if params_valid?(:signup)\n LdapService.mutex.synchronize do\n ldap_result = LdapService.new.add(\n email: params[:email],\n password: params[:password],\n name: params[:name],\n surname: params[:surname],\n role: 'painter'\n )\n return set_session_user_id!(nil, 'Невозможно зарегистрироваться.') { render :signup_new } if ldap_result.blank?\n 
user = User.find_by(ldap_id: ldap_result[:ldap_id])\n return set_session_user_id!(user.id, 'Вы вошли!') { redirect_to root_url } if user.present?\n user = User.new(ldap_result)\n return set_session_user_id!(user.id, 'Вы вошли!') { redirect_to root_url } if user.save\n set_session_user_id!(nil, 'Возникли проблемы. Попробуйте еще раз.') { render :signup_new }\n end\n end", "def create\n @user = user_from_params\n\n if @user.save\n sign_in @user\n redirect_to home_path, notice: 'Thanks for signing up!'\n\n\n else \n render template: 'users/new'\n end\n end", "def signup \n @page_title = 'Sign Up'\n return if generate_blank\n params[:user][:email].downcase!\n @user = User.new(params[:user]) \n if params[:account_type].nil?\n flash[:warning] = \"Please select a user type (student/renter/instructor).\"\n return\n end\n User.transaction(@user) do\n @user.new_password = true\n\n unless LoginEngine.config(:use_email_notification) and LoginEngine.config(:confirm_account)\n @user.email_verified = true\n end\n \n if @user.save\t\n @group = Group.find_by_group_name(params[:account_type]) \n @user.add_group @group\n key = @user.generate_security_token\n url = url_for(:action => 'home', :user_id => @user.id, :key => key)\n UserNotify.deliver_signup(@user, params[:user][:password], url)\n\n flash[:notice] = 'Signup successful! Please check your email at '\n flash[:notice] << @user.email + ' and confirm your membership before using the system.'\n @session[:user] = nil\n redirect_to '/'\n end\n end\n end", "def sign_up_with(**args)\n email = args.fetch(:email, '[email protected]')\n password = args.fetch(:password, '12345678')\n password_confirmation = args.fetch(:password_confirmation, '12345678')\n\n within '#new_user' do\n fill_in 'Email', with: email\n fill_in 'Password', with: password\n fill_in 'Password confirmation', with: password_confirmation\n click_on 'Sign up'\n end\n end", "def create\n @user = User.new(user_params)\n if @user.save\n sign_in @user, false\n redirect_to users_dash_path, notice: 'User was successfully registered'\n else\n flash[:errors] = @user.errors.full_messages\n redirect_to new_user_path\n end\n end", "def create\n # redirect user if already logged in\n if current_user\n redirect_to root_path\n else\n user = User.new(user_params)\n if user.save\n session[:user_id] = user.id\n flash[:notice] = \"Successfully signed up.\"\n redirect_to profile_path #root_path\n else\n flash[:error] = user.errors.full_messages.join(', ')\n redirect_to signup_path\n end\n end\n end", "def sign_up\n service_response = UserManagement::SignUp.new(\n params.merge({\n is_client_manager: 1,\n client_creation_needed: 1,\n browser_user_agent: http_user_agent\n })\n ).perform\n\n if service_response.success?\n # NOTE: delete cookie value from data\n cookie_value = service_response.data.delete(:cookie_value)\n set_cookie(\n GlobalConstant::Cookie.user_cookie_name,\n cookie_value,\n GlobalConstant::Cookie.user_expiry.from_now\n )\n end\n\n render_api_response(service_response)\n end", "def create\n\n if User.exists? email: params[:user][:email]\n user = User.find_by_email params[:user][:email]\n if user.invited_to_sign_up?\n Rails.logger.warn \"User #{user.email} was invited but instead of accepting the invitation is signing up normally. 
Destroying old user record before signup.\"\n user.destroy\n end\n end\n\n super\n end", "def sign_in_user(user)\n visit new_user_session_path\n fill_in 'Email', with: user.email\n fill_in 'Password', with: user.password\n click_button 'Sign in'\n user\n end", "def create\n @user = User.new(user_params)\n # If the User instance was successfully saved then redirect to home page and display notice\n if @user.save\n # Set authentication token cookie so that the User is automatically signed in after signing up\n cookies[:authentication_token] = @user.:authentication_token\n redirect_to root_url, notice: \"Thank you for signing up.\"\n # If the User instance was not saved in the database then redisplay the sign up form\n else\n render :new\n end\n end", "def create\n user = build_resource\n\n if user.save\n set_flash_message :notice, :signed_up\n sign_in('user', user)\n respond_with user, :location => after_sign_up_path_for(user)\n else\n clean_up_passwords user\n respond_with user\n end\n end", "def create\n @user = User.new(params[:user])\n if @user.save\n redirect_to root_url, :notice => \"Signed up!\"\n else\n render \"new\"\n end\n end", "def sign_up\n self.sign_up_link\n CreateNewAccount.new @browser\n end", "def new\n if current_user\n flash[:notice] = 'you are already signed up'\n redirect_to home_path\n else\n @user = User.new\n render 'signup/step1'\n end\n end", "def success_signup\n redirect_to root_path if flash[:notice].nil? || flash[:notice] != t('devise.registrations.signed_up_but_inactive')\n end", "def allow_sign_up?\n @allow_sign_up\n end", "def after_sign_up_path_for(resource)\n redirect_to users_path\n end", "def auth_user\n redirect_to new_user_registration_url unless user_signed_in?\n end", "def create\n\t\t@user = User.new(user_params)\n\t\tif @user.save\n\t\t\tredirect_to sign_in_path\n\t\telse\n\t\t\tredirect_to sign_up_path\n\t\tend\n\tend" ]
[ "0.74822444", "0.7396724", "0.72480386", "0.711055", "0.71018165", "0.70786214", "0.70786214", "0.7010431", "0.6943396", "0.69256973", "0.6915402", "0.6913177", "0.6904785", "0.6899749", "0.68899375", "0.6874407", "0.68355083", "0.6826637", "0.68244773", "0.68235505", "0.68125176", "0.68125176", "0.68024117", "0.68024117", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67942816", "0.67849267", "0.67762816", "0.6770096", "0.6765143", "0.6760831", "0.67482364", "0.67442214", "0.6740251", "0.6740077", "0.67332906", "0.6698045", "0.6677514", "0.6677514", "0.6672213", "0.6671869", "0.6671492", "0.6650561", "0.6639799", "0.66381633", "0.6617336", "0.6616168", "0.6602805", "0.66011864", "0.6598055", "0.6582423", "0.65763646", "0.6565099", "0.654014", "0.6534684", "0.65117085", "0.65113574", "0.65113574", "0.6496893", "0.6496831", "0.6491598", "0.64850825", "0.6469134", "0.6466227", "0.6443841", "0.6442101", "0.64417547", "0.6432554", "0.64309686", "0.6413432", "0.64102316", "0.64038754", "0.6401694", "0.6384122", "0.6367097", "0.63503885", "0.6338302", "0.6326585", "0.63144714", "0.62975883", "0.6286316", "0.6267998", "0.6267734" ]
0.6896803
14
The path used after sign up. You need to overwrite this method in your own RegistrationsController.
def after_sign_up_path_for(resource) after_sign_in_path_for(resource) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def after_sign_up_path_for(resource)\n after_register_path\n end", "def after_sign_up_path_for(resource)\n\n end", "def after_sign_up_path_for(_resource)\n edit_user_registration_path\n end", "def after_sign_up_path_for(resource)\n super\n end", "def after_sign_up_path_for(resource)\n complete_user_registration_path\n end", "def after_sign_up_path_for(_resource)\n return new_user_session_path\n end", "def after_sign_up_path_for(resource)\n \"/\"\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n edit_user_registration_path\n end", "def after_sign_up_path_for(resource)\n edit_registration_path(resource)\n end", "def after_sign_up_path_for(resource)\n edit_registration_path(resource)\n end", "def after_sign_up_path_for(resource)\n # super(resource)\n account_path(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n users_path\n end", "def after_sign_up_path_for(resource)\n users_path\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n user_path(resource)\n end", "def after_sign_up_path_for(resource)\n user_path(resource)\n end", "def after_sign_up_path_for(_resource)\n root_path\n end", "def after_sign_up_path_for(resource)\n profile_path\n end", "def after_sign_up_path_for(resource)\n user_path(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n after_signup_path\n end", "def after_sign_up_path_for(resource)\n :new_profile # Or :prefix_to_your_route\n end", "def after_sign_up_path_for(resource)\n new_user_session_path\n end", "def after_sign_up_path_for(resource)\n :pre_created\n end", "def after_sign_up_path_for(resource)\n '/carrier/sign_up_complete'\n end", "def after_sign_up_path_for(resource)\n super(resource)\nend", "def after_sign_up_path_for(resource)\n after_sign_in_path_for(resource)\n end", "def after_sign_up_path_for(resource)\n after_sign_in_path_for(resource)\n end", "def after_sign_up_path_for(resource)\n after_sign_in_path_for(resource)\n end", "def after_sign_up_path_for(resource)\r\n root_path\r\n end", "def after_sign_up_path_for(resource)\n home_user_path(resource)\n end", "def after_sign_up_path_for(resource)\n user_path(current_user)\n end", "def 
after_inactive_sign_up_path_for(resource)\n \"/users/sign_up\"\n end", "def after_sign_up_path_for(resource)\n root_path\n end", "def after_sign_up_path_for(resource)\n root_path\n end", "def after_sign_up_path_for(resource)\n root_path\n end", "def after_sign_up_path_for(resource)\n root_path\n end", "def after_sign_up_path_for(resource)\n :new_reparation\nend", "def after_sign_up_path_for(resource)\n stored_location_for(resource) || edit_user_registration_path\n end", "def after_sign_up_path_for(resource)\n after_sign_in_path_for(resource)\n end", "def after_sign_up_path_for(resource)\n\t root_path + \"home\"\n end", "def after_sign_up_path_for(user)\n return root_url\n end", "def after_inactive_sign_up_path_for(resource)\n after_sign_up_path_for(resource)\n end", "def after_sign_up_path_for(user)\n after_sign_in_path_for(user)\n end", "def after_sign_up_path_for(resource)\n thank_you_path\n end", "def after_sign_up_path_for(resource)\n \t\tsign_in(resource)\n \t'/profiles/new' # Or :prefix_to_your_route\n \tend", "def after_sign_up_path_for(resource)\n usermgmt_path\n end", "def after_sign_up_path_for(resource)\n welcome_path\n end", "def after_sign_up_path_for(resource)\n new_location_path\n end", "def after_sign_up_path_for(resource_or_scope)\n new_user_session_path\n end", "def after_sign_up_path_for(resource)\n edit_user_path(resource.id)\n end", "def after_sign_up_path_for(resource)\n # binding.irb\n super\n # super(resource) do\n edit_user_registration_path\n # end\n # binding.irb\n # if resource[:role] == 'guest'\n # new_guest_path\n # else\n # new_host_path\n # end\n end", "def after_inactive_sign_up_path_for(resource)\n signup_success_path\n end", "def after_inactive_sign_up_path_for(resource)\n signup_success_path\n end", "def after_inactive_sign_up_path_for(resource)\n \t new_user_registration_path\n end", "def after_sign_up_path_for(resource)\n after_sign_in_path_for(resource) if is_navigational_format?\n end", "def after_sign_up_path_for(resource)\n after_sign_in_path_for(resource) if is_navigational_format?\n end", "def after_inactive_sign_up_path_for(resource)\n super\n end", "def after_sign_up_path_for(resource)\n get_started_path\n end", "def after_sign_up_path_for(resource)\n after_sign_in_path_for(resource)\n root_path\n end", "def after_inactive_sign_up_path_for(resource)\n root_path\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end" ]
[ "0.828119", "0.8156382", "0.81545085", "0.81270283", "0.8126006", "0.8125953", "0.8093928", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8058524", "0.8033793", "0.8033793", "0.8033793", "0.8031046", "0.802629", "0.802629", "0.8010102", "0.7994289", "0.7994289", "0.79909474", "0.79909474", "0.79744273", "0.79698116", "0.79698116", "0.7967307", "0.7964086", "0.79561836", "0.793659", "0.7911753", "0.7904698", "0.78764486", "0.78764147", "0.7868138", "0.7852225", "0.7852225", "0.7852225", "0.7850194", "0.78464645", "0.7844841", "0.784444", "0.7789737", "0.7789737", "0.7789737", "0.7789737", "0.77715325", "0.77646327", "0.7749588", "0.77443624", "0.77382183", "0.77172405", "0.7698204", "0.7693377", "0.7685734", "0.76652586", "0.76639634", "0.7656845", "0.7651925", "0.76510346", "0.76469624", "0.7642663", "0.7642663", "0.7637262", "0.76368266", "0.76368266", "0.7636795", "0.7630875", "0.7629819", "0.7601866", "0.7587784", "0.7587784", "0.7587784", "0.7587784", "0.7587784", "0.7587784", "0.7587784", "0.7587784" ]
0.78639686
56
The path used after sign up for inactive accounts. You need to overwrite this method in your own RegistrationsController.
def after_inactive_sign_up_path_for(resource) #scope = Devise::Mapping.find_scope!(resource) #router_name = Devise.mappings[scope].router_name #context = router_name ? send(router_name) : self #context.respond_to?(:root_path) ? context.root_path : "/" after_sign_in_path_for(resource) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def after_inactive_sign_up_path_for(resource)\n after_signup_path\n end", "def after_inactive_sign_up_path_for(resource)\n \"/users/sign_up\"\n end", "def after_inactive_sign_up_path_for(resource)\n super\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n after_sign_up_path_for(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n \t new_user_registration_path\n end", "def after_inactive_sign_up_path_for(resource)\n signup_success_path\n end", "def after_inactive_sign_up_path_for(resource)\n signup_success_path\n end", "def after_inactive_sign_up_path_for(_resource)\n new_user_session_path\n end", "def after_inactive_sign_up_path_for(resource)\n root_path\n end", "def after_inactive_sign_up_path_for(resource)\n root_path\n end", "def after_inactive_sign_up_path_for(resource)\n root_path\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\nend", "def after_inactive_sign_up_path_for(resource)\n super(resource)\nend", "def after_inactive_sign_up_path_for(resource)\n home_user_path(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n '/app/users/registration/confirm'\n end", "def after_inactive_sign_up_path_for(resource)\n new_user_session_url\n end", "def after_inactive_sign_up_path_for(resource)\n respond_to?(:root_path) ? root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(resource)\n respond_to?(:root_path) ? root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(resource)\n respond_to?(:root_path) ? root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(resource)\n respond_to?(:root_path) ? root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(resource)\n respond_to?(:root_path) ? root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(resource)\n #logger.debug(\"after_inactive_sign_up_path_for\")\n new_usuario_session_path\n end", "def after_inactive_sign_up_path_for(resource)\n respond_to?(:root_path) ? 
root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(_resource)\n consultant_welcome_path\n end", "def after_inactive_sign_up_path_for(resource)\n thank_you_path\n end", "def after_inactive_sign_up_path_for(resource)\n \"/allusers\"\n end", "def after_inactive_sign_up_path_for(resource)\n new_user_session_path\n end", "def after_inactive_sign_up_path_for(resource)\n new_user_session_path\n end", "def after_inactive_sign_up_path_for(resource)\n \tputs \"---------------------------------------------------------------\"\n birth_plans_path\n end", "def after_inactive_sign_up_path_for(resource)\n '/public/index'\n end", "def after_inactive_sign_up_path_for(resource, account=nil)\n if account.nil?\n super(resource)\n else\n signin_url(subdomain: account.subdomain)\n end\n end", "def after_inactive_sign_up_path_for(resource)\n rent_path\n end", "def after_inactive_sign_up_path_for(_resource)\n users_confirmations_pending_path\n end", "def after_inactive_sign_up_path_for(_resource)\n users_confirmations_pending_path\n end", "def after_inactive_sign_up_path_for(resource)\n new_owner_session_path\n end", "def after_inactive_sign_up_path_for(resource)\n company_path(current_user.company_id )\n end", "def after_inactive_sign_up_path_for(resource)\n users_sign_up_email_notice_path(email: resource.email)\n end", "def after_inactive_sign_up_path_for(resource)\n new_session_path(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n localized_root_path\n end", "def after_sign_up_path_for(resource)\n # super(resource)\n account_path(resource)\n end", "def after_inactive_sign_up_path_for(resource)\n super(resource)\n # users_popups_email_verification_path\n end", "def after_inactive_sign_up_path_for(resource)\n redirect_to users_path\n end", "def after_inactive_sign_up_path_for(resource)\n # super(resource)\n session[THANKS_KEY] = { email: resource[:email] }\n if resource.individual_use == true\n users_thanks_personal_path\n else\n users_thanks_company_path\n end\n end", "def after_sign_up_path_for(_resource)\n return new_user_session_path\n end", "def after_inactive_sign_up_path_for(_resource)\n new_participant_session_path\n end", "def after_inactive_sign_up_path_for(resource)\n scope = Devise::Mapping.find_scope!(resource)\n router_name = Devise.mappings[scope].router_name\n context = router_name ? send(router_name) : self\n context.respond_to?(:root_path) ? context.root_path : \"/\"\n # context.respond_to?(:index) ? context.index : \"/users\"\n end", "def after_inactive_sign_up_path_for(resource)\n user_steps_path\n end", "def after_inactive_sign_up_path_for(resource)\n scope = Devise::Mapping.find_scope!(resource)\n router_name = Devise.mappings[scope].router_name\n context = router_name ? send(router_name) : self\n context.respond_to?(:root_path) ? context.root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(resource)\n scope = Devise::Mapping.find_scope!(resource)\n router_name = Devise.mappings[scope].router_name\n context = router_name ? send(router_name) : self\n context.respond_to?(:root_path) ? context.root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(resource)\n scope = Devise::Mapping.find_scope!(resource)\n router_name = Devise.mappings[scope].router_name\n context = router_name ? send(router_name) : self\n context.respond_to?(:root_path) ? context.root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(resource)\n scope = Devise::Mapping.find_scope!(resource)\n router_name = Devise.mappings[scope].router_name\n context = router_name ? 
send(router_name) : self\n context.respond_to?(:root_path) ? context.root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(resource)\n scope = Devise::Mapping.find_scope!(resource)\n router_name = Devise.mappings[scope].router_name\n context = router_name ? send(router_name) : self\n context.respond_to?(:root_path) ? context.root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(resource)\n scope = Devise::Mapping.find_scope!(resource)\n router_name = Devise.mappings[scope].router_name\n context = router_name ? send(router_name) : self\n context.respond_to?(:root_path) ? context.root_path : \"/\"\n end", "def after_inactive_sign_up_path_for(resource)\n scope = Devise::Mapping.find_scope!(resource)\n router_name = Devise.mappings[scope].router_name\n context = router_name ? send(router_name) : self\n context.respond_to?(:root_path) ? context.root_path : '/'\n end", "def after_inactive_sign_up_path_for(resource)\n new_political_party_path\n end", "def after_sign_up_path_for(resource)\n \"/\"\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super\n end", "def after_sign_up_path_for(resource)\n after_register_path\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n super(resource)\n end", "def after_sign_up_path_for(resource)\n complete_user_registration_path\n end", "def after_sign_up_path_for(resource, account=nil)\n if account.nil?\n super(resource)\n else\n signin_url(subdomain: account.subdomain)\n end\n\n end", "def after_inactive_sign_up_path_for(resource)\n Rails.logger.debug(\"controller Clients::RegistrationsController.after_inactive_sign_up_path_for\")\n patch = workflow_link(@client_wf)\n patch || super(resource)\n end" ]
[ "0.8187507", "0.81848", "0.81760675", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.8153954", "0.814797", "0.814797", "0.80890775", "0.8049293", "0.8049293", "0.8049293", "0.803011", "0.80289674", "0.80289674", "0.80083954", "0.7999278", "0.7943053", "0.7943053", "0.79406214", "0.79406214", "0.79056126", "0.7888468", "0.78871614", "0.7882286", "0.7882286", "0.7882286", "0.7882286", "0.7882286", "0.78810126", "0.7873435", "0.787105", "0.7865514", "0.7861306", "0.7854481", "0.7854481", "0.78446656", "0.7825912", "0.7796437", "0.7782965", "0.77744347", "0.77744347", "0.7757861", "0.77390265", "0.7736396", "0.7734639", "0.7710782", "0.7705746", "0.7680377", "0.76776665", "0.764033", "0.7619823", "0.7603593", "0.75627166", "0.75252163", "0.7513783", "0.7513783", "0.7513783", "0.7513783", "0.7513783", "0.7513783", "0.7493755", "0.7472782", "0.7445859", "0.7432913", "0.7432913", "0.7432913", "0.7398972", "0.7396837", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.7396449", "0.73956835", "0.7378444", "0.7344754" ]
0.7571099
65