Dataset columns (name, type, observed range):

  query            string   lengths 7 to 9.55k
  document         string   lengths 10 to 363k
  metadata         dict
  negatives        list     lengths 0 to 101
  negative_scores  list     lengths 0 to 101
  document_score   string   lengths 3 to 10
  document_rank    string   102 distinct values
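Each row that follows pairs a query (an HTTP route) with one positive document (a Ruby method), a list of negative documents, per-negative scores, and a score and rank for the positive document. A minimal sketch of reading such rows, assuming they are serialized as JSON Lines with the column names above (the file name here is hypothetical, not taken from this dump):

require 'json'

# Iterate over one record per line and summarize it; field names match the schema above.
File.foreach('customer_users_triplets.jsonl') do |line|
  row = JSON.parse(line)
  query     = row.fetch('query')            # e.g. "GET /customer_users/1 ..."
  document  = row.fetch('document')         # the positive Ruby snippet for that query
  negatives = row.fetch('negatives', [])    # up to ~101 distractor snippets
  scores    = row.fetch('negative_scores', [])
  puts format('%-40s %3d negatives, top score %s', query[0, 40], negatives.size, scores.first.inspect)
end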
GET /customer_users/1 GET /customer_users/1.json
def show
end
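The positive document here is an empty show body; in a scaffold-generated controller the record it renders is loaded by a before_action rather than inside show itself. A minimal sketch of that assumed surrounding code (the helper name set_customer_user is the scaffold convention, not part of this record):

# Assumed surrounding controller code, not taken from the dataset record above.
class CustomerUsersController < ApplicationController
  before_action :set_customer_user, only: [:show]

  # GET /customer_users/1
  # GET /customer_users/1.json
  def show
  end

  private

  # Conventional scaffold helper (an assumption); loads the record addressed by /customer_users/:id.
  def set_customer_user
    @customer_user = CustomerUser.find(params[:id])
  end
end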
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def index\n @user = current_user\n @customers = @user.customers\n\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @customers }\n end\n end", "def index\n @customers = @user.customers.all\n render json: @customers\n end", "def list_users_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: CustomerApi.list_users ...'\n end\n # unbox the parameters from the hash\n customer_id = opts[:'customer_id']\n # verify the required parameter 'customer_id' is set\n if @api_client.config.client_side_validation && customer_id.nil?\n fail ArgumentError, \"Missing the required parameter 'customer_id' when calling CustomerApi.list_users\"\n end\n # resource path\n local_var_path = '/customer/{customer_id}/users'.sub('{' + 'customer_id' + '}', CGI.escape(customer_id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'Array<SchemasUserResponse>'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['token']\n\n new_options = opts.merge(\n :operation => :\"CustomerApi.list_users\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: CustomerApi#list_users\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def get_shopify_user(customer_id, email)\n conn = shopify_connection()\n\n response = conn.get do |req|\n req.url \"/admin/customers/#{customer_id}.json\"\n end\n customer = JSON.parse(response.body)['customer']\n customer_match = customer.nil? ? false : customer['email'] == email\n { 'customer' => customer, 'match' => customer_match }\nend", "def get_shopify_user(customer_id, email)\n conn = shopify_connection()\n\n response = conn.get do |req|\n req.url \"/admin/customers/#{customer_id}.json\"\n end\n customer = JSON.parse(response.body)['customer']\n customer_match = customer.nil? ? false : customer['email'] == email\n { 'customer' => customer, 'match' => customer_match }\nend", "def index\n if params[:single]\n\t url = \"#{API_BASE_URL}/users/#{params[:id]}.json\"\n\t response = RestClient.get(url)\n\t @user = JSON.parse(response.body)\n\telse\n\t url = \"#{API_BASE_URL}/users.json\"\t \n response = RestClient.get(url)\n @users = JSON.parse(response.body)\t\t \n\tend\n end", "def index\n @users = User.fetch_all_customers\n end", "def index\n\t\t@customer_users = CustomerUser.all\n\tend", "def fetch_one_user_data\n get_url(\"/api/v1/users/#{@filter}\")\n end", "def customer(customer_id)\n client.get \"customers/#{inst_id}/#{customer_id}\"\n end", "def index\n if current_user.role == 'customer_admin'\n render json: current_user.customer and return\n else\n authorize! 
:read_all, Customer\n customers = Customer.order('name asc')\n render json: customers and return\n end\n end", "def GetUser id\n\n APICall(path: \"users/#{id}.json\")\n\n end", "def get_customer(id)\n get(\"customers/#{id}\")\n end", "def get_customer(id)\n get(\"customers/#{id}\")\n end", "def get_customer_profile\n authenticate_request!\n json_response(current_customer)\n end", "def show\n begin\n user = User.find(params[:user_id])\n render json: { users: user }, status: :ok\n rescue => e\n render json: { errors: e.message}, status: 404\n end\n end", "def customer(options = nil)\n request = Request.new(@client)\n path = \"/authorization-requests/\" + CGI.escape(@id) + \"/customers\"\n data = {\n\n }\n\n response = Response.new(request.get(path, data, options))\n return_values = Array.new\n \n body = response.body\n body = body[\"customer\"]\n customer = Customer(self._client)\n return_values.push(customer.fill_with_data(body))\n\n \n return_values[0]\n end", "def customer(customer_id)\n perform_get_request(\"/customer/#{customer_id}\")\n end", "def users(args = {})\n get(\"/users.json\",args)\n end", "def customer_list\n perform_get_request('/customer/list')\n end", "def account_user_with_http_info(customeruuid, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: CustomersApi.account_user ...'\n end\n # verify the required parameter 'customeruuid' is set\n if @api_client.config.client_side_validation && customeruuid.nil?\n fail ArgumentError, \"Missing the required parameter 'customeruuid' when calling CustomersApi.account_user\"\n end\n # resource path\n local_var_path = '/1.0.0/account/{customeruuid}/user'.sub('{' + 'customeruuid' + '}', customeruuid.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['oAuth2']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'Array<User>')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: CustomersApi#account_user\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def GetUsers params = {}\n\n params = params.merge(path: 'users.json')\n APICall(params)\n\n end", "def show\n @users = User.all\n json_response(@users)\n end", "def get \n render :json => User.find(params[:id])\n end", "def show\n @user = User.find(params[:id])\n render json: {\n username: @user.username,\n first_name: @user.first_name,\n last_name: @user.last_name,\n email: @user.email,\n phone_number: @user.phone_number,\n contacts: @user.contacts\n }, status: :ok\n end", "def index\n @customers = Customer.where(user_id: current_user.id)\n end", "def users(params = {})\n make_get_request('/account/users', params)\n end", "def users\n get('get_users')\n end", "def list_users_for_all_tenants(args = {}) \n get(\"/users.json/global\", args)\nend", "def list_users_for_all_tenants(args = {}) \n get(\"/users.json/global\", args)\nend", "def index\n @customers = Customer.where(:company_id => current_user.company.id).paginate(:page => params[:page])\n\n respond_to do |format|\n format.html # 
index.html.erb\n format.json { render json: @customers }\n end\n end", "def get_user_details\n @user = User.find_by_id(params[:user_id])\n render json: @user\n end", "def index\n @customers = User.where(:authority => 'customer')\n end", "def index\n\t\t# specifying json format in the URl\n\t uri = \"#{API_BASE_URL}/users.json\"\n\t # It will create new rest-client resource so that we can call different methods of it\n\t rest_resource = RestClient::Resource.new(uri, USERNAME, PASSWORD)\n\n\t # this next line will give you back all the details in json format, \n\t #but it will be wrapped as a string, so we will parse it in the next step.\n\t users = rest_resource.get \n\n\t # we will convert the return data into an array of hash. see json data parsing here\n\t @users = JSON.parse(users, :symbolize_names => true)\n\tend", "def list_users\n self.class.get('/users')\n end", "def show\n @users = User.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @users }\n end\n end", "def index\n users = User.all\n\n render json: users, each_serializer: Api::V1::UsersSerializer\n end", "def index\n @myusers = Myuser.all\n\n render json: @myusers\n end", "def show\n user = User.select(:id, :username, :email).find(params[:id])\n render :json => user\n end", "def index\n \tcustomers = Customer.all\n \trender json: customers\n \tend", "def user\n render :json=> User.find(params[:id])\n end", "def customers\n ShopifyAPI::Customer.all\n end", "def index\n #@users = User.all\n @users = User.where(tenant_id: current_tenant.id)\n render json: {\n message: 'Your All Users',\n user: @users\n }\n end", "def show\n render json: Users.find(params[\"id\"])\n end", "def get_users(request); end", "def index\n @users = User.order_by(last_name: :desc)\n if @users\n render json: Oj.dump(json_for(@users, include: ['phones', 'cards'], meta: meta), mode: :compat)\n else\n return head :unauthorized\n end\n end", "def index\n if !session[:user_id]\n redirect_to customer_path(session[:customer_id]) , notice: 'Access Denied'\n return\n else\n @customers = Customer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @customers }\n end\n end\n end", "def show\n # When a http GET request to '/users/1' is received, have it show,\n # in json format, user 1's information.\n @id = params[:id]\n @user = User.find(@id)\n render json: @user\n end", "def show\n user = User.find(params[:id])\n render json: user\n end", "def index\n users = User.all\n json_response(users)\n end", "def show\n user = User.find(params[:id])\n\n render json: user\n end", "def index\n users = User.all\n render json: { users: users }, status: :ok\n end", "def show\n user = User.find(params[:id])\n render json: user\n end", "def index\n #byebug\n #CHECK AUTHORIZATION HERE NOT JUST JUST AUTHENTICATION\n \n render json: {farmers: Farmer.all, customers: CustomerUser.all} #User.all #CustomerUser.all #{users: User.all, customers: Customer_User.all}\n end", "def show\n user = User.find(params[:id])\n render json: @user\nend", "def user(user_id)\n params = {\n :client_id => Swiftype.platform_client_id,\n :client_secret => Swiftype.platform_client_secret\n }\n get(\"users/#{user_id}.json\", params)\n end", "def index\n json_response(User.all) \n end", "def query_users(options={}) path = \"/api/v2/users\"\n get(path, options, AvaTax::VERSION) end", "def show\n user = User.friendly.find(params[:user_id]) \n render json: user\n end", "def show\n @user = User.where('email = 
?',params[:email]).take\n render json: @user\n end", "def show\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @customer }\n end\n end", "def show\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @customer }\n end\n end", "def show\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @customer }\n end\n end", "def show\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @customer }\n end\n end", "def show\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @customer }\n end\n end", "def show\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @customer }\n end\n end", "def show\n @user = User.find(params[:id])\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n render json: @user\n end", "def index\r\n users = User.all\r\n render json: users\r\n end", "def show\n @user = User.find(params[:id])\n\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n\n render json: @user\n end", "def show\n @user = User.find(params[:id])\n\n render json: @user\n end", "def index\n users = User.all\n render json: users\n end", "def index\n users = User.all\n render json: users\n end", "def index\n users = User.all\n render json: users\n end", "def index\n users = User.all\n render json: users\n end", "def index\n @users = User.all\n render json: @users\n end", "def index\n @users = User.all\n render json: @users\n end", "def index\n @users = User.all\n render json: @users\n end", "def index\n @users = User.all\n render json: @users\n end", "def index\n @users = User.all\n render json: @users\n end", "def index\n @users = User.all\n render json: @users\n end", "def index\n @users = User.all\n render json: @users\n end", "def index\n @users = User.all\n render json: @users\n end", "def index\n @users = User.all\n render json: @users\n end", "def index\n @users = User.all\n render json: @users\n end", "def index\n @users = User.all\n\n render json: @users\n end", "def index\n @users = User.all\n\n render json: @users\n end", "def index\n @users = User.all\n\n render json: @users\n end", "def index\n @users = User.all\n\n render json: @users\n end", "def index\n @users = User.all\n\n render json: @users\n end" ]
[ "0.7416016", "0.74013877", "0.6927694", "0.69048446", "0.69048446", "0.68475395", "0.68373173", "0.6823955", "0.68135196", "0.67877716", "0.67836523", "0.6776003", "0.6753472", "0.6753472", "0.67411846", "0.6731843", "0.67249376", "0.668473", "0.66769946", "0.66676176", "0.6648374", "0.6623787", "0.6599894", "0.65974617", "0.6587802", "0.6565881", "0.6552212", "0.6533606", "0.6528509", "0.6528509", "0.65233976", "0.65201694", "0.6517388", "0.6515681", "0.6496481", "0.64939255", "0.6475359", "0.6456191", "0.6442436", "0.6437104", "0.6424852", "0.6421046", "0.64020175", "0.6401863", "0.63983935", "0.639417", "0.6389", "0.63827026", "0.6377415", "0.63722086", "0.6364596", "0.63639176", "0.63636065", "0.6345531", "0.6344747", "0.6343905", "0.6343452", "0.6342976", "0.632996", "0.63273424", "0.6327139", "0.6327139", "0.6327139", "0.6327139", "0.6327139", "0.6327139", "0.6326376", "0.6326376", "0.6326376", "0.6326376", "0.6326376", "0.6326376", "0.6326376", "0.6323333", "0.6323296", "0.6323296", "0.6323296", "0.6323296", "0.6323296", "0.6323296", "0.6323296", "0.6323296", "0.6313956", "0.6313956", "0.6313956", "0.6313956", "0.63112885", "0.63112885", "0.63112885", "0.63112885", "0.63112885", "0.63112885", "0.63112885", "0.63112885", "0.63112885", "0.63112885", "0.6310655", "0.6310655", "0.6310655", "0.6310655", "0.6310655" ]
0.0
-1
POST /customer_users POST /customer_users.json
def create
  @customer_user = CustomerUser.new(customer_user_params)

  respond_to do |format|
    if @customer_user.save
      format.html { redirect_to login_url, alert: "Customer user #{@customer_user.name} was successfully created." }
      format.json { render action: 'show', status: :created, location: @customer_user }
    else
      format.html { render action: 'new' }
      format.json { render json: @customer_user.errors, status: :unprocessable_entity }
    end
  end
end
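The create action builds the record from customer_user_params, a strong-parameters helper that is not included with this positive document; one of the negative documents later in this record defines a matching helper, reproduced here for readability:

# Strong-parameters whitelist as it appears in one of the negatives below;
# it is the usual counterpart of CustomerUser.new(customer_user_params).
def customer_user_params
  params.require(:customer_user).permit(:name, :password, :password_confirmation, :email)
end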
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create\n @customer = @user.customers.build(customer_params)\n if @customer.save\n render json: @customer, status: :created\n else\n render json: @customer.errors, status: :unprocessable_entity\n end\n\n end", "def create\n \n @customer = Customer.new(customer_params)\n @user = current_user\n respond_to do |format|\n if @customer.save\n @customer.update!(user_id: @user.id)\n format.html { redirect_to customers_path, notice: 'Customer was successfully created.' }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = User.new(customer_params)\n @customer.authority = 'customer'\n respond_to do |format|\n if @customer.save\n if !current_user\n log_in @customer, :customer\n end\n format.html {redirect_to customer_path(@customer), notice: 'Customer was successfully created.'}\n format.json {render :show, status: :created, location: @customer}\n else\n format.html {render :new}\n format.json {render json: @customer.errors, status: :unprocessable_entity}\n end\n end\n end", "def create\n #@customer = Customer.new(params[:customer].merge(:user_id => current_user.id))\n @customer = Customer.new(params[:customer])\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to customers_path, notice: 'Customer was successfully created.' }\n format.json { render json: @customer, status: :created, location: @customer }\n else\n format.html { render action: \"new\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def post_users(users)\n self.class.post('https://api.yesgraph.com/v0/users', {\n :body => users.to_json,\n :headers => @options,\n })\n end", "def create\n authorize! :create, Customer\n @customer = Customer.new(customer_params)\n @customer.customer_admin = current_user\n\n if @customer.save\n render json: nil, status: :created\n else\n render json: @customer.errors.full_messages, status: :unprocessable_entity\n end\n end", "def create\r\n @customer = Customer.new(params[:customer])\r\n\r\n respond_to do |format|\r\n if @customer.save\r\n format.html { redirect_to '/auth/identity/register', :notice => 'Customer was successfully created.' }\r\n format.json { render :json => @customer, :status => :created, :location => @customer }\r\n else\r\n format.html { render :action => \"new\" }\r\n format.json { render :json => @customer.errors, :status => :unprocessable_entity }\r\n end\r\n end\r\n end", "def create\n @user = current_user\n @customer = @user.customers.build(params[:customer])\n generated_password = Devise.friendly_token.first(6)\n\n @customer.password = generated_password\n @customer.password_visible = generated_password\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to user_customer_path(@user, @customer), notice: 'Customer was successfully created.' 
}\n format.json { render json: @customer, status: :created, location: @customer }\n else\n format.html { render action: \"new\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def CreateUser params = {}\n \n APICall(path: 'users.json',method: 'POST',payload: params.to_json)\n \n end", "def create_customer(customer_info, password)\n post_wrapper('/V1/customers',\n { customer: customer_info,\n password: password }.to_json,\n admin_headers)\n end", "def create\n @customer = Customer.new(customer_params)\n\n if @customer.save\n render json: @customer, status: :created, location: @customer\n else\n ap errors(@customer.errors)\n render json: errors(@customer.errors), status: :unprocessable_entity\n end\n end", "def send_customer(customer)\n request(customer, \"customer\", :post, {method: \"add\"})\n end", "def create\n @user = User.new(user_params)\n\n return render action: 'new' unless @user.save\n\n client = Dwolla.new\n\n first_name = @user.first_name\n last_name = @user.last_name\n email = @user.email\n type = \"personal\"\n address = @user.address\n city = @user.city\n state = @user.state\n postal_code = @user.postal_code\n date_of_birth = @user.date_of_birth\n last_four_of_ssn = @user.last_four_of_ssn\n\n client.create_customer(first_name, last_name, email, type, address, city, state, postal_code, date_of_birth, last_four_of_ssn)\n redirect_to user_path(@user), notice: 'Created user'\n end", "def create\n @customer_detail = CustomerDetail.new(customer_detail_params)\n # When we make a new CustomerDetail, also make a Customer to go with it, using the Email from CustomerDetail\n customer_info = customer_detail_params['customer_attributes']\n customer_info['email'] = customer_detail_params['email']\n @customer = Customer.create(customer_info)\n @customer_detail.customer = @customer\n\n respond_to do |format|\n if @customer_detail.save\n format.html { redirect_to wines_url,\n notice: \"User #{@customer_detail.email} was successfully created. Please proceed to Login.\" }\n format.json { render :show, status: :created, location: @customer_detail }\n else\n format.html { render :new }\n format.json { render json: @customer_detail.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(customer_params)\n @customer.user = current_user\n authorize @customer, :create?\n\n respond_to do |format|\n if @customer.save\n @customer.works.create!(trackable_url: \"#{customer_path(@customer)}\", action: :create, user: current_user, \n parameters: @customer.to_json(except: {customer: [:fullname_and_address_and_pesel_nip_and_birth_date]}, \n include: { \n user: {\n only: [:id, :name, :email] } \n }))\n\n flash_message :success, t('activerecord.messages.successfull.created', data: @customer.fullname)\n format.html { redirect_to @customer }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' 
}\n format.json { render json: @customer, status: :created }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_user\n params = {\n :client_id => Swiftype.platform_client_id,\n :client_secret => Swiftype.platform_client_secret\n }\n post(\"users.json\", params)\n end", "def create_customer(*args)\n options = args.last.is_a?(Hash) ? args.pop : {}\n response = post(\"customers\",options)\n if response['success']\n return response['results']['customer']\n else\n return response\n end\n end", "def customer_user_params\n\t\t\tparams.require(:customer_user).permit(:name, :password, :password_confirmation, :email)\n\t\tend", "def create_customer\n begin\n customer = Stripe::Customer.create\n # Create customer successful - return its id\n log_info(\"Customer created with ID #{customer[:id]}\")\n rescue Stripe::StripeError => e\n return_error 402, \"Error creating customer #{e.message}\"\n end\n status 201\n { customer_id: customer.id }.to_json\n end", "def create\n @customer = Customer.new(customer_params.map{|k,v| {k.to_sym => v.class == ActionController::Parameters ? [v.to_hash] : v.to_s}}.reduce({}, :merge))\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_customer(body:)\n new_api_call_builder\n .request(new_request_builder(HttpMethodEnum::POST,\n '/v2/customers',\n 'default')\n .header_param(new_parameter('application/json', key: 'Content-Type'))\n .body_param(new_parameter(body))\n .header_param(new_parameter('application/json', key: 'accept'))\n .body_serializer(proc do |param| param.to_json unless param.nil? end)\n .auth(Single.new('global')))\n .response(new_response_handler\n .deserializer(APIHelper.method(:json_deserialize))\n .is_api_response(true)\n .convertor(ApiResponse.method(:create)))\n .execute\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to customers_path, notice: 'Customer was successfully created.' 
}\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def mf_api_manually_add_user\n\n # Create new User\n user = User.new\n\n # Populate User\n user.clientid = params[:client_id]\n user.email = params[:email]\n\n user.save\n\n response = {\n success: true,\n message: 'New User Created!'\n }\n\n render json: response\n\n end", "def create\n @customer = current_user.customers.new(params[:customer])\n\n respond_to do |format|\n if @customer.save\n format.xml { render :xml => @customer, :status => :created, :location => @customer }\n format.js\n else\n format.xml { render :xml => @customer.errors, :status => :unprocessable_entity }\n format.js\n end\n end\n end", "def create\n user = User.new(\n username: user_params[:username],\n password: user_params[:password])\n if user.save\n create_example_collection(user)\n render json: user, except: [:password_digest, :created_at, :updated_at]\n else\n render json: {errors: user.errors.full_messages}\n end\n end", "def create\n @user = @application.users.create(user_params)\n\n if @user.valid?\n render json: @user, status: :created, location: api_application_user_path(@application,@user)\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully added.' }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :index }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n user = User.create(user_params) \n render json: user, status: :created\n end", "def createCustomer(customer_properties)\n url_data = \"/api/customers/\"\n @json_customer_properties = customer_properties.to_json\n options = {\n :digest_auth => @auth_digest,\n :body => @json_customer_properties\n }\n url_request = \"#{url_data}\"\n postData(url_request, options)\n end", "def create\n\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @user = current_user.users.build(user_params)\n\n if @user.save\n render json: @user\n else\n @user_items = []\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to admin_customer_path(@customer), notice: 'Customer was successfully created.' 
}\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: \"Customer was successfully created.\" }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n UserMailer.activation_email(@customer.user).deliver_now\n format.html { redirect_to home_path, notice: 'Customer was successfully created.' 
}\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def customer_params\n params.require(:customer).permit(\n :first_name,\n :last_name,\n :email,\n :username\n )\n end", "def create\n @user = User.new(user_params)\n\n respond_to do |format|\n if @user.save\n format.html { redirect_to @user, notice: 'User was successfully created.' }\n format.json { render :show, status: :created, location: @user }\n else\n format.html { render :new }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n\n Stripe.api_key = ENV[\"STRIPE_SECRET_KEY\"]\n\n stripe_customer = Stripe::Customer.create(\n :email => @user.email,\n )\n @user.stripe_id = stripe_customer.id\n end", "def create\n @customer = Customer.new(customer_params)\n \n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render 'new' }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @user = current_user\n @customer = @user.customers.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @customer }\n end\n end", "def customer_params\n params.require(:customer).permit(:first_name, :last_name, :user_id)\n end", "def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @customer = Customer.new(params[:customer])\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to action: \"index\", notice: 'Customer was successfully created.' }\n format.json { render json: @customer, status: :created, location: @customer }\n else\n format.html { render action: \"new\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create(payload)\n client.post(make_path('/customers'), payload)\n end", "def create\n @customer_customer = Customer::Customer.new(customer_customer_params)\n @customer_customer.admin = current_admin\n\n respond_to do |format|\n if @customer_customer.save\n format.html { redirect_to @customer_customer, notice: 'Customer was successfully created.' }\n format.json { render :show, status: :created, location: @customer_customer }\n else\n format.html { render :new }\n format.json { render json: @customer_customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(params[:customer])\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' 
}\n format.json { render json: @customer, status: :created, location: @customer }\n else\n format.html { render action: \"new\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(params[:customer])\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' }\n format.json { render json: @customer, status: :created, location: @customer }\n else\n format.html { render action: \"new\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def user_create(username, email, password, tenant_id)\n\t\n\t\tuser = {\"user\" => {\"name\" => username, \"email\" => email, \"enabled\" => true, \"password\" => password, \"tenantid\" => tenant_id}}\n\t\n\t\tjson_string = JSON.generate(user)\n\t\n\t\tpost_call = Curl::Easy.http_post(\"#{@ip_address}:#{@port_2}/v2.0/users\", json_string\n\t\t) do |curl|\n\t\t\tcurl.headers['x-auth-token'] = @token\n\t\t\tcurl.headers['Content-Type'] = 'application/json'\n\t\tend\n\t\t\t\t\t\t\t\t\t \n\t\tparsed_json = JSON.parse(post_call.body_str)\n\t\t\n\t\tputs parsed_json\n\t\treturn parsed_json\n\tend", "def create\n # whitelist params\n @customer = Customer.new(customer_params)\n respond_to do |format|\n if @customer.save\n format.html { redirect_to customers_path }\n format.json { render :show, status: :ok, location: @customer }\n format.json { render :json => @objects.map(&:attributes) }\n else\n format.html { render :new, warning: \"Customer record was NOT saved.\"}\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @user = User.new(user_params(params))\n \n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create_customer(options = {})\n perform_post_with_object('/customers', options, Epages::Customer)\n end", "def create\n @user = User.new(user_params(params))\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @user = User.new(user_params(params))\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user.as_json(only: [:email, :authentication_token]), status: :created\n else\n head(:unprocessable_entity)\n end\n end", "def create_user(params:)\n parse(JSON.parse(connection.post(\"users\", params.to_json).body))\n end", "def create\n @user = User.new user_params(params[:user])\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @user = User.new user_params(params[:user])\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @customer = Customer.new(customer_params)\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' 
}\n format.json { render action: 'show', status: :created, location: @customer }\n else\n format.html { render action: 'new' }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @user = User.new(form_params)\n\n respond_to do |format|\n if @user.save\n format.json { render json: { users: @user }, status: :created }\n else\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\t\t@user = User.new(users_params)\n\t\tif @user.save\n\t\t\tjson_response(@user, \"User is created Successfully.\")\n\t\telse\n\t\t\trender json: {message: @user.errors.full_messages.join(\" \")}, status: 400\n\t\tend\t\t\n\tend", "def index\n @customers = @user.customers.all\n render json: @customers\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' }\n format.json { render action: 'show', status: :created, location: @customer }\n else\n format.html { render action: 'new' }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, \n notice: \"Customer #{@customer.email} was successfully created.\" }\n format.json { render action: \"show\", status: :created, location: @customer }\n else\n format.html { render action: \"new\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n # render json: params\n render json: Users.create(params[\"user\"])\n end", "def post body=nil, headers={}\n @connection.post \"users.json\", body, headers\n end", "def create\n @customer = Customer.new(params[:customer])\n @user = User.find(session[:user_id])\n @customer.created_by = @user.name\n respond_to do |format|\n if @customer.save\n format.html { redirect_to(customers_path, :notice => 'Customer was successfully created.') }\n format.xml { render :xml => @customer, :status => :created, :location => @customer }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @customer.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n user= User.create(user_params)\n render json: user\n end", "def create_user\n @user = User.new(user_params)\n if @user.save\n render json: UserSerializer.new(@user).serialized_json\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n\n puts '-----------------------create in user controller'\n\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n\n end", "def create\n\t\t@user = User.new(user_params)\n\t\tif @user.save\n\t\t\trender json: @user, status: :created, location: @user\n\t\telse\n\t\t\trender json: @user.errors, status: :unprocessable_entity\n\t\tend\n\tend", "def create\n @customer = Customer.new(params[:customer])\n\n respond_to do |format|\n if @customer.save\n UserMailer.welcome_customer(@customer).deliver\n format.html { redirect_to @customer, notice: 'Customer was successfully created.' 
}\n format.json { render json: @customer, status: :created, location: @customer }\n else\n format.html { render action: \"new\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n user = User.create(user_params)\n if user.valid?\n render json: user\n else\n render json: user.errors, status: :unprocessable_entity\n end\n end", "def create\n user = User.new(user_params)\n if user.save\n render json: user\n else\n render json: {errors: \"Cannot create user\"}, :status => 420\n end\n end", "def create\n @user = User.new(user_params)\n if @user.save\n render json: { user: @user, success: 'User registration successful' }\n else\n render json: { error: 'User registration unsuccessful' }\n end\n end", "def create\n @user = User.new(user_params)\n \n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @user = User.create(:name => params[:name],:email => params[:email],:number => params[:phone],:password => params[:password],:customer_id =>params[:customer_id])\n #@user = User.create(user_params)\n\n \t#respond_with {:auth_token => @user.authentication_token}\n \trespond_with(@user.authentication_token, :location =>nil)\n \t\n # respond_to do |format|\n # if @user.save\n # # Tell the UserMailer to send a welcome Email after save\n # UserMailer.welcome_email(@user).deliver\n \n # format.html { redirect_to(@user, notice: 'User was successfully created.') }\n # format.json { render json: @user, status: :created, location: @user }\n # else\n # format.html { render action: 'new' }\n # format.json { render json: @user.errors, status: :unprocessable_entity }\n # end\n # end\n \n end", "def create\n @customer = Customer.new(customer_params)\n\n respond_to do |format|\n if @customer.save\n format.html { redirect_to @customer, notice: t(\"activerecord.controller_msg.customer.created\", :default => 'Customer was successfully created.') }\n format.json { render :show, status: :created, location: @customer }\n else\n format.html { render :new }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def customer_params\n customer_params = params.require(:customer).permit(:username, :password, :password_confirmation)\n end", "def create\n @user = User.new(user_params)\n\n respond_to do |format|\n if @user.save\n format.json { render json: @user }\n else\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_customer\n input_param = params[:user] || params[:customer]\n if input_param\n input_param.delete(:password)\n input_param.delete(:password_confirmation)\n input_param.delete(:username)\n if current_user.is_a?(Referral) && input_param[:profile_attributes].present?\n input_param[:referral_category_id] = current_user.referral_category_id\n input_param[:profile_attributes][:referal_id] = current_user.code\n input_param[:profile_attributes][:referal] = current_user.referral_category.name if current_user.referral_category\n end\n end\n\n @customer = Customer.find_or_initialize_by_email(input_param[:email])\n @membership_order = Membership.find(session[:current_premium_id]) if session[:current_premium_id]\n \n if @customer.update_attributes(input_param)\n order.orderable = 
@customer\n # CustomerMailer.delay.welcome_email(@customer)\n CustomerMailer.delay.welcome_email_admin(@customer)\n \n if order.save && session[:current_premium_id].present?\n redirect_to extra_manage_orders_path\n else\n redirect_to premium_manage_orders_path\n end\n else\n @customer = Customer.new(input_param)\n flash[:errors] = @customer.errors.full_messages.uniq.join(', ')\n prepare_customer_form\n render :new\n end\n end", "def create(options = {})\n request(:post, '/users.json', default_params(options))\n end", "def create\n @customer = Customer.new(params[:customer])\n @customer.company = Company.find(current_user.company.id)\n\n respond_to do |format|\n if @customer.save\n format.html {\n flash[:notice] = I18n.t(:successfully_created, :model_name => Customer.model_name.human)\n redirect_to action: 'index'\n }\n format.json {\n render :json => {\n :listPartial => render_to_string(\n 'customers/_list',\n :formats => [:html],\n :layout => false,\n :locals => {\n :customers => Customer.where(:company_id => current_user.company.id).paginate(:page => params[:page])\n }\n ),\n status: :created,\n location: @customer\n }\n }\n else\n format.html { render action: \"new\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def users_post\n params = Rack::Request.new(@env).POST\n phone = params['phone']\n key = params['key']\n\n if phone_valid?(phone) && uuid_valid?(key)\n # Validate picture_id, first_name, last_name, and email\n picture_id = params['picture_id']\n error = picture_id_invalid_response!(picture_id)\n return error if error\n\n first_name = params['first_name']\n error = name_invalid_response!('First', first_name)\n return error if error\n\n last_name = params['last_name']\n error = name_invalid_response!('Last', last_name)\n return error if error\n\n email = params['email']\n error = email_invalid_response!(email)\n return error if error\n\n $pg.with do |pg|\n pg.exec_params('SELECT * FROM users_post($1, $2, $3, $4, $5)', [phone, key, first_name, last_name, email]) do |r|\n if r.num_tuples == 1\n user_id = r.getvalue(0, 0)\n body = {access_token: build_access_token(user_id, key)}\n if picture_id\n fields = Aws::S3::Resource.new.bucket('acani-chats').presigned_post({\n acl: 'public-read',\n content_length_range: 0..3145728,\n content_type: 'image/jpeg',\n key: \"users/#{user_id}/#{picture_id}.jpg\"\n }).fields\n body[:fields] = fields\n end\n return [201, body.to_json]\n end\n end\n end\n end\n set_www_authenticate_header\n [401, '{\"message\":\"Incorrect phone or key.\"}']\n end", "def create_user(attributes)\n post(\"/v1/users\", attributes)\n end", "def customer_params\n params.require(:customer).permit(:customer_name, :username, :registration_date, :email, :phone_number, :address, :gender)\n end", "def create\n user = User.new(@user_info)\n if user.save && user.errors.empty?\n render json: { status: 200, data: UserSerializer.new(user).as_json }\n else\n render json: { status: 400, error: user.errors.full_messages }\n end\n end", "def customer_params\n params.require(:customer).permit(:name)\n # , :username, :email)\n end", "def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def create\n @customer = Customer.new(customer_params)\n if @customer.save\n # render json: {name:@customer.name,mobile:@customer.mobile,email:@customer.email,state_name:@customer.state.name,\n # 
city_name:@customer.city.name,state_id:@customer.state_id,city_id:@customer.city_id}\n render json: @customer.attributes.merge(state_name:@customer.state.name,city_name:@customer.city.name,customer_count:Customer.count);\n else\n render json:{error: @customer.errors.full_messages}\n end\n end", "def create\n r = @api.create_user(user_params)\n respond_to do |format|\n if r.code == 201\n format.html { redirect_to users_url, notice: 'User was successfully created.' }\n else\n response = JSON.parse(r.body)\n format.html { redirect_to users_url, alert: response['message']}\n end\n end\n end", "def create_customer(params)\n r = validate_params(params, {optional: [:id, :first_name, :last_name, :comapny, :email, :phone, :fax, :website, :payment_method_nonce, :credit_card, :custom_fields]})\n return r unless r.success?\n result = @gateway.customer.create(params)\n success_with_data({result: result})\n end" ]
[ "0.7491036", "0.72490185", "0.7003268", "0.68083495", "0.67773503", "0.67731065", "0.66887033", "0.6686378", "0.6682531", "0.6611727", "0.6576755", "0.6518717", "0.65125144", "0.64826393", "0.6435251", "0.64301914", "0.6410813", "0.6409883", "0.6399922", "0.63989323", "0.63982683", "0.63921386", "0.638761", "0.63838303", "0.6380574", "0.6373819", "0.63703567", "0.6369448", "0.636432", "0.6350199", "0.6347035", "0.6323851", "0.6319827", "0.63195735", "0.63168186", "0.63168186", "0.63168186", "0.63168186", "0.63168186", "0.63168186", "0.6311499", "0.6310076", "0.6306987", "0.6302802", "0.6295423", "0.6291056", "0.6288828", "0.6283284", "0.6283284", "0.6283284", "0.62779844", "0.62769073", "0.62755036", "0.62751716", "0.62751716", "0.6265451", "0.626434", "0.62552834", "0.6247056", "0.62417245", "0.62417245", "0.62408566", "0.62301874", "0.622517", "0.622517", "0.6224519", "0.6222936", "0.6222862", "0.6220908", "0.6216823", "0.6213787", "0.62071717", "0.6204163", "0.62037337", "0.61998147", "0.61879176", "0.61866134", "0.6185681", "0.6183232", "0.6179625", "0.61685973", "0.6167976", "0.61674553", "0.6166613", "0.6155763", "0.6144938", "0.6140915", "0.61311513", "0.6130163", "0.61150897", "0.6109925", "0.61048377", "0.61011577", "0.60959256", "0.6088353", "0.60846865", "0.6076866", "0.60720414", "0.6071085", "0.6070143" ]
0.7161228
2
PATCH/PUT /customer_users/1 PATCH/PUT /customer_users/1.json
def update
  respond_to do |format|
    if @customer_user.update(customer_user_params)
      format.html { redirect_to users_url, notice: "Customer user #{@customer_user.name} was successfully updated." }
      format.json { head :no_content }
    else
      format.html { render action: 'edit' }
      format.json { render json: @customer_user.errors, status: :unprocessable_entity }
    end
  end
end
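The PATCH/PUT routes in the query reach this update action through Rails resource routing; the routes file is not part of the record, so the following is an assumed sketch rather than code from this dataset:

# config/routes.rb (assumed): standard resource routing generates
# PATCH/PUT /customer_users/:id -> customer_users#update, matching the query above.
Rails.application.routes.draw do
  resources :customer_users
end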
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update\n respond_to do |format|\n if @user.customer? and @user.update(user_params_update)\n format.html { redirect_to admin_customers_path, notice: 'The user was successfully updated.' }\n format.json { render :show, status: :ok, location: @user }\n else\n format.html { render :edit }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_customer(id, data)\n put(\"customers/#{id}\", { body: data })\n end", "def update\n authorize! :update, @customer\n update_customer_params = ( current_user.role == 'kanari_admin' ? admin_customer_params : customer_params)\n if @customer.update(update_customer_params)\n render json: nil, status: 200\n else\n render json: @customer.errors, status: :unprocessable_entity\n end\n end", "def update\n @customer = Customer.find(params[:id])\n\n if @customer.update(customer_params)\n head :no_content\n else\n render json: @customer.errors, status: :unprocessable_entity\n end\n end", "def update\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n #format.html { redirect_to customers_path }\n format.json { head :ok }\n else\n #format.html { render action: \"edit\" }\n format.json { render :json=> @customer.errors, :status=> :unprocessable_entity }\n end\n end\n end", "def update\n @customer = current_user.customers.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.xml { head :ok }\n format.js\n else\n format.xml { render :xml => @customer.errors, :status => :unprocessable_entity }\n format.js\n end\n end\n end", "def update\n render json: Users.update(params[\"id\"], params[\"user\"])\n end", "def update\n render json: User.update(params[\"id\"], params[\"user\"])\n end", "def UpdateUser params = {}\n \n APICall(path: 'users.json',method: 'PUT',payload: params.to_json)\n \n end", "def update\n @customer.user = current_user\n authorize @customer, :update?\n\n respond_to do |format|\n if @customer.update(customer_params)\n @customer.works.create!(trackable_url: \"#{customer_path(@customer)}\", action: :update, user: current_user, \n parameters: @customer.to_json(except: {customer: [:fullname_and_address_and_pesel_nip_and_birth_date]}, \n include: { \n user: {\n only: [:id, :name, :email] } \n }))\n\n flash_message :success, t('activerecord.messages.successfull.updated', data: @customer.fullname)\n format.html { redirect_to @customer }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_customer_profile\n authenticate_request!\n current_customer.assign_attributes(customer_update_params)\n\n if current_customer.save!\n json_response(current_customer)\n else\n json_response({ errors: customer.errors.full_messages }, status: :bad_request)\n end\n end", "def update\n @customer = Customer.find(params[:id])\n @user = User.find(session[:user_id])\n @customer.modified_by = @user.name\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to(customers_path, :notice => 'Customer was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @customer.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to 
@customer, \n notice: \"Customer #{@customer.email} was successfully updated.\" }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_customer(customer_id:,\n body:)\n new_api_call_builder\n .request(new_request_builder(HttpMethodEnum::PUT,\n '/v2/customers/{customer_id}',\n 'default')\n .template_param(new_parameter(customer_id, key: 'customer_id')\n .should_encode(true))\n .header_param(new_parameter('application/json', key: 'Content-Type'))\n .body_param(new_parameter(body))\n .header_param(new_parameter('application/json', key: 'accept'))\n .body_serializer(proc do |param| param.to_json unless param.nil? end)\n .auth(Single.new('global')))\n .response(new_response_handler\n .deserializer(APIHelper.method(:json_deserialize))\n .is_api_response(true)\n .convertor(ApiResponse.method(:create)))\n .execute\n end", "def update\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer_customer.update(customer_customer_params)\n format.html { redirect_to @customer_customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer_customer }\n else\n format.html { render :edit }\n format.json { render json: @customer_customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to action:\"index\", notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to customer_path(session[:customer_id]), notice: 'Your account was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to customers_path, notice: 'Customer was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @my_customer.update(my_customer_params)\n format.html { redirect_to @my_customer, notice: 'My customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @my_customer }\n else\n format.html { render :edit }\n format.json { render json: @my_customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html {redirect_to customer_path(@customer), notice: 'Customer was successfully updated.'}\n format.json {render :show, status: :ok, location: @customer}\n else\n format.html {render :edit}\n format.json {render json: @customer.errors, status: :unprocessable_entity}\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\n\n respond_to do |format|\n if @customer.update(customer_params)\n @@current_customer = @customer.id\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.json { render action: 'show', status: :created, location: @customer }#format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update \n user = User.find(params[:id])\n # byebug\n user.update(user_params)\n\n render json: user\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: \"Customer was successfully updated.\" }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @Customer = Customer.find params[:id]\n if @Customer.update_attributes (customer_params)\n render :json => { :success => true, :Customer => @Customer, :message => 'customer add ok ' }\n else\n render :json => {:success => false, :message => \"Existe un error\"}\n end\n end", "def update\n @customer = Customer.where(:id => params[:id], :company_id => current_user.company.id).first\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html {\n redirect_to @customer,\n notice: I18n.t(:successfully_updated, :model_name => Customer.model_name.human)\n }\n format.json { head :no_content }\n else\n @customer.errors.each do |name, error|\n flash[name] = error\n end\n format.html { render action: \"edit\" }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Customer was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :show }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_customer(customer, id)\n request(customer, \"customer\", :post, {id: id, method: \"edit\"})\n end", "def update\n user = User.find(params[:id])\n\n # Use update with user_params to do a mass-assignment update and save. 
\n if user.update_attributes(user_params)\n render json: user\n else \n render json: user.errors.full_messages, status: :unprocessable_entity\n end\n end", "def update_users_password(args = {}) \n put(\"/users.json/backoffice/#{args[:userId]}/password/#{args[:password]}\", args)\nend", "def update_users_password(args = {}) \n put(\"/users.json/backoffice/#{args[:userId]}/password/#{args[:password]}\", args)\nend", "def update\n user = find_user\n user.update!(user_params)\n render json: user\n end", "def update\n user = User.find_by(id: params[:id])\n user.update(user_params)\n render json: user\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to [:admin, @customer], notice: t('messages.updated', model:Customer.model_name.human) }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Your Customer Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: t(\"activerecord.controller_msg.customer.updated\", :default => 'Customer was successfully updated.') }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_customer(params)\n r = validate_params(params, {mandatory: [:customer_id],\n optional: [:id, :first_name, :last_name, :comapny, :email, :phone, :fax, :website, :payment_method_nonce, :credit_card, :custom_fields]})\n return r unless r.success?\n result = @gateway.customer.update(params)\n success_with_data({result: result})\n end", "def update # it is impossible for a customer to update a referral, so no need to modify.\n @customer = Customer.find(params[:id])\n\n respond_to do |format|\n if @customer.update_attributes(params[:customer])\n format.html { redirect_to [@selector, @customer], :notice => 'Customer was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @customer.errors, :status => :unprocessable_entity }\n end\n end\n end", "def modify_user(user)\n query_api_object Model::User, '/rest/user', user.to_hash, 'PUT'\n end", "def update_customer_profile\n json_response({ message: 'NOT IMPLEMENTED' })\n end", "def update\r\n @customer = Customer.find(params[:id])\r\n unencrypted_password = params[:customer][:password].to_s\r\n password_digest = BCrypt::Password.create(unencrypted_password)\r\n\tparams[:customer][:password_digest] = password_digest\r\n respond_to do |format|\r\n if @customer.update_attributes(params[:customer])\r\n format.html { redirect_to admin_path, :notice => 'Customer was successfully updated.' 
}\r\n format.json { head :ok }\r\n else\r\n format.html { render :action => \"edit\" }\r\n format.json { render :json => @customer.errors, :status => :unprocessable_entity }\r\n end\r\n end\r\n end", "def update \n @current_user.update(user_params)\n render json: @current_user\n end", "def update\n if @api_v1_user.update(api_v1_user_params)\n head :no_content\n else\n render json: @api_v1_user.errors, status: :unprocessable_entity\n end\n end", "def update\n user = @user_service.update_user(params[:id])\n render json: user, status: :ok\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n format.html { redirect_to @customer, notice: 'Personendaten wurden erfolgreich aktualisiert.' }\n format.json { render :show, status: :ok, location: @customer }\n else\n format.html { render :edit }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @cust = Cust.find(params[:id])\n\n respond_to do |format|\n if @cust.update_attributes(params[:cust])\n format.html { redirect_to @cust, notice: 'Cust was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @cust.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_user(options)\n patch(\"/user\", options, 3)\n end", "def updateUser\n options = {\n :body => params.to_json,\n :headers => {\n 'Content-Type' => 'application/json',\n 'Authorization' => request.headers['Authorization']\n }\n }\n results = HTTParty.put(\"http://192.168.99.101:4051/users/\"+@current_user[\"id\"].to_s, options)\n render json: results.parsed_response, status: results.code\n end", "def update_customer(customer)\n respond_with customer.to_vaulted_billing\n end", "def update_customer(customer)\n respond_with customer.to_vaulted_billing\n end", "def update\n if user.update(user_params)\n render json: user\n else\n render json: {errors: \"Cannot create user\"}, :status => 420\n end\n end", "def update\n \trespond_to do |format|\n if @user.update(user_params)\n format.json { render json: @user }\n else\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n\t \t\n end", "def update\n @sixmonth_customer = SixmonthCustomer.find(params[:id])\n\n respond_to do |format|\n if @sixmonth_customer.update_attributes(params[:sixmonth_customer])\n format.html { redirect_to action: :index }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @sixmonth_customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update(params = {})\n req = WebPay::CustomerRequestUpdate.create(params)\n raw_response = @client._request(:post, 'customers' + '/' + req.id.to_s, req)\n WebPay::CustomerResponse.new(raw_response)\n end", "def update\n @user = User.find(params[:id])\n @user.update(user_params)\n render json: @current_user\n end", "def update\n if @user.id == current_api_user.id\n if @user.update(user_params)\n render json: @user.as_json(except: [:updated_at]), status: :ok\n else\n render json: @user.errors, status: :bad_request\n end\n else\n render json: '', status: :forbidden\n end\n end", "def update\n if @user.update(user_params)\n render json: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def update\n if @user.update(user_params)\n render json: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "def update\n @self_service_customer = 
SelfService::Customer.find(params[:id])\n\n respond_to do |format|\n if @self_service_customer.update_attributes(params[:self_service_customer])\n format.html { redirect_to(@self_service_customer, :notice => 'Customer was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @self_service_customer.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @user = User.find(params[:id])\n @user.name = params[:name]\n @user.email = params[:email]\n @user.password = params[:password]\n @user.photo = params[:photo]\n @user.role = params[:type]\n @user.save\n render json:@user\n end", "def update_customer(customer, options = {})\n id = epages_id(customer)\n perform_patch_with_object(\"/customers/#{id}\", options, Epages::Customer)\n end", "def update\n respond_to do |format|\n if @customer.update(customer_params)\n set_favorite\n format.html { redirect_to @customer,\n notice: I18n.t('updated') }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @customer.errors,\n status: :unprocessable_entity }\n end\n end\n end", "def update\n user = User.find(params[:id])\n user.update(user_params)\n if user.valid?\n render json: user\n else\n render json: user.errors\n end\n end", "def customer_update\n respond_to do |format|\n if @customer.update(person_params)\n format.html { redirect_to customer_path, notice: 'データが更新されました。' }\n format.json { render :customer_show, status: :ok, location: @customer }\n else\n format.html { redirect_to edit_customer_path }\n format.json { render json: @customer.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_current_logged_in_user(args = {}) \n put(\"/users.json/current\", args)\nend", "def update\n @user = selected_user\n if @user.update(users_params)\n render 'api/users/show'\n else\n render json: @user.errors.full_messages, status: 422\n end\n end", "def update\n if @user.update(user_params)\n render json: @user\n else\n render json: {error: \"Could not update user\"}\n end\n end", "def update!(**args)\n @customer_id = args[:customer_id] if args.key?(:customer_id)\n end", "def update!(**args)\n @customer_id = args[:customer_id] if args.key?(:customer_id)\n end", "def update!(**args)\n @customer_id = args[:customer_id] if args.key?(:customer_id)\n end", "def update!(**args)\n @customer_id = args[:customer_id] if args.key?(:customer_id)\n end", "def update\n respond_to do |format|\n if @customer_set.update(customer_set_params)\n format.html { redirect_to @customer_set, notice: 'Customer set was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @customer_set.errors, status: :unprocessable_entity }\n end\n end\n end" ]
[ "0.6875741", "0.6811379", "0.6757901", "0.66581225", "0.6607307", "0.65830755", "0.65765476", "0.65719026", "0.6555065", "0.65258634", "0.6516538", "0.65162563", "0.6511523", "0.6504533", "0.6487113", "0.6479674", "0.6479674", "0.6479674", "0.6479674", "0.6479674", "0.6479674", "0.6479674", "0.6476833", "0.6462614", "0.6437045", "0.6437045", "0.6437045", "0.642442", "0.63984394", "0.639303", "0.63859427", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.63733584", "0.6368564", "0.6363069", "0.6361027", "0.6356823", "0.63387454", "0.63375294", "0.63335407", "0.6332783", "0.6326836", "0.632424", "0.632424", "0.6314295", "0.630239", "0.63003075", "0.62994874", "0.6271037", "0.62698334", "0.6252218", "0.6246609", "0.6241591", "0.6239302", "0.6209812", "0.62054044", "0.61967295", "0.61752415", "0.61742795", "0.61704797", "0.6168905", "0.61574626", "0.61574626", "0.6150231", "0.6147687", "0.61448634", "0.6141132", "0.613963", "0.61379886", "0.61353284", "0.61353284", "0.61334336", "0.6124953", "0.6112504", "0.6112472", "0.611067", "0.6098287", "0.6097988", "0.6091297", "0.60912865", "0.60892725", "0.60892725", "0.60892725", "0.60892725", "0.60812277" ]
0.69846743
0
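Every negative listed above follows the same scaffold-style update shape: load the record, call update with filtered parameters, then branch on success inside respond_to for both HTML and JSON clients. A minimal sketch of that recurring pattern, assuming a CustomerUser model and a customer_user_params helper as placeholder names rather than taking them from any single negative:

    # Sketch of the respond_to update pattern shared by the negatives above.
    # CustomerUser and customer_user_params are assumed placeholder names.
    def update
      @customer_user = CustomerUser.find(params[:id])
      respond_to do |format|
        if @customer_user.update(customer_user_params)
          format.html { redirect_to @customer_user, notice: 'Customer user was successfully updated.' }
          format.json { render :show, status: :ok, location: @customer_user }
        else
          format.html { render :edit, status: :unprocessable_entity }
          format.json { render json: @customer_user.errors, status: :unprocessable_entity }
        end
      end
    end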
DELETE /customer_users/1 DELETE /customer_users/1.json
def destroy begin @customer_user.destroy flash[:notice] = "User #{@customer_user.name} deleted" rescue Exception => e flash[:notice] = e.message end respond_to do |format| format.html { redirect_to users_url } format.json { head :no_content } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_user_for_tenant(args = {}) \n delete(\"/tenants.json/#{args[:tenantId]}/users/#{args[:userId]}\", args)\nend", "def DeleteUser id\n \n APICall(path: \"users/#{id}.json\",method: 'DELETE')\n \n end", "def destroy\n \n @customer = Customer.find(params[:id])\n @customer.destroy\n\n respond_to do |format|\n format.html { redirect_to user_customers_url(current_user) }\n format.json do\n render json: {\n customer: @customer,\n status: :deleted\n }.to_json\n end\n end\n end", "def delete\n render json: User.delete(params[\"id\"])\n end", "def delete(id)\n request(:delete, \"/users/#{id}.json\")\n end", "def delete\n render json: Users.delete(params[\"id\"])\n end", "def destroy\n @customer = Customer.find(params[:id])\n @customer.destroy\n\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :ok }\n end\n end", "def destroy\n @customer = Customer.find(params[:id])\n @customer.destroy\n\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :ok }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def delete\n @user.destroy\n respond_to do |format|\n format.html { redirect_to v1_resources_users_all_path, notice: 'User was deleted.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n @customer = Customer.find(params[:id])\n @customer.destroy\n\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer = Customer.find(params[:id])\n @customer.destroy\n\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer = Customer.find(params[:id])\n @customer.destroy\n\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer = Customer.find(params[:id])\n @customer.destroy\n\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer = Customer.find(params[:id])\n @customer.destroy\n\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer = Customer.find(params[:id])\n @customer.destroy\n\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer = Customer.find(params[:id])\n @customer.destroy\n\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer = Customer.find(params[:id])\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @user = User.find(params[:id])\n @user.destroy\n render json:@user\n end", "def destroy\n @user = User.find(params[:id])\n @user.destroy\n render json:@user\n end", "def user_delete(user_id)\n\t\tdelete_call = Curl::Easy.http_delete(\"#{@ip_address}:#{@port_2}/v2.0/users/#{user_id}\"\n\t\t) do |curl|\n\t\t\tcurl.headers['x-auth-token'] = @token\n\t\t\tcurl.headers['userId'] = user_id\n\t\tend\n\t\n\tend", "def destroy\r\n @customer = Customer.find(params[:id])\r\n @customer.destroy\r\n\r\n respond_to do |format|\r\n format.html { redirect_to customers_url }\r\n format.json { head :ok }\r\n end\r\n end", "def destroy\n \"\"\"\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n \"\"\"\n end", "def destroy\n @single_customer.destroy\n respond_to do |format|\n format.html { redirect_to single_customers_url, notice: 'Single customer was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n debugger\n @user.destroy\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end", "def delete(user_id:)\n path = '/users/{userId}'\n .gsub('{userId}', user_id)\n\n if user_id.nil?\n raise Appwrite::Exception.new('Missing required parameter: \"userId\"')\n end\n\n params = {\n }\n \n headers = {\n \"content-type\": 'application/json',\n }\n\n @client.call(\n method: 'DELETE',\n path: path,\n headers: headers,\n params: params,\n )\n end", "def destroy\n @user = User.find(params[:user_uuid])\n @user.destroy\n head :ok\n end", "def destroy\n @customer = Customer.where(:id => params[:id], :company_id => current_user.company.id).first\n @customer.destroy\n\n respond_to do |format|\n format.html { redirect_to customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @user = V1::User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to(v1_users_url) }\n format.xml { head :ok }\n end\n end", "def delete_users\n delete(users_path)\n end", "def delete_customer_by_id(customer_id)\n host = Swagger.configuration.host\n api_key = Swagger.configuration.private_api_key\n\n conn = Faraday.new\n resp = conn.delete do |req|\n req.url \"https://#{host}/api/customer/#{customer_id}\"\n req.headers['Content-Type'] = 'application/json'\n req.headers['Authorization'] = 'Basic ' + [\"#{api_key}:\"].pack('m').delete(\"\\r\\n\")\n end\n body = JSON.parse(resp.body)\n Swagger::Response.new(resp.status, body)\n return body\n end", "def destroy\n @user.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end", "def destroy\n user = User.find(params[:id]) # from url, nothing to do with table\n user.destroy\n render json: user\n end", "def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end", "def destroy\n @user = User.find_by_id_or_username params[:id]\n @user.destroy\n render api_delete @user\n end", "def remove_user\n query_api '/rest/user', nil, 'DELETE'\n end", "def destroy\n @customer = Customer.find(params[:id])\n @customer.destroy\n\n head :no_content\n end", "def delete_user\n @user = User.find(params[:id])\n if @user.destroy\n render :json => @user\n else\n render :json => @user.errors.full_messages\n end\n end", "def delete user_id, options={}, headers={}\n @connection.delete \"users/#{user_id}.json\", options, headers\n end", "def destroy\n @user.destroy\n format.json { head :no_content }\n end", "def destroy\n @v1_user.destroy\n respond_to do |format|\n format.html { redirect_to v1_users_url, notice: 'User was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @cust = Cust.find(params[:id])\n @cust.destroy\n\n respond_to do |format|\n format.html { redirect_to custs_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @my_customer.destroy\n respond_to do |format|\n format.html { redirect_to my_customers_url, notice: 'My customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to admin_customers_url, notice: 'Customer was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def user_management_delete_user id\n # the base uri for api requests\n query_builder = Configuration.BASE_URI.dup\n\n # prepare query string for API call\n query_builder << \"/v1/users\"\n\n # process optional query parameters\n query_builder = APIHelper.append_url_with_query_parameters query_builder, {\n \"id\" => id,\n \"client_id\" => @client_id,\n \"client_secret\" => @client_secret,\n }\n\n # validate and preprocess url\n query_url = APIHelper.clean_url query_builder\n\n # prepare headers\n headers = {\n \"user-agent\" => \"IAMDATA V1\",\n \"accept\" => \"application/json\"\n }\n\n # invoke the API call request to fetch the response\n response = Unirest.delete query_url, headers:headers\n\n # Error handling using HTTP status codes\n if response.code == 404\n raise APIException.new \"Not found\", 404, response.raw_body\n elsif response.code == 401\n raise APIException.new \"Unauthorized\", 401, response.raw_body\n elsif !(response.code.between?(200,206)) # [200,206] = HTTP OK\n raise APIException.new \"HTTP Response Not OK\", response.code, response.raw_body\n end\n\n response.body\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n authorize @customer, :destroy?\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully deleted.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer_customer.destroy\n respond_to do |format|\n format.html { redirect_to customer_customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def delete_user\n client.delete(user)\n end", "def destroy\n @api_user.destroy\n\n head :no_content\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: 'Customer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @user = User.find(params[:id])\n User.cascade_delete(@user)\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :ok }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: \"Customer was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_url, notice: \"Customer was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end", "def destroy\n @customer.destroy\n respond_to do |format|\n format.html { redirect_to customers_path, notice: 'Customer was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n user = User.find(params[:id])\n if user.destroy\n render json: user\n else\n render json: user.errors\n end\n end", "def destroy\n @user = User.find_by_id(params[:id])\n @user.destroy\n render :json=>{:status =>t('users.destroy.success')}\n end", "def destroy\n @user = User.find_by_id(params[:id])\n @user.destroy\n render :json=>{:status =>t('users.destroy.success')}\n end", "def destroy\n @user = User.find(params[:id])\n @user.destroy\n \n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :ok }\n end\n end", "def destroy\n @user = user.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :ok }\n end\n end", "def destroy\n c_id = @user_activity.customer_id\n @user_activity.destroy\n\n respond_to do |format|\n format.html { redirect_to customer_url(c_id), notice: 'User activity was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @user = ::User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to admincp_users_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @client_user.destroy\n respond_to do |format|\n format.html { redirect_to client_users_url, notice: 'Client user was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def call(id)\n client.delete(\"/api/rest/v1/users/#{id}.json\")\n true\n end", "def destroy\r\n\r\n @user.destroy\r\n\r\n save_users_log(current_user.email, 'user_deleted',@user.to_json)\r\n\r\n respond_to do |format|\r\n format.html { redirect_to users_url, notice: 'User was successfully deleted.' }\r\n end\r\n end", "def destroy\n @user = User.find(params[:user_id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @user = User.find(params[:id])\n\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :ok }\n end\n end", "def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to dm_core.admin_users_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_path }\n format.json { head :no_content }\n end\n end", "def destroy\n @sixmonth_customer = SixmonthCustomer.find(params[:id])\n @sixmonth_customer.destroy\n\n respond_to do |format|\n format.html { redirect_to sixmonth_customers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @user.destroy\n respond_to do |format|\n format.html { redirect_to backend_shop_users_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :ok }\n end\n end" ]
[ "0.74469155", "0.74214846", "0.7356577", "0.7171427", "0.71094245", "0.7088757", "0.70513505", "0.70513505", "0.7042167", "0.7042167", "0.7042167", "0.7042167", "0.7042167", "0.7042167", "0.7042167", "0.7042167", "0.7029094", "0.7028306", "0.7028306", "0.7028306", "0.7028306", "0.7028306", "0.7028306", "0.7028306", "0.70247555", "0.70186156", "0.70186156", "0.7007525", "0.6970959", "0.6958958", "0.69586074", "0.69497883", "0.69272804", "0.692146", "0.6915509", "0.69154936", "0.69053996", "0.69052994", "0.68995464", "0.68995005", "0.68890005", "0.6885486", "0.6869843", "0.68675023", "0.68649626", "0.6856068", "0.68515515", "0.68498516", "0.6843024", "0.6840765", "0.6832651", "0.68322676", "0.68316936", "0.682903", "0.6828549", "0.6797174", "0.6787263", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783456", "0.6783238", "0.6782895", "0.6780989", "0.6780989", "0.6776507", "0.6774572", "0.67740566", "0.67740566", "0.6773404", "0.67731285", "0.6764978", "0.676389", "0.6763871", "0.6756366", "0.6755759", "0.67539805", "0.6746942", "0.6741756", "0.67414796", "0.6736069", "0.67347866", "0.6732024", "0.6732024", "0.67288995", "0.6727307" ]
0.6921021
34
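The destroy action in the document value above wraps the delete in begin/rescue Exception, which also traps signals and interpreter-level errors. A sketch of the same action under the common Ruby guideline of rescuing StandardError instead; everything else is left as in the original, and @customer_user is assumed to be loaded by a callback:

    # Same shape as the destroy action above, but rescuing StandardError
    # rather than Exception so low-level errors are not swallowed.
    def destroy
      begin
        @customer_user.destroy
        flash[:notice] = "User #{@customer_user.name} deleted"
      rescue StandardError => e
        flash[:notice] = e.message
      end
      respond_to do |format|
        format.html { redirect_to users_url }
        format.json { head :no_content }
      end
    end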
Use callbacks to share common setup or constraints between actions.
def set_customer_user @customer_user = CustomerUser.find(params[:id]) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_required_actions\n # TODO: check what fields change to asign required fields\n end", "def action_hook; end", "def run_actions; end", "def define_action_hook; end", "def actions; end", "def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_eval do\n define_method(:valid?) do |*args|\n self.class.state_machines.fire_event_attributes(self, :save, false) { super(*args) }\n end\n end\n end\n end", "def add_actions; end", "def callbacks; end", "def callbacks; end", "def setup *actions, &proc\n (@setup_procs ||= []) << [proc, actions.size > 0 ? actions : [:*]]\n end", "def define_action_helpers; end", "def post_setup\n end", "def action_methods; end", "def action_methods; end", "def action_methods; end", "def before_setup; end", "def action_run\n end", "def execute(setup)\n @action.call(setup)\n end", "def define_action_helpers?; end", "def set_actions\n actions :all\n end", "def action_done(action)\n dispatch = { :migrate => :done_migrating, :map => :done_mapping, :reduce =>\n :done_reducing, :finalize => :done_finalizing } \n self.send dispatch[action[:action]], action\n end", "def dependencies action, &block\n @actions.each do |other|\n if action[:requires].include? other[:provide]\n block.call other\n end\n end\n end", "def setup!\n return unless @setup_procs\n http_actions = actions\n @setup_procs.each do |setup_proc|\n proc, actions = setup_proc\n @setup__actions = actions.map do |action|\n\n action.is_a?(Regexp) ?\n http_actions.select { |a| a.to_s =~ action } :\n action.is_a?(String) && action =~ /\\A\\./ ?\n http_actions.map { |a| a.to_s << action if format?(a).include?(action) }.compact :\n action\n\n end.flatten\n self.class_exec &proc\n @setup__actions = nil\n end\n @setup_procs = nil\n end", "def before_actions(*logic)\n self.before_actions = logic\n end", "def setup_handler\n end", "def set_action(opts)\n opts = check_params(opts,[:actions])\n super(opts)\n end", "def setup(action)\n @targets.clear\n unless action.item.target_filters.empty?\n @targets = SES::TargetManager.make_targets(action)\n else\n item = action.item\n if item.for_opponent?\n @targets = $game_troop.alive_members\n elsif item.for_dead_friend?\n @targets = $game_party.battle_members.select { |actor| actor.dead? }\n else\n $game_party.battle_members.select { |actor| actor.alive? 
}\n end\n end\n @item_max = @targets.size\n create_contents\n refresh\n show\n activate\n end", "def action; end", "def action; end", "def action; end", "def action; end", "def action; end", "def workflow\n end", "def revisable_shared_setup(args, block)\n class << self\n attr_accessor :revisable_options\n end\n options = args.extract_options!\n self.revisable_options = Options.new(options, &block)\n \n self.send(:include, Common)\n self.send(:extend, Validations) unless self.revisable_options.no_validation_scoping?\n self.send(:include, WithoutScope::QuotedColumnConditions)\n end", "def setup\n @action = SampleActionAndroid.new(os_name: 'android',\n app_name: APP_PATH)\n end", "def before(action)\n invoke_callbacks *self.class.send(action).before\n end", "def process_action(...)\n send_action(...)\n end", "def before_dispatch(env); end", "def after_actions(*logic)\n self.after_actions = logic\n end", "def setup\n # override and do something appropriate\n end", "def setup(client)\n return unless @setup\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n actions.each do |action|\n action.execute(client)\n end\n self\n end", "def setup(_context)\n end", "def setup(resources) ; end", "def validate_actions\n errors.add(:base, :should_give_at_least_one_action) if !manage? && !forecasting? && !read? && !api?\n end", "def setup\n @resource_config = {\n :callbacks => {\n :before_create => nil,\n :after_create => nil,\n :before_update => nil,\n :after_update => nil,\n :before_destroy => nil,\n :after_destroy => nil,\n },\n :child_assoc => nil,\n :model => nil,\n :parent => nil,\n :path => nil,\n :permission => {},\n :properties => {},\n :relation => {\n :create => nil,\n :delete => nil,\n },\n :roles => nil,\n }\n end", "def determine_valid_action\n\n end", "def process_shared\n handle_taxes\n handle_shippings\n create_adjustments_from_params\n handle_status\n handle_inventory_refunds\n handle_payment_transactions\n order.updater.update\n end", "def startcompany(action)\n @done = true\n action.setup\n end", "def init_actions\n am = action_manager()\n am.add_action(Action.new(\"&Disable selection\") { @selection_mode = :none; unbind_key(32); bind_key(32, :scroll_forward); } )\n am.add_action(Action.new(\"&Edit Toggle\") { @edit_toggle = !@edit_toggle; $status_message.value = \"Edit toggle is #{@edit_toggle}\" })\n end", "def event_callbacks(event, metadata={})\n case event\n when :reset, :review\n if confirmed\n update_attributes(confirmed: false)\n end\n when :confirm\n confirm\n # trigger :order for all applicable items\n # NOTE: :order event is common to both physical and digital items\n items.each do |i|\n if i.event_permitted(:order)\n user_id = last_transition.user_id\n i.trigger!(:order, { order_id: id, user_id: user_id })\n end\n end\n when :complete_work\n request = metadata[:request]\n work_complete_notification(request)\n when :close\n close\n end\n if event != :close && !open\n reopen\n end\n end", "def setup_action\n return unless PONY::ERRNO::check_sequence(current_act)\n new_sequence = @action_sequence[@sequence_index+1...@action_sequence.size]\n @sequence_index = 0\n new_sequence = DND::SkillSequence::ACTS[@acts[1]] + new_sequence\n execute_sequence\n end", "def define_tasks\n define_weave_task\n connect_common_tasks\n end", "def setup(&block)\n define_method(:setup, &block)\n end", "def setup\n transition_to(:setup)\n end", "def setup\n transition_to(:setup)\n end", "def action\n end", "def setup( *args 
)\n\t\t\tself.class.setupBlocks.each {|sblock|\n\t\t\t\tdebugMsg \"Calling setup block method #{sblock}\"\n\t\t\t\tself.send( sblock )\n\t\t\t}\n\t\t\tsuper( *args )\n\t\tend", "def config(action, *args); end", "def setup\n @setup_proc.call(self) if @setup_proc\n end", "def before_action \n end", "def setup_callbacks\n defined_callbacks.each do |meth|\n unless respond_to?(\"call_#{meth}_callbacks\".to_sym)\n self.class.module_eval <<-EOE\n def call_#{meth}_callbacks(*args)\n plugin_store.each {|a| a.call_#{meth}_callbacks(*args) } if respond_to?(:plugin_store) && plugin_store\n self.send :#{meth}, *args if respond_to?(:#{meth})\n end\n EOE\n end\n end\n end", "def action\n end", "def matt_custom_action_begin(label); end", "def setup\n # override this if needed\n end", "def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend", "def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend", "def action(options,&callback)\n new_action = Action===options ? options : Action.new(options,&callback)\n # replace any with (shared name/alias or both default) + same arity\n @actions.delete_if do |existing_action|\n ((existing_action.names & new_action.names).size > 0 ||\n existing_action.default? && new_action.default?) &&\n existing_action.required.size == new_action.required.size &&\n existing_action.optional.size <= new_action.optional.size\n end\n @actions = (@actions + [new_action]).sort\n new_action\n end", "def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action\n end", "def after(action)\n invoke_callbacks *options_for(action).after\n end", "def pre_task\n end", "def setup(server)\n server.on('beforeMethod', method(:before_method), 10)\n end", "def add_actions\n attribute = machine.attribute\n name = self.name\n \n owner_class.class_eval do\n define_method(name) {self.class.state_machines[attribute].events[name].fire(self)}\n define_method(\"#{name}!\") {self.class.state_machines[attribute].events[name].fire!(self)}\n define_method(\"can_#{name}?\") {self.class.state_machines[attribute].events[name].can_fire?(self)}\n end\n end", "def init_actions\n @select_action = SelectAction.new\n @endpoint_mouse_action = EndpointMouseAction.new\n @move_action = MoveAction.new\n end", "def setup_signals; end", "def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend", "def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend", "def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action.respond_to?('weak!') ? action.weak! 
: action\n end", "def initialize(*args)\n super\n @action = :set\nend", "def after_set_callback; end", "def setup\n #implement in subclass;\n end", "def lookup_action; end", "def setup &block\n if block_given?\n @setup = block\n else\n @setup.call\n end\n end", "def setup_action\n return TSBS.error(@acts[0], 1, @used_sequence) if @acts.size < 2\n actions = TSBS::AnimLoop[@acts[1]]\n if actions.nil?\n show_action_error(@acts[1])\n end\n @sequence_stack.push(@acts[1])\n @used_sequence = @acts[1]\n actions.each do |acts|\n @acts = acts\n execute_sequence\n break if @break_action\n end\n @sequence_stack.pop\n @used_sequence = @sequence_stack[-1]\n end", "def release_actions; end", "def around_hooks; end", "def save_action; end", "def setup(easy)\n super\n easy.customrequest = @verb\n end", "def action_target()\n \n end", "def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end", "def setup\n return unless @setup\n\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n run_actions_and_retry(actions)\n self\n end", "def before_setup\n # do nothing by default\n end", "def my_actions(options)\n @setup = false\n get_template_part(\"custom_used\",\"action_users\",true)\n end", "def default_action; end", "def setup(&blk)\n @setup_block = blk\n end", "def callback_phase\n super\n end", "def advice\n end", "def _handle_action_missing(*args); end", "def duas1(action)\n action.call\n action.call\nend", "def shared_action(name, &block)\n @controller.shared_actions[name] = block\n end", "def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end", "def setup_initial_state\n\n state_a = State.new(\"a\", 0)\n state_b = State.new(\"b\", 0)\n state_c = State.new(\"c\", 10)\n\n move_to_b = Action.new(\"move_to_b\", 1, state_b)\n\n move_to_c = Action.new(\"move_to_c\", 1, state_c)\n\n state_a.actions = [move_to_b, move_to_c]\n\n return state_a\n \nend" ]
[ "0.6163163", "0.6045976", "0.5946146", "0.591683", "0.5890051", "0.58349305", "0.5776858", "0.5703237", "0.5703237", "0.5652805", "0.5621621", "0.54210985", "0.5411113", "0.5411113", "0.5411113", "0.5391541", "0.53794575", "0.5357573", "0.53402257", "0.53394014", "0.53321576", "0.53124547", "0.529654", "0.5296262", "0.52952296", "0.52600986", "0.52442724", "0.52385926", "0.52385926", "0.52385926", "0.52385926", "0.52385926", "0.5232394", "0.523231", "0.5227454", "0.52226824", "0.52201617", "0.5212327", "0.52079266", "0.52050185", "0.51754695", "0.51726824", "0.51710224", "0.5166172", "0.5159343", "0.51578903", "0.51522785", "0.5152022", "0.51518047", "0.51456624", "0.51398855", "0.5133759", "0.5112076", "0.5111866", "0.5111866", "0.5110294", "0.5106169", "0.509231", "0.50873137", "0.5081088", "0.508059", "0.50677156", "0.50562143", "0.5050554", "0.50474834", "0.50474834", "0.5036181", "0.5026331", "0.5022976", "0.5015441", "0.50121695", "0.5000944", "0.5000019", "0.4996878", "0.4989888", "0.4989888", "0.49864885", "0.49797225", "0.49785787", "0.4976161", "0.49683493", "0.4965126", "0.4958034", "0.49559742", "0.4954353", "0.49535993", "0.4952725", "0.49467874", "0.49423352", "0.49325448", "0.49282882", "0.49269363", "0.49269104", "0.49252945", "0.4923091", "0.49194667", "0.49174926", "0.49173003", "0.49171105", "0.4915879", "0.49155936" ]
0.0
-1
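The set_customer_user finder above only runs once it is registered as a callback. A minimal sketch of that wiring, assuming the conventional scaffold controller name and action list:

    # Hypothetical controller skeleton; the class name and the :only list
    # are assumptions, not taken from the record above.
    class CustomerUsersController < ApplicationController
      before_action :set_customer_user, only: %i[show edit update destroy]

      private

      # Use callbacks to share common setup or constraints between actions.
      def set_customer_user
        @customer_user = CustomerUser.find(params[:id])
      end
    end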
Never trust parameters from the scary internet, only allow the white list through.
def customer_user_params params.require(:customer_user).permit(:name, :password, :password_confirmation, :email) end
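The permit list above only takes effect where it is handed to mass assignment. A short sketch of a create action consuming it; the body is an assumption in the style of the surrounding records:

    # Hypothetical create action; only the whitelisted attributes in
    # customer_user_params ever reach the model.
    def create
      @customer_user = CustomerUser.new(customer_user_params)
      if @customer_user.save
        redirect_to @customer_user, notice: 'Customer user was successfully created.'
      else
        render :new, status: :unprocessable_entity
      end
    end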
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def strong_params\n params.require(:user).permit(param_whitelist)\n end", "def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end", "def allow_params_authentication!; end", "def allowed_params\n ALLOWED_PARAMS\n end", "def default_param_whitelist\n [\"mode\"]\n end", "def param_whitelist\n [:role, :title]\n end", "def expected_permitted_parameter_names; end", "def safe_params\n params.except(:host, :port, :protocol).permit!\n end", "def strong_params\n params.require(:team_member).permit(param_whitelist)\n end", "def permitir_parametros\n \t\tparams.permit!\n \tend", "def strong_params\n params.require(:community).permit(param_whitelist)\n end", "def permitted_strong_parameters\n :all #or an array of parameters, example: [:name, :email]\n end", "def strong_params\n params.require(:education).permit(param_whitelist)\n end", "def restricted_params\n #params.require(self.controller_name.classify.underscore.to_sym).permit([])\n raise(\"No strong params set, override restricted_params method in your controller. E.g. params.require(:model).permit(:attribute1, :attribute2)\")\n end", "def allowed_params\n params.require(:user).permit(:username, :email, :password, :password_confirmation)\n end", "def param_whitelist\n [:rating, :review]\n end", "def param_whitelist\n whitelist = [\n :username, :name,\n :parent_id,\n :headline, :description, :video,\n :policy, :signup_mode, :category,\n :website, :facebook, :twitter, :linkedin,\n :founded_at,\n privacy: [\n :events,\n :resources\n ],\n permission: [\n :profile,\n :members,\n :children,\n :statistics,\n :posts,\n :listings,\n :resources,\n :events\n ],\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:parent_id)\n unless current_user.role_in(@community) === 'owner'\n whitelist.delete(:privacy)\n whitelist.delete(:permission)\n end\n end\n \n whitelist\n end", "def param_whitelist\n if @user.present? && current_user != @user\n return [:followed]\n end\n \n whitelist = [\n :username, :email, :password,\n :first_name, :last_name,\n :birthday, :gender,\n :headline, :biography, :ask_about, :focus,\n :website, :facebook, :linkedin, :twitter, :github,\n roles: [],\n skills: [],\n interests: [],\n privacy: { contact: [] },\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:email)\n whitelist.delete(:password)\n end\n \n whitelist\n end", "def user_params \n \tparams.require(:user).permit(:name, :email, :password, :password_confirmation)# preventing CSTR\n end", "def user_params\n params.permit(:name, :phoneNumber, :address, :postalCode, :local, :link, :counter, :latitude, :longitude) \n end", "def valid_params_request?; end", "def strong_params\n params.require(:experience).permit(param_whitelist)\n end", "def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? 
key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end", "def whitelist_url_params\n params.require(:whitelist_url).permit(:domain)\n end", "def allowed_params\n params.require(:allowed).permit(:email)\n end", "def permitted_params\n []\n end", "def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end", "def safe_params\n params.permit(:id, :name, :origin, :emails => []); #emails is an array\n end", "def query_param\n\t\tparams.permit(:first_name, :last_name, :phone)\n\tend", "def strong_params\n params.require(:success_metric).permit(param_whitelist)\n end", "def devise_filter\r\n logger.debug(\"In devise_filter =>PARAMS: #{params.inspect}\")\r\n\r\n # White list for sign_up\r\n devise_parameter_sanitizer.for(:sign_up) { |u| u.permit(user_whitelist) }\r\n\r\n # White list for account update\r\n devise_parameter_sanitizer.for(:account_update) { |u| u.permit(user_whitelist, :current_password) }\r\n\r\n # White list for Invitation creation\r\n devise_parameter_sanitizer.for(:invite) { |u| u.permit(:account_type, :email, :invitation_token)}\r\n\r\n # White list for accept invitation\r\n devise_parameter_sanitizer.for(:accept_invitation) { |u| u.permit(user_whitelist, :invitation_token)}\r\n\r\n end", "def whitelisted_user_params\n params.require(:user).\n permit( :first_name, :last_name, :email,:password,:password_confirmation,:birthday,:gender)\n end", "def user_params\n ActionController::Parameters.permit_all_parameters = true\n params.require(:user) #.permit(:name, :surname, :phone, :password, :email, :time_zone)\n end", "def strong_params\n params.require(:metric_change).permit(param_whitelist)\n end", "def safe_params\n params.require(:user).permit(:name)\n end", "def get_params\n\t\treturn ActionController::Parameters.new(self.attributes).permit(\"account_id\", \"title\", \"category\", \"introduction\", \"tags\", \"segment_type\", \"visible\", \"status\", \"main_image\")\n\tend", "def grant_params\n @whitelisted = params.require(:grant).permit(:name, :description, :agency_id, :acronym)\n end", "def check_params; true; end", "def param_whitelist\n whitelist = [\n :description,\n :progress,\n :kpi_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:kpi_id)\n end\n \n whitelist\n end", "def quote_params\n params.permit!\n end", "def valid_params?; end", "def paramunold_params\n params.require(:paramunold).permit!\n end", "def user_params\n\t\tparams.permit(:nickname, :avatar, :description, :password, :gender, :birthday, :email, :phone, :qq_id, :wechat_id)\n\tend", "def filtered_parameters; end", "def user_params\n params.permit(\n \t:id,\n \t:email, \n \t:first_name, \n \t:last_name, \n \t:password, \n \t:confirm_token, \n \t:phone_number,\n \t:facebook_link,\n \t:car_model,\n \t:license_plate)\n end", "def filtering_params\n params.permit(:email, :name)\n end", "def check_params\n true\n end", "def wx_public_params\n params.require(:wx_public).permit(:nickname, :manager, :alias)\n end", "def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end", "def allowed_params\n 
params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end", "def listing_params\n\t\tparams.permit(:address, :transit_info, :rules, :other_info, :lat, :lng)\n\tend", "def social_account_params\n\t\t\tparams.require(:social_account).permit!\n\t\tend", "def safe_params\n resurce_name = self.class.resource_name\n params_method_name = \"#{resurce_name}_params\".to_sym\n if params[resurce_name]\n if respond_to?(params_method_name) || private_methods.include?(params_method_name)\n send(params_method_name)\n else\n raise ActiveModel::ForbiddenAttributesError, \"Please, define the '#{params_method_name}' method in #{self.class.name}\"\n end\n end\n end", "def url_params\n params.require(:url).permit(:short_url, :original_url, :clicks, :ip_addresses)\n end", "def user_params\n params.require(:user).permit(:uri, :username, :password, :realname, :email, :publicvisible)\n end", "def model_params\n\t\tparams.require(:manager).permit(\n\t :user_name,\n :password,\n :email,\n \t\t\t)\n\tend", "def article_params_whitelist\n params.require(:article).permit(:title, :description, category_ids: [])\n end", "def college_whitelist_params\n params.require(:college_whitelist).permit(:status)\n end", "def active_code_params\n params[:active_code].permit\n end", "def filtering_params\n params.permit(:email)\n end", "def valid_params(params)\n params.permit(:user_id, :photo_id, :originX, :originY, :width, :height)\n end", "def ip_address_params\n\t\t\tparams.require(:ip_address).permit!\n end", "def pull_request_params\n whitelist = [\n :url,\n :id,\n :html_url,\n :diff_url,\n :patch_url,\n :issue_url,\n :number,\n :state,\n :locked,\n :title\n ]\n params.require(:pull_request).permit(whitelist)\n end", "def reserved_params\n params.require(:reserved).permit(:name, :email, :pax, :address, :KTP, :title)\n end", "def post_params\n if current_user.admin? 
\n params.permit(:title, :body, :city, :country, :gps_location, :privacy, :visible, :latitude, :longitude, images: [], files: [])\n else \n params.permit(:title, :body, :city, :country, :gps_location, :privacy,:latitude, :longitude, images: [], files: [])\n end \n end", "def list_params\n params.permit(:name)\n end", "def filter_parameters; end", "def filter_parameters; end", "def vineyard_params\n params.permit(:vineyard_name, :email, :website_url, :phone, :address, :city, :region, :postcode, :country, :specialty, :description, :pet_friendly, :holiday, :tours, :events, :family_friendly, :cover_image, :image_one, :image_two, :image_three, :image_four, :user_id, :base64)\n end", "def available_activity_params\n # params.require(:available_activity).permit(:type,:geometry,:properties)\n whitelisted = ActionController::Parameters.new({\n type: params.require(:available_activity)[:type],\n geometry: params.require(:available_activity)[:geometry].try(:permit!).to_h,\n properties: params.require(:available_activity)[:properties].try(:permit!).to_h\n }).try(:permit!)\n end", "def user_params\n params.permit(:name, :username, :email, :password, :img_url, :bg_url, :coinbank)\n end", "def user_params_pub\n\t \tparams[:user].permit(:hruid)\n\t end", "def user_params\n params.permit(:id, :email, :password, :nickname, :status, :avatar, :flat_picture, :flatsharing_id, :member,\n :user, :color, :solde)\n end", "def validate_search_inputs\n @whitelisted = params.fetch(:user, nil)\n if @whitelisted.blank?\n render_error(400, \"#{I18n.t('general_error.params_missing_key')}\": [I18n.t('general_error.params_missing_value', model: \"review\")])\n return\n else\n @whitelisted = @whitelisted.permit(:name, :uen, :description)\n end\n end", "def param_whitelist\n [\n :title,\n :description,\n :organization,\n :team_id,\n :started_at,\n :finished_at,\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n end", "def url_whitelist; end", "def admin_social_network_params\n params.require(:social_network).permit!\n end", "def filter_params\n params.require(:filters).permit(:letters)\n end", "def origin_params\n params.permit(:country, :state, :city, :postal_code, :address, :description)\n end", "def valid_params(params)\n params.permit(:login, :first_name, :last_name, \n :password, :password_confirmation)\n end", "def sensitive_params=(params)\n @sensitive_params = params\n end", "def permit_request_params\n params.permit(:address)\n end", "def user_params\n # Ensure a user can't give themselves admin priveleges\n params.delete(:admin) if current_user.admin?\n params.require(:user).permit(:name, :email, :admin, :image)\n end", "def secure_params\n params.require(:location).permit(:name)\n end", "def strong_params\n params.require( :setting ).\n permit( :global_scan_limit, :per_user_scan_limit,\n :target_whitelist_patterns, :target_blacklist_patterns )\n end", "def question_params\n params.require(:survey_question).permit(question_whitelist)\n end", "def case_insensitive_params\n params.require(:case_insensitive).permit(:name)\n end", "def empire_master_no_match_params\n params.require(:empire_master_no_match).permit(:uid, :last_name, :list, :search_date, :double, :source)\n end", "def maintenance_request_params\n params[:maintenance_request].permit! 
#allow all parameters for now\n end", "def unwanted_params\n params.require(:unwanted).permit(:title, :description, :image)\n end", "def url_params\n params[:url].permit(:full)\n end", "def backend_user_params\n params.permit!\n end", "def filter_params\n\t\treturn params[:candidate].permit(:name_for_filter)\n\tend", "def speed_measurement_params\n\n #fuckit, to lazy to deal with permit crap right now\n ActionController::Parameters.permit_all_parameters = true\n\n params[:speed_measurement]\n end", "def user_params\n params.permit(:name, :age, :username, :display_photo, :password)\n end", "def get_params\r\n #params.require(:article).permit(:title, :permalink, :content, :source_site, :introtext, :type_id, :order_by, :searchable, :created_by, :edited_by, :published_by, :published_on, :user_id)\r\n params.require(:article).permit!\r\n\r\n end", "def pub_params\n params.require(:pub).permit(:name, :description, :phone, :email, :hidden, :city_id, :address)\n end", "def pass_params\n params[:pass].permit(:name, :price, :description, :colour, :events)\n end", "def droptraining_params\n params.permit(:training_id,:user_id, :utf8, :authenticity_token, :commit)\n end", "def person_params\n # params whitelist does *not* include admin, sub, remember_token\n # TBD: share this whitelist with the list used by configuration_permitted_parameters\n # TBD: should current_password be on this list? -- for now, leaving off, since it seems to work without\n # NOTE: do not include 'admin' in this list!\n params.require(:person).permit(\n :name, \n :email, \n :description,\n :password, \n :password_confirmation\n )\n end", "def parameter_params\n params.require(:parameter).permit(:name, :description, :param_code, :param_value, :active_from, :active_to)\n end" ]
[ "0.69792545", "0.6781151", "0.67419964", "0.674013", "0.6734356", "0.6591046", "0.6502396", "0.6496313", "0.6480641", "0.6477825", "0.64565", "0.6438387", "0.63791263", "0.63740575", "0.6364131", "0.63192815", "0.62991166", "0.62978333", "0.6292148", "0.6290449", "0.6290076", "0.62894756", "0.6283177", "0.6242471", "0.62382483", "0.6217549", "0.6214457", "0.6209053", "0.6193042", "0.6177802", "0.6174604", "0.61714715", "0.6161512", "0.6151757", "0.6150663", "0.61461", "0.61213595", "0.611406", "0.6106206", "0.6105114", "0.6089039", "0.6081015", "0.6071004", "0.60620916", "0.6019971", "0.601788", "0.6011056", "0.6010898", "0.6005122", "0.6005122", "0.6001556", "0.6001049", "0.59943926", "0.5992201", "0.59909594", "0.5990628", "0.5980841", "0.59669393", "0.59589154", "0.5958826", "0.5957911", "0.5957385", "0.5953072", "0.59526145", "0.5943361", "0.59386164", "0.59375334", "0.59375334", "0.5933856", "0.59292704", "0.59254247", "0.5924164", "0.59167904", "0.59088355", "0.5907542", "0.59064597", "0.5906243", "0.5898226", "0.589687", "0.5896091", "0.5894501", "0.5894289", "0.5891739", "0.58860534", "0.5882406", "0.587974", "0.58738774", "0.5869024", "0.58679986", "0.5867561", "0.5865932", "0.5864461", "0.58639693", "0.58617616", "0.5861436", "0.5860451", "0.58602303", "0.5854586", "0.58537364", "0.5850427", "0.5850199" ]
0.0
-1
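The preceding record pairs the standard Rails "never trust parameters" comment with a `customer_user_params` whitelist. As a minimal sketch of how such a permit list is typically consumed in a controller action — the controller name, the `create` body, and the redirect target are assumptions for illustration, not taken from the record:

# Hypothetical controller using the whitelisted parameters from the record above.
class CustomerUsersController < ApplicationController
  def create
    # Only :name, :password, :password_confirmation and :email survive the permit call;
    # any other key posted by the client is dropped before mass assignment.
    @customer_user = CustomerUser.new(customer_user_params)
    if @customer_user.save
      redirect_to @customer_user
    else
      render :new
    end
  end

  private

  def customer_user_params
    params.require(:customer_user).permit(:name, :password, :password_confirmation, :email)
  end
end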
gets c(word_2 | word_1)
def bigram_count()
  @corpus.each { |sentence_arr|
    prev_word = ""
    sentence_arr.each { |word|
      if(prev_word != "")
        @bifreq[prev_word + " " + word] += 1
      else
        @bifreq["PHI "+word] += 1
      end
      prev_word = word
    }
  }
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bitwise_or(a, b)\n\tresult = ''\n\ta.each_char.with_index do |val, index|\n\t\tif val == '1' || b[index] == '1'\n\t\t\tresult.concat '1'\n\t\telse\n\t\t\tresult.concat '0'\n\t\tend\n\tend\n\treturn result\nend", "def second_anagram?(word1, word2)\n w1_copy = word1.dup\n w2_copy = word2.dup\n w1_copy.each_char do |ch1|\n w2_copy.each_char do |ch2|\n if ch1 == ch2\n w1_copy[w1_copy.index(ch1)] = \"\"\n w2_copy[w2_copy.index(ch2)] = \"\"\n end\n end\n end\n w1_copy.empty? && w2_copy.empty?\nend", "def unite_words\n _, a_rest, a_word = %r{^(.* )?(\\w+)$}.match(a.text).to_a # a ends with word character\n _, b_word, b_rest = %r{^(\\w+\\s*)(.* )?$}.match(b.text).to_a # b starts with word character\n\n if a_word && b_word\n if a_word.length > b_word.length\n shift_backward(b_word)\n else\n shift_forward(a_word)\n end\n end\n end", "def match?(wordA, wordB)\n wordA == wordB.each_char.sort.join('') \nend", "def start_of_word(a, b)\n return a.chars.first(b).join\nend", "def looper(word1, word2)\n\t\tputs word1\n\t\t@word1 = word1.split(\"\") \n\t\t@word2 = word2.split(\"\")\n\n\t\tuntil @word1 == @word2 do \n\n\t\t\[email protected]_with_index do |let, index|\n\t\t\t\tif let == @word2[index]\n\t\t\t\telse \n\t\t\t\t\ttemp = @word1.clone\n\t\t\t\t\ttemp[index] = @word2[index]\n\t\t\t\t\ttemp = temp.join(\"\")\n\t\t\t\t\tif @dictionary.exists?(temp) \n\t\t\t\t\t\tputs temp\n\t\t\t\t\t\ttemp = temp.split(\"\")\n\t\t\t\t\t\t@word1=temp\n\t\t\t\t\tend\n\t\t\t\tend \n\t\t\tend\n\t\tend\n\tend", "def bonus_ana(word1, word2)\n hash = Hash.new(0)\n\n word1.each_char { |char| hash[char] += 1 }\n word2.each_char { |char| hash[char] -= 1 }\n\n hash.values.all? { |count| count == 0 }\nend", "def shared_characters(a, b)\n raise unless a.size == b.size\n a.zip(b).select {|x,y| x == y }.map(&:first)\nend", "def bitwise_exclusive(a, b)\n\tresult = ''\n\ta.each_char.with_index do |val, index|\n\t\tif (val == '1' && b[index] == '0') || (val == '0' && b[index] == '1')\n\t\t\tresult.concat '1'\n\t\telse\n\t\t\tresult.concat '0'\n\t\tend\n\tend\n\treturn result\nend", "def secondstrc(f)\n res = \"\"\n flag = 0\n while l = f.gets\n if flag == 2 then\n res << l\n flag = 0\n elsif flag == 1 then\n flag = 2\n elsif /^>/ =~ l then\n flag = 1\n end\n end\n return res\nend", "def secondstrc(f)\n res = \"\"\n flag = 0\n while l = f.gets\n if flag == 2 then\n res << l\n flag = 0\n elsif flag == 1 then\n flag = 2\n elsif /^>/ =~ l then\n flag = 1\n end\n end\n return res\nend", "def union(arg1, *arg2)\n first = arg1\n rest = []\n arg2.each do |char|\n char.each do |subchar|\n rest << subchar\n end\n end\n return first + rest\n \nend", "def combinations\nputs \"Enter text\"\ntext = gets.chomp\nputs text\nwords = text.scan /\\w/\n\n\n\nputs words\nend", "def flip_words (first_word, second_word)\n puts second_word + first_word\nend", "def XOR(str1,str2)\n ret =\"\"\n str1.split(//).each_with_index do |c, i|\n ret[i] = (str1[i].ord ^ str2[i].ord).chr\n end\n return ret\nend", "def bitwise_and(a, b)\n\tresult = ''\n\ta.each_char.with_index do |val, index|\n\t\tif val == b[index]\n\t\t\tresult.concat '1'\n\t\telse\n\t\t\tresult.concat '0'\n\t\tend\n\tend\n\treturn result\nend", "def two_strings a,b\n s1 = a.chars.uniq\n s2 = b.chars.uniq\n substring = false\n\n s2.each do |letter|\n if s1.include? letter\n substring = true\n break\n end\n end\n puts substring ? 
'YES' : 'NO' \nend", "def spell_out word6\n word6.downcase.split(\"\").join(\"-\")\nend", "def xor_strings(s1, s2)\r\n s1.unpack('C*').zip(s2.unpack('C*')).map { |a, b| a ^ b }.pack('C*')\r\n end", "def combine(str1, str2)\n # Create an array which we will read the final outputs from\n output = []\n\n # Run processor for word 1 and push result to output\n output.push(processor(str1, 1))\n\n # Run processor for word 2 and push result to output\n output.push(processor(str2, 2))\n\n # Join the partial words together to form the wombinations\n wombination1 = (output[0][0] + output [1][0])\n wombination2 = (output[0][1] + output [1][1])\n\n # If the words are the same, then just return one of them...\n if wombination1 == wombination2\n puts wombination1\n\n # otherwise return both as an array\n else\n puts [wombination1, wombination2]\n\n end\nend", "def twoStrings(s1, s2)\n freq = Hash.new\n\n # 1) store characters frequency in first word\n for i in 0...(s1.length)\n freq[s1[i]] = 1\n end\n\n # 2) check if the second word has the common characters\n for i in 0...(s2.length)\n return \"YES\" unless freq[s2[i]].nil?\n end\n\n return \"NO\"\nend", "def phase_one?(str1,str2)\n dictionary = str1.chars.permutation.to_a\n dictionary.include?(str2.chars)\nend", "def second_anagram?(word1, word2)\n word1.each_char do |char|\n index = word2.index(char)\n word2.slice!(index)\n end\n\n if word2.empty?\n true\n else\n false\n end\n\nend", "def crazy_strings(first_word, second_word)\nputs \"#{first_word} #{second_word.gsub(\"s\",\"z\").swapcase}!\"\nend", "def xor_byte_strings(s1, s2) # :doc:\n s2 = s2.dup\n size = s1.bytesize\n i = 0\n while i < size\n s2.setbyte(i, s1.getbyte(i) ^ s2.getbyte(i))\n i += 1\n end\n s2\nend", "def str_xor(s1, s2)\n if s1.length != s2.length:\n minlen = [s1, s2].map(&:length).min\n s1 = s1[0...minlen]\n s2 = s2[0...minlen]\n end\n s1.bytes.zip(s2.bytes).map{ |b1, b2| b1 ^ b2 }.map(&:chr).join\nend", "def concatenate_words(w1, w2)\r\n raise ArgumentError.new(\"Only Strings allowed!\") if !w1.is_a? String or !w2.is_a? String\r\n w1 + ' ' + w2\r\nend", "def second_anagram?(word1, word2)\n return false if word1.length != word2.length\n word1.chars.each do |char1|\n word2.chars.each do |char2|\n if char1 == char2\n word1 = delete_char(word1, char1)\n word2 = delete_char(word2, char1)\n end\n end\n end\n\n word1.empty? && word2.empty?\nend", "def mix(s1, s2)\n # s1とs2から2文字以上含まれる文字を抽出\n selection = (\"a\"..\"z\").to_a.select { |letter| s1.count(letter) > 1 || s2.count(letter) > 1 }\n # s1とs2の出現回数を比較し、プレフィックスを付与した配列を作成する\n selection.map! do |selection|\n if s1.count(selection) > s2.count(selection)\n \"1:#{selection * s1.count(selection)}\"\n elsif s1.count(selection) < s2.count(selection)\n \"2:#{selection * s2.count(selection)}\"\n else\n \"=:#{selection * s1.count(selection)}\"\n end\n end\n # 出現回数(文字列の長さ)、1or2、アルファベット順でソートする\n selection.sort_by { |x| [-x.size, x[0], x[-1]] }.join(\"/\")\nend", "def mutation(array)\n p first_word = array[0].downcase.chars.sort.join(\" \")\n p second_word = array[1].downcase.chars.sort.join(\" \")\n p first_word.include?(second_word) ? 
true: false\nend", "def find_chain(word1, word2)\n\t\t word1_array = split_word(word1)\n\t\t word2_array = split_word(word2)\n\n\t\t \tindex = 0\n\n\t\t\t word1_array.cycle { |letter|\n\t\t\t \tletter_copy = word1[index]\n\t\t\t \t#p index\n\t\t\t \tword1[index] = word2[index]\n\t\t\t \tp word1\n\t\t\t \tif @dictionary.exists?(word1)\n\t\t\t \t\tp \"exists\"\n\t\t\t \t\tif word1 == word2\n\t\t\t \t\t\tputs \"MATCH\"\n\t\t\t \t\t\tbreak;\n\t\t\t \t\tend\n\t\t\t\telse\n\t\t\t\t\tp \"does NOT exist\"\n\t\t\t\t\tword1[index] = letter_copy\n\t\t\t \tend\n\t\t\t \tp index\n\t\t\t if index >= word1_array.length-1\n\t\t\t \tindex = 0\n\t\t\t else\n\t\t\t \tindex += 1\n\t\t\t end\n\n\t\t\t }\n end", "def shortest(word1, word2)\n \n end", "def xor(first, second)\n first.bytes.zip(second.bytes).map{ |(a,b)| (a ^ b).chr }.join('')\n end", "def complementWord()\n\n\tcolor = $rng.rand(0..5)\n\tword = $rng.rand(6..11)\n\n\tprintJumble(color,word)\n\n\tentry = gets.chomp\n\n\tif (entry == getComplement($arr[word]))\n\t\t\treturn 1\n\telse\n\t\t\treturn 0\n\tend\n\nend", "def mutation?(string_one, string_two)\n all_there = true\n string_two.chars.map { |letter| all_there = false unless string_one.include?(letter) }\n all_there\nend", "def mix(s1, s2)\n selection = ('a'..'z').to_a.select { |letter| s1.count(letter) > 1 || s2.count(letter) > 1 }\n selection.map! do |selection| \n if s1.count(selection) > s2.count(selection)\n \"1:#{selection * s1.count(selection)}\"\n elsif s1.count(selection) < s2.count(selection)\n \"2:#{selection * s2.count(selection)}\"\n else\n \"=:#{selection * s1.count(selection)}\"\n end\n end\n selection.sort_by { |x| [-x.size, x[0], x[-1]] }.join(\"/\")\nend", "def phase_two?(str1,str2)\n str1.each_char do |chr1|\n str2.each_char do |chr2|\n p chr1\n p chr2\n if chr1 == chr2\n str1.delete!(chr1)\n str2.delete!(chr1)\n end\n p str1,str2\n end\n end\n str1.empty? && str2.empty?\nend", "def scramble(str1, str2)\n str2.chars.all? { |letter| str1.include?(letter) }\nend", "def scramble(s1,s2)\n s2.chars.all? { |c| s1.sub!(c, '') }\nend", "def third_anagram?(word1, word2)\n first = word1.chars.sort.join(\"\")\n second = word2.chars.sort.join(\"\")\n\n p first == second \nend", "def common_letter?(w1, w2)\n\tw1 = w1.downcase\n\tw2 = w2.downcase\n\t# Iterating through all the characters of w1, until finding a character\n\t# that w2 includes\n\tw1.each_char do |char1|\n\t\treturn true if w2.include?(char1)\n\tend\n false\nend", "def borrow(speech)\n answer = []\n speech.split(' ').each do |word|\n word.split('').each do |char|\n ascii = char.ord\n if (ascii >= 97 && ascii <= 122) || (ascii >= 65 && ascii <= 90)\n answer << char.downcase\n end\n end\n end\n answer.join\nend", "def third_anagram?(word1, word2)\n word1_chars = word1.split(\"\").sort\n word2_chars = word2.split(\"\").sort\n word1_chars == word2_chars\n\nend", "def scramble(s1, s2)\n # p s1.chars.include?(s2)\n p s2.chars.uniq.all?{ |letter| s2.count(letter) <= s1.count(letter) }\nend", "def third_anagram?(word_1, word_2)\n word_1.chars.sort == word_2.chars.sort\nend", "def scramble(str1, str2)\n str1_chars = str1.chars\n str2_chars = str2.chars\n\n str2_chars.all? 
do |char|\n str1_chars.count(char) >= str2_chars.count(char)\n end\nend", "def third_anagram?(word1, word2)\n split1= word1.chars.sort\n split2 = word2.chars.sort\n split1 == split2\nend", "def spell_out strings\n strings.downcase.split(\"\").join(\"-\")\nend", "def wordplay(string1, string2)\n str1_hash = {}\n str2_hash = {}\n position_hash = {}\n\n # save string1 into an hash\n string1.split('').each do |ch|\n if str1_hash[ch]\n str1_hash[ch] += 1\n else\n str1_hash[ch] = 1\n end\n end\n\n # save string2 into an hash\n string2.split('').each do |ch|\n if str2_hash[ch]\n str2_hash[ch] += 1\n else\n str2_hash[ch] = 1\n end\n end\n\n # check if each character of string2 can be found in string1\n # and if there are enough characters to make string2\n str2_hash.each do |key, value|\n if !str1_hash.key?(key)\n return false\n elsif str1_hash[key] < value\n return false\n end\n end\n\n # save the position of the characters found in string1\n # i = 0\n # while i < string1.length\n # position_hash[string1[i]] = i unless position_hash[string1[i]]\n # i += 1\n # end\n i = 0\n while i < string1.length\n if !position_hash.key?(string1[i])\n position_hash[string1[i]] = i\n end\n i += 1\n end\n\n # save the indices needed to create string2\n i = 0\n str2_arr = []\n while i < string2.length\n str2_arr << position_hash[string2[i]]\n i += 1\n end\n\n str2_arr\nend", "def concat_without_endcaps(string1, string2)\n return nil if !string1 and !string2\n return string1 if string2.nil?\n return string2 if string1.nil?\n s1 = string1[-1] == ?$ ? string1[0..-2] : string1\n s2 = string2[0] == ?^ ? string2[1..-1] : string2\n s1 + s2\n end", "def better_ana?(word1, word2)\n word1.each_char do |char|\n j = word2.index(char)\n \n return false if j == nil\n word2.delete!(word2[j])\n end\n return word2.empty?\nend", "def two_strings(s1, s2)\n dict = {}\n answer = \"NO\"\n\n s1.split('').each do |char|\n dict[char] = 1\n end\n\n s2.split('').each do |char|\n if dict[char]\n answer = \"YES\"\n break\n end\n end\n\n return answer\nend", "def second_anagram(word1, word2)\n return false unless word1.length == word2.length\n word1_arr = word1.split(\"\")\n characters = word2.chars\n\n word1_arr.each do |letter1|\n idx = characters.find_index(letter1)\n characters.delete_at(idx) unless idx.nil?\n\n end\n characters.empty?\nend", "def twoStrings(s1, s2)\n require 'set'\n set1 = Set.new(s1.chars)\n set2 = Set.new(s2.chars)\n if set1.intersect? set2\n return \"YES\"\n else\n return \"NO\"\n end\nend", "def phase_iii(s1, s2)\n s1.chars.sort! 
== s2.chars.sort!\n\nend", "def scramble(s1,s2)\n s2.chars.each { |letter| return false unless s1.slice!(letter) }\n true\nend", "def third_anagram?(first_word, second_word)\n first_word = first_word.chars.sort\n second_word = second_word.chars.sort\n\n first_word == second_word\nend", "def love_test(str_1, str_2)\n str_1 = str_1.gsub(\" \", \"\").split(\"\")\n str_2 = str_2.gsub(\" \", \"\").split(\"\")\n p \"Total Chars in Common: #{((str_1 + str_2).count - 1) / (str_1 & str_2).count}\"\nend", "def not_common(a,b)\n not_common = []\n a_chars = a.split(\"\")\n b_chars = b.split(\"\")\n\n #if either string is empty, it should return the other string that has a value\n if (a_chars.length < 0) || (b_chars.length < 0)\n return not_common\n end\n\n # puts \"a #{a_chars}\"\n # puts \"b #{b_chars}\"\n\n #loop through a chars\n a_chars.each do |char|\n # puts \"a char #{char}\"\n #check if char from a_char included in b_char\n if !b_chars.include?(char)\n not_common.push(char)\n end\n end\n\n #loop through a chars\n b_chars.each do |char|\n # puts \"b char #{char}\"\n #check if char from b_char included in a_char\n if !a_chars.include?(char)\n not_common.push(char)\n end\n end\n\n not_common = not_common.join\n return not_common\nend", "def char_check(word_1, word_2, template_length)\n matches = [] # array for containing matching characters from word_2 ('char')\n $iterator = 0\n # Character loop\n word_1.each do |char_1|\n sample = char_1\n word_2.each do |char_2|\n if (sample == char_2)\n matches.push(char_2)\n word_2.delete_at($iterator)\n $iterator += 1\n else\n $iterator += 1\n end\n end\n end\n # Check that the number of matches implies an anagram\n matches.length == template_length ? true : false\nend", "def join_strings(word_1, word_2)\n join_strings=\"#{word_1} #{word_2}\"\nend", "def spell_out (foo)\n foo.downcase.split(\"\").join(\"-\")\n end", "def third_anagram?(first_word, second_word)\n first_word.chars.sort == second_word.chars.sort\nend", "def merge_word_strings str1, str2\n return str2||\"\" if str1.blank?\n return str1 if str2.blank?\n (str1.split(/\\s+/) + str2.split(/\\s+/)).uniq.join(' ')\nend", "def scramble(str1, str2)\n str2.chars.all? do |char|\n str1.count(char) >= str2.count(char)\n end\nend", "def first_anagram?(word1, word2)\n permutations = word1.split('').permutation.to_a.map(&:join)\n permutations.include?(word2)\nend", "def mutation?(base_word, mutation)\n\n word1_array = base_word.downcase.split(\"\")\n word2_array = mutation.downcase.split(\"\")\n\n word2_array.each do |letter|\n if word1_array.include?(letter)\n @result = true\n else\n @result = false\n end \n end\n @result\nend", "def single_word(word)\n if word[0] =~ /[aeiou]/i\n word + \"way\"\n else\n # nueva condicionante para palabras de puras consonantes sin vocales. ej: by\n if word.split(/([aeiou].*)/).length > 1\n # Parte palabra en 2 y lo mete en un array. 
1er elemento es de consonat(es) iniciales, 2o la parte restante de la palabra a partir de e incluyendo la primera vocal\n word.split(/([aeiou].*)/)[1] + word.split(/([aeiou].*)/)[0] + \"ay\"\n else\n word.split(/([aeiou].*)/)[0]\n end\n end\nend", "def firstColorSecondWord()\n\n\tcolor1 = $rng.rand(0..5)\n\tword1 = $rng.rand(6..11)\n\n\tcolor2 = $rng.rand(0..5)\n\tword2 = $rng.rand(6..11)\n\n\tprintJumble(color1,word1)\n\tprintJumble(color2,word2)\n\n\tlineArr = gets.chomp.split\n\n\tif(lineArr[0] == $arr[color1+6] && lineArr[1] == $arr[word2])\n\t\t\treturn 1\n\telse\n\t\t\treturn 0\n\tend\n\nend", "def allwords?\n vowels = 'aeiouy'\n words1 = @input1.downcase.gsub(/[!@#$%^&*()-=_+|;':\",.<>?']/, '').split(\" \")\n words2 = @input2.downcase.gsub(/[!@#$%^&*()-=_+|;':\",.<>?']/, '').split(\" \")\n if\n words1.all? {|str| str.count(vowels) >= 1} && words2.all? {|str| str.count(vowels) >= 1}\n return true\n else\n return \"You need to input actual words!\"\n end\n end", "def or(argument1, argument2)\n argument1 || argument2\n end", "def either(left, right)\n \"#{quotify left} OR #{quotify right}\"\n end", "def mutation?(str_one, str_two)\n str_two.chars.all? {|char| str_one.include? char}\nend", "def is_same_word(a,b)\n a = strip_cs_chars(a).downcase\n b = strip_cs_chars(b).downcase\n a == b\n end", "def fourth_anagram2?(word1, word2)\n hash = Hash.new { |h, k| h[k] = 0 }\n word1.chars.each do |char|\n hash[char] += 1\n end\n word2.chars.each do |char|\n hash[char] -= 1\n end\n hash.keys.all? { |v| v == 0 }\nend", "def compare_words(first_word, second_word)\n\tif first_word.length != second_word.length\n\t\treturn -1\n\tend\n\n\tcount = 0\n\t\t\n\t(0..first_word.length - 1).each do |i|\n\tif first_word[i] != second_word[i]\n\t\tcount = count + 1\n\tend\n\tend\n\n\treturn count\nend", "def third_anagram?(word1, word2)\n a, b = word1.split(''), word2.split('')\n\n a.sort == b.sort\nend", "def bitSetOr (a,b)\n\t\tresult = Array.new\n\t\tfor i in 0...(a.size)\n\t\t\tresult[i] = a[i] || b[i]\n\t\tend\n\t\treturn result\n\tend", "def common_chars(array)\n chars = array.shift.chars\n\n chars.select do |char|\n array.all? { |word| word.sub!(char, '') }\n end\nend", "def short_long_short(txt1, txt2)\n txt1.length < txt2.length ? (shrt_txt, lng_txt = txt1, txt2) : (shrt_txt, lng_txt = txt2, txt1) \n shrt_txt + lng_txt + shrt_txt\nend", "def scramble(st1, st2)\n st1_arr = st1.chars\n st2_arr = st2.chars\n st2_arr.all? { |char| st1_arr.count(char) >= st2.count(char) }\nend", "def second_anagram?(str_1, str_2)\n return false if str_1.length != str_2.length \n word_1 = str_1.chars\n word_2 = str_2.chars\n word_1.each_with_index do |char, idx| #\n idx_2 = word_2.find_index(char) \n if !idx_2.nil? #\n word_2.delete_at(idx_2) #m\n end\n end\n word_2.empty? #m\nend", "def second_anagram?(word1, word2)\n a, b = word1.split(''), word2.split('')\n \n a.each do |char|\n i = b.find_index(char)\n return false if !i\n b.delete_at(i)\n end\n \n b.length == 0\n \nend", "def scramble(s1,s2)\n pile_of_letters = s1.chars\n target_letters = s2.chars\n target_letters.uniq.all? 
{ |letter| pile_of_letters.count(letter) >= target_letters.count(letter) }\nend", "def xor(argument1, argument2)\n argument1 && !argument2 || !argument1 && argument2\n end", "def |( other )\n\t\treturn Regexp.new( \"(?:%s|%s)\" % [self.to_s, other.to_s] )\n\tend", "def second_anagram?(word1, word2)\n split1, split2 = word1.chars, word2.chars\n\n idx = 0\n while idx < split1.length\n split2.each_with_index do |el2, idx2|\n if el2 == split1[idx]\n split1.delete_at(idx)\n split2.delete_at(idx2)\n idx = -1\n break\n end\n end\n idx += 1\n end\n return false unless split1.empty? && split2.empty?\n true\nend", "def commonCharacterCount(s1, s2) s1.each_char.count { |x| s2.include?(x) && s2[x] = \"\" } end", "def common_chars(array)\n array = array.map { |word| word.dup }\n chars = array.shift.chars\n\n chars.select do |char|\n array.all? { |word| word.sub!(char, '') }\n end\nend", "def is_permutation_two?(string_one, string_two)\n array = Array.new(26, 0)\n\n string_one.split('').each do |val|\n array[val.ord - 97] += 1\n end\n\n string_two.split('').each do |val|\n array[val.ord - 97] -= 1\n end\n\n array.all? { |x| x == 0 }\nend", "def get_word(word_needed)\n print \"#{word_needed} \"\n gets.chomp\nend", "def char_concat(word)\n (1..word.length/2).map {|i| word[i - 1] + word[-i] + i.to_s}.join\nend", "def addEncoded(char1, char2)\n char1 ^ char2\n end", "def similar(string1, string2)\n string1 = string1.downcase\n string2 = string2.downcase\n\nend", "def work_on_strings(string1, string2)\n [swap_letters(string1, string2), swap_letters(string2, string1)].join\nend", "def second_anagram?(word1, word2)\n second_word = word2.split(\"\")\n word1.each_char do |char|\n second_word.delete_at(second_word.find_index(char)) if second_word.include?(char)\n end\n second_word == []\n\nend", "def shiftr(word)\n word[-1] << word[0..-2]\nend", "def firstWordSecondColor()\n\n\tcolor1 = $rng.rand(0..5)\n\tword1 = $rng.rand(6..11)\n\n\tcolor2 = $rng.rand(0..5)\n\tword2 = $rng.rand(6..11)\n\n\tprintJumble(color1,word1)\n\tprintJumble(color2,word2)\n\n\tlineArr = gets.chomp.split\n\n\tif (lineArr[0] == $arr[word1] && lineArr[1] == $arr[color2+6])\n\t\t\treturn 1\n\telse\n\t\t\treturn 0\n\tend\n\nend", "def BitwiseOne(strArr)\n first = strArr[0]\n second = strArr[1]\n third = \"\"\n \n i = 0 \n while i < first.length\n # When i change this line to first[i] == \"0\" &&\n # second[i] == \"0\"\n # all tests passes. Why????????????????????????\n if first[i] == second[i]\n \t third << \"0\"\n else\n \t third << \"1\"\n end\n \n i += 1\n end\n third \nend", "def commonCharacterCount(s1, s2)\n a1 = s1.split(\"\").uniq\n a2 = s2.split(\"\").uniq\n \n b = a1 - a2\n c = a2 - a1\n \n check_a = a1 - b - c\n \n count = 0\n \n check_a.each do |char|\n count_1 = s1.split(\"\").count(\"#{char}\")\n count_2 = s2.split(\"\").count(\"#{char}\")\n \n if count_1 < count_2\n count += count_1\n else\n count += count_2\n end\n end\n \n count\nend", "def spell_out str\n str.downcase.split('').join('-')\nend" ]
[ "0.6312455", "0.6115193", "0.60888296", "0.6020706", "0.5973992", "0.594146", "0.5910836", "0.5907535", "0.59036034", "0.5901428", "0.5901428", "0.589075", "0.5874602", "0.5841897", "0.57952756", "0.5777708", "0.57760334", "0.574149", "0.57329607", "0.5681136", "0.5671368", "0.5630066", "0.56296647", "0.5623941", "0.5618755", "0.5602441", "0.5595365", "0.5585262", "0.5581635", "0.55742216", "0.5570874", "0.5556629", "0.5556195", "0.554872", "0.55474156", "0.55229616", "0.5517938", "0.551203", "0.5511265", "0.5506713", "0.5506321", "0.5502619", "0.5486917", "0.5483828", "0.5479888", "0.547342", "0.5470269", "0.54272205", "0.54218143", "0.5419512", "0.5417588", "0.54145926", "0.54132193", "0.5405376", "0.5401601", "0.54004395", "0.5398729", "0.5396265", "0.5393053", "0.53875977", "0.5380627", "0.5378134", "0.53706026", "0.53663856", "0.5362105", "0.53552103", "0.5341906", "0.53410053", "0.53399557", "0.53356946", "0.53347754", "0.5331718", "0.53289664", "0.532715", "0.5326253", "0.5324499", "0.5322916", "0.5319877", "0.53166866", "0.5312535", "0.5310701", "0.53087926", "0.52900714", "0.52845055", "0.52833396", "0.52746594", "0.5268627", "0.5267646", "0.5267126", "0.52654314", "0.5260701", "0.52589536", "0.5253316", "0.52496207", "0.52482074", "0.5245646", "0.5242868", "0.52405894", "0.52339876", "0.5233459", "0.5229969" ]
0.0
-1
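The bigram record above only tallies counts into `@bifreq`; estimating c(word_2 | word_1) as a conditional probability also needs counts for the conditioning word. A minimal sketch, assuming a `@unifreq` hash of unigram counts (with "PHI" counted once per sentence) is maintained alongside `@bifreq` — that hash and the helper name are assumptions, not part of the record:

# Hypothetical helper built on the counts produced by bigram_count above.
# P(word_2 | word_1) = count("word_1 word_2") / count(word_1)
def bigram_probability(word_1, word_2)
  context_count = @unifreq[word_1].to_f
  return 0.0 if context_count.zero?   # unseen context: no maximum-likelihood estimate
  @bifreq[word_1 + " " + word_2] / context_count
end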
gets c(word_3 | word_1 word_2)
def trigram_count()
  @corpus.each { |sentence_arr|
    prev_word_1 = ""
    prev_word_2 = ""
    sentence_arr.each { |word|
      if(prev_word_1 != "" && prev_word_2 != "")
        @trifreq[prev_word_1 + " " + prev_word_2 + " " + word] += 1
      elsif(prev_word_1 == "" && prev_word_2 != "")
        @trifreq["PHI "+prev_word_2+" "+word] += 1
      elsif(prev_word_1 == "" && prev_word_2 == "")
        @trifreq["PHI PHI "+word] += 1
      end
      prev_word_1 = prev_word_2
      prev_word_2 = word
    }
  }
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def combinations\nputs \"Enter text\"\ntext = gets.chomp\nputs text\nwords = text.scan /\\w/\n\n\n\nputs words\nend", "def secondstrc(f)\n res = \"\"\n flag = 0\n while l = f.gets\n if flag == 2 then\n res << l\n flag = 0\n elsif flag == 1 then\n flag = 2\n elsif /^>/ =~ l then\n flag = 1\n end\n end\n return res\nend", "def secondstrc(f)\n res = \"\"\n flag = 0\n while l = f.gets\n if flag == 2 then\n res << l\n flag = 0\n elsif flag == 1 then\n flag = 2\n elsif /^>/ =~ l then\n flag = 1\n end\n end\n return res\nend", "def spell_out word6\n word6.downcase.split(\"\").join(\"-\")\nend", "def bitwise_or(a, b)\n\tresult = ''\n\ta.each_char.with_index do |val, index|\n\t\tif val == '1' || b[index] == '1'\n\t\t\tresult.concat '1'\n\t\telse\n\t\t\tresult.concat '0'\n\t\tend\n\tend\n\treturn result\nend", "def union(arg1, *arg2)\n first = arg1\n rest = []\n arg2.each do |char|\n char.each do |subchar|\n rest << subchar\n end\n end\n return first + rest\n \nend", "def stuff(input)\n\twords = input.split(\" \")\n\n\twords.each do |x| \n\t\tif x == 'the' || x == 'of' || x == 'and' || x == 'is' || x == 'a'\n\t\t\telse\n\t\t\t\t@acronym << x.to_s.chars.first.upcase + \".\"\n\t\tend\n\tend\nend", "def unite_words\n _, a_rest, a_word = %r{^(.* )?(\\w+)$}.match(a.text).to_a # a ends with word character\n _, b_word, b_rest = %r{^(\\w+\\s*)(.* )?$}.match(b.text).to_a # b starts with word character\n\n if a_word && b_word\n if a_word.length > b_word.length\n shift_backward(b_word)\n else\n shift_forward(a_word)\n end\n end\n end", "def borrow(speech)\n answer = []\n speech.split(' ').each do |word|\n word.split('').each do |char|\n ascii = char.ord\n if (ascii >= 97 && ascii <= 122) || (ascii >= 65 && ascii <= 90)\n answer << char.downcase\n end\n end\n end\n answer.join\nend", "def flip_words (first_word, second_word)\n puts second_word + first_word\nend", "def spell_out strings\n strings.downcase.split(\"\").join(\"-\")\nend", "def typoglycemiaWord(input)\n if input.length <= 3\n input\n end\n letters = input.chars\n last = letters.pop\n first = letters.shift\n letters.shuffle!\n letters << first\n letters.rotate!(-1)\n letters << last\n letters.join\nend", "def repeater(word)\n word.chars.map { |char| char * 2 }.join\nend", "def rule3(str = @word)\n newstr = str.chars\n\n reverse = false\n output = ''\n\n newstr.each do |char|\n if char == '!'\n reverse = !reverse\n else\n output << char\n end\n end\n\n output.reverse! unless !reverse\n output.to_s.squeeze(\" \")\n end", "def words\n retval = [opcode]\n\n if a_param.needs_word?\n retval << a_param.param_word\n end\n\n if b_param && b_param.needs_word?\n retval << b_param.param_word\n end\n\n return retval\n end", "def shiftr(word)\n word[-1] << word[0..-2]\nend", "def wrong_and_option(wrong, option) #function to take input of wrong word and number of options.\n $wrong_word = wrong\n $no_option = option\n end", "def spell_out (foo)\n foo.downcase.split(\"\").join(\"-\")\n end", "def edits1 word\n n = word.length\n deletion = (0...n).collect {|i| word[0...i]+word[i+1..-1] }\n transposition = (0...n-1).collect {|i| word[0...i]+word[i+1,1]+word[i,1]+word[i+2..-1] }\n alteration = []\n n.times {|i| @letters.each_byte {|l| alteration << word[0...i]+l.chr+word[i+1..-1] } }\n insertion = []\n (n+1).times {|i| @letters.each_byte {|l| insertion << word[0...i]+l.chr+word[i..-1] } }\n result = deletion + transposition + alteration + insertion\n result.empty? ? 
nil : result\n end", "def get_word(word_needed)\n print \"#{word_needed} \"\n gets.chomp\nend", "def complementWord()\n\n\tcolor = $rng.rand(0..5)\n\tword = $rng.rand(6..11)\n\n\tprintJumble(color,word)\n\n\tentry = gets.chomp\n\n\tif (entry == getComplement($arr[word]))\n\t\t\treturn 1\n\telse\n\t\t\treturn 0\n\tend\n\nend", "def o_words(sentence)\n select_words = sentence.split.select { |word| word.include?(\"o\") }\n return select_words\nend", "def solve_cipher(input, shift)\n words = input.split(\"\")\n \n string_container = \"\" \n words.each do | letter|\n \tif letter == \" \"\n \t\tstring_container += \" \"\n\n \telse\n \tstring_container += (letter.ord + shift).chr\n\tend\n end\n\tstring_container\n\t#your code goes here\nend", "def oxford_comma(words)\n if words.length == 1\n words.join\n elsif words.length == 2\n return \"#{words[0]} and #{words[1]}\"\n else words.length > 2\n words[-1].insert(0 , \"and \")\n end\n words.join(\", \") \n end", "def single_word(word)\n if word[0] =~ /[aeiou]/i\n word + \"way\"\n else\n # nueva condicionante para palabras de puras consonantes sin vocales. ej: by\n if word.split(/([aeiou].*)/).length > 1\n # Parte palabra en 2 y lo mete en un array. 1er elemento es de consonat(es) iniciales, 2o la parte restante de la palabra a partir de e incluyendo la primera vocal\n word.split(/([aeiou].*)/)[1] + word.split(/([aeiou].*)/)[0] + \"ay\"\n else\n word.split(/([aeiou].*)/)[0]\n end\n end\nend", "def word_cap3(str)\n str.split.map { |word| word.sub(word[0], word[0].upcase) }.join(\" \")\nend", "def third_anagram?(word_1, word_2)\n word_1.chars.sort == word_2.chars.sort\nend", "def process_word word\n const_match = /^[BCDFGHIJKLMNPQRSTVWXYZbcdfhhtjklmnpqrstvwxyz]+/.match(word)\n letter_match = /^[a-zA-Z]+/.match(word) #Match letters only\n other_chars = word[letter_match.to_s.length..-1]\n if const_match.to_s.length > 0\n return const_trans(letter_match.to_s, const_match) + other_chars\n else\n return vowel_trans(letter_match.to_s) + other_chars \n end\n \n end", "def gets(sep=$/) end", "def gets(sep=$/) end", "def separate word7, separator=\"-\"\n word7.split(\"\").join(separator)\nend", "def third_anagram?(word1, word2)\n word1_chars = word1.split(\"\").sort\n word2_chars = word2.split(\"\").sort\n word1_chars == word2_chars\n\nend", "def o_words(sentence)\n words = sentence.split(' ')\n return words.select { | val | val.include?('o') }\nend", "def translateForOneWord(msg)\n aryChar = msg.split(\"\")\n if aryChar[0].match(/a|e|i|o|u/)\n aryChar << \"ay\"\n elsif (not aryChar[0].match(/a|e|i|o|u/)) && aryChar[1] == \"q\" && aryChar[2] == \"u\" # consonant + q and u\n aryChar << aryChar[0]\n aryChar << aryChar[1]\n aryChar << aryChar[2]\n aryChar.shift #this will remove the first character\n aryChar.shift #this will remove the first character\n aryChar.shift #this will remove the first character\n aryChar << \"ay\"\n elsif aryChar[0] == \"q\" && aryChar[1] == \"u\" # q and u\n aryChar << aryChar[0]\n aryChar << aryChar[1]\n aryChar.shift #this will remove the first character\n aryChar.shift #this will remove the first character\n aryChar << \"ay\"\n elsif (not aryChar[0].match(/a|e|i|o|u/)) && (not aryChar[1].match(/a|e|i|o|u/)) && (not aryChar[2].match(/a|e|i|o|u/)) # if 3 consonants\n aryChar << aryChar[0]\n aryChar << aryChar[1]\n aryChar << aryChar[2]\n aryChar.shift #this will remove the first character\n aryChar.shift #this will remove the first character\n aryChar.shift #this will remove the first character\n aryChar << \"ay\"\n elsif (not 
aryChar[0].match(/a|e|i|o|u/)) && (not aryChar[1].match(/a|e|i|o|u/)) #if it starts with 2 consonants\n aryChar << aryChar[0]\n aryChar << aryChar[1]\n aryChar.shift #this will remove the first character\n aryChar.shift #this will remove the first character\n aryChar << \"ay\"\n\n elsif not aryChar[0].match(/a|e|i|o|u/)\n aryChar << aryChar[0]\n aryChar.shift #this will remove the first character\n aryChar << \"ay\"\n end\n aryChar.join(\"\")\nend", "def match?(wordA, wordB)\n wordA == wordB.each_char.sort.join('') \nend", "def spell_out str\n str.downcase.split('').join('-')\nend", "def reverberate(sent) \n vowels = 'aeiou'\n words = sent.split\n new_words = []\n\n\n words.each do |word|\n if word.length < 3 \n new_words << word \n elsif vowels.include?(word[-1].downcase)\n new_words << word + word.downcase\n else\n i = word.length - 1\n while i >= 0 \n if vowels.include?(word[i]) \n new_words << word + word[i .. -1]\n break\n end \n\n i -= 1\n end \n end \n end \n\n new_words.join(' ')\n\nend", "def swap (str)\n words = str.split\n new_words = []\n\n words.each do |word|\n if word.length > 3\n new_words << word[-1] + word[1..-1] + word[0]\n else \n \n end \n\n new_words.join(' ')\nend \n\n\np swap('Oh what a wonderful day it is') #== 'hO thaw a londerfuw yad ti si'\np swap('Abcde') #== 'ebcdA'\np swap('a') #== 'a'", "def caesar_cipher(str, shift)\n word = str.split(\" \")\n ans = \"\"\n word.each do |var|\n if var == word.last\n ans << caeser_helper(var, shift)\n else\n ans << caeser_helper(var, shift) + \" \"\n end\n end\n \n ans\nend", "def select_words\n input = \"\"\n while input != \"1\" && input != \"2\" #&& input != \"q\"\n puts \"Press 1 for EASY (seeded random words), press 2 for HARD (really random words).\" #q to QUIT\n input = gets.chomp\n if input == \"1\"\n Word.random_words_from_seed\n elsif input == \"2\"\n Word.random_arr_of_words\n elsif input == \"q\"\n ClearPage.clear\n goodbye\n end\n end\nend", "def third_anagram?(str, word)\n str.chars.sort == word.chars.sort\n end", "def join_strings(word_1, word_2)\n join_strings=\"#{word_1} #{word_2}\"\nend", "def danish(text)\n p text.sub(/\\b(apple|cherry|blueberry)\\b/, 'danish')\nend", "def smash(words)\n sentence = \"\"\n words.each do |w|\n sentence = sentence + \"#{w} \"\n end\n sentence[0..-2]\nend", "def find_ocurrences(text, first, second)\n text = text.split(' ')\n \n word_output = []\n \n text.each_with_index do |word, index|\n next if index == 0 || index == 1\n \n word_output << word if text[index - 1] == second && text[index - 2] == first\n end\n \n word_output\nend", "def solve(s)\n s_arr = s.split(\" \")\n acro = \"\"\n s_arr.each do |word|\n if word.downcase != 'and'\n acro += word[0].upcase\n end\n end\n\n return acro\nend", "def spinWords(string)\n string.split(\" \").map { |word| word.length >= 5 ? word.reverse : word }.join(\" \")\nend", "def concatenate_words(w1, w2)\r\n raise ArgumentError.new(\"Only Strings allowed!\") if !w1.is_a? String or !w2.is_a? String\r\n w1 + ' ' + w2\r\nend", "def char_concat(word)\n (1..word.length/2).map {|i| word[i - 1] + word[-i] + i.to_s}.join\nend", "def word_combos_to(all_word_combo)\n\t\ttrue_perms = all_word_combo.flatten(1)\n\t\treturn true_perms.map {|x| x.join}\n\tend", "def alternate_words(s)\nwords = s.gsub(/[!@$%^&*()-=_+:;,.<>?\\|]/ , ' ').split\n#removing all symbols and spliting the words. 
Setting variable to words\nfirst = words.sort\n#sorting the words\nprint first[0..2]\n#print the first 0 to 2 indexes\n\n\nend", "def or(argument1, argument2)\n argument1 || argument2\n end", "def spell_out string\n string.downcase.split(\"\").join(\"-\")\nend", "def spell_out string\n string.downcase.split(\"\").join(\"-\")\nend", "def crazy_strings(first_word, second_word)\nputs \"#{first_word} #{second_word.gsub(\"s\",\"z\").swapcase}!\"\nend", "def spinWords(string)\n string.split.map { |word| word.length > 4 ? word.reverse : word }.join(' ')\nend", "def word_combos(word)\n\t\tword = word.chars.to_a\n\t\tall_word_combo = []\n\t\ti = 1\n\t\twhile i <= word.size\n\t\t\tall_word_combo << word.permutation(i).to_a\n\t\t\ti+=1\n\t\tend\n\t\treturn all_word_combo\n\tend", "def typoglycemiaSentence(input)\n words = input.split(' ')\n words.map! { |x| typoglycemiaWord(x) }\n words.join(\" \")\nend", "def or( *args ); { $or => args } end", "def translate_three_consonants( s )\n\n\t\t\t\t# store the letters in an array\n\t\t\t\tch_array = s.chars\n\n\t\t\t\t# then grab the first letter from the array\n\t\t\t\tfirst_letter = ch_array.shift\n\n\t\t\t\t# then grab the second letter from the array\n\t\t\t\tsecond_letter = ch_array.shift\n\n\t\t\t\t# then grab the third letter from the array\n\t\t\t\tthird_letter = ch_array.shift\n\n\t\t\t\t# put both letters at the back with push and add 'ay'\n\t\t\t\tch_array << first_letter + second_letter + third_letter + \"ay\"\n\n\t\t\t\t# bring it back together\n\t\t\t ch_array.join\n\n\tend", "def XOR(str1,str2)\n ret =\"\"\n str1.split(//).each_with_index do |c, i|\n ret[i] = (str1[i].ord ^ str2[i].ord).chr\n end\n return ret\nend", "def |(parslet); end", "def |(parslet); end", "def spell_out(str)\n str.downcase.split(\"\").join(\"-\")\nend", "def o_words(sentence)\n\treturn sentence.split.select { |word| word.include?('o') } \nend", "def double_char(str)\n # x = 0\n # word = \"\"\n # until x == str.length do\n # 2.times { word << str[x] }\n # x += 1\n # end\n # puts word\n #\n # or\n #\n word = \"\"\n for i in (0..str.length-1)\n 2.times { word << str[i]}\n end\n puts word\nend", "def processor(str, num)\n # Split arguments into arrays of individual letters\n arr = str.split(\"\")\n\n # For each arr, locate index location of first vowel after the first letter\n vow = arr.find_index { |let|\n let =~ /[aeiouy]/ if arr.index != 0\n }\n\n # Cut letters from array to create wom1 and wom2\n # Only return both if they are different (which will be whenever the\n # first vowels are different between the two words)\n\n # If first word...\n if num == 1\n # Option 1: begin from index 0 and length is vow + 1 if using word 1 vowel\n # Option 2: begin from index 0 and length is vow if using word 2 vowel\n wom1 = arr.slice(0, vow + 1)\n wom2 = arr.slice(0, vow)\n\n # If second word...\n else\n # Option 1: begin from index vow + 1 if using word 1 vowel\n # Option 2: begin from index vow if using word 2 vowel\n # To ensure length is enough to get to last letter, use arr.length\n wom1 = arr.slice(vow + 1, arr.length)\n wom2 = arr.slice(vow, arr.length)\n\n end\n\n # Return array of wom1 and wom2 joined up into strings\n res = [wom1.join(\"\"), wom2.join(\"\")]\nend", "def word_unscrambler(str, words)\n str = str.split('').sort.join('')\n possible = []\n words.map do |word|\n sort_word = word.split('').sort.join('')\n possible << word if word_c == str\n end\n return possible\nend", "def word_pattern(pattern, input)\n \nend", "def puts_first_word(words)\r\n word = 
words.shift()\r\n puts word\r\nend", "def get_search_term\n puts \"Enter search term\"\n search_term = gets.chomp.split(\" \").join(\"+\")\nend", "def set_args\n get_keywords.join(\"\\\\|\")\n end", "def puts_first_word(words)\n word = words.shift()\n puts word\nend", "def spell_out(str)\n str.downcase.split('').join('-')\nend", "def joinor(array, delimiter = ', ', word = 'or')\n if array.size == 2\n array.join(\" #{word} \")\n else\n array[-1] = \"#{word} #{array.last}\"\n array.join(delimiter)\n end\nend", "def combine(str1, str2)\n # Create an array which we will read the final outputs from\n output = []\n\n # Run processor for word 1 and push result to output\n output.push(processor(str1, 1))\n\n # Run processor for word 2 and push result to output\n output.push(processor(str2, 2))\n\n # Join the partial words together to form the wombinations\n wombination1 = (output[0][0] + output [1][0])\n wombination2 = (output[0][1] + output [1][1])\n\n # If the words are the same, then just return one of them...\n if wombination1 == wombination2\n puts wombination1\n\n # otherwise return both as an array\n else\n puts [wombination1, wombination2]\n\n end\nend", "def second_anagram?(word1, word2)\n w1_copy = word1.dup\n w2_copy = word2.dup\n w1_copy.each_char do |ch1|\n w2_copy.each_char do |ch2|\n if ch1 == ch2\n w1_copy[w1_copy.index(ch1)] = \"\"\n w2_copy[w2_copy.index(ch2)] = \"\"\n end\n end\n end\n w1_copy.empty? && w2_copy.empty?\nend", "def third_anagram?(word1, word2)\n split1= word1.chars.sort\n split2 = word2.chars.sort\n split1 == split2\nend", "def start_of_word(a, b)\n return a.chars.first(b).join\nend", "def ADDER(a,b,cin)\n\treturn XOR(XOR(a,b),cin) , OR(AND(a,b),AND(cin,XOR(a,b)))\nend", "def pirates_say_arrrrrrrrr(string)\n other_word = \"\"\n string.length.times do | index |\n if ((string[index].downcase.include? \"r\") && (string[index + 1] != nil))\n other_word << string[index + 1]\n end\n end\n\n return other_word\nend", "def third_anagram?(word1, word2)\n word1.split(\"\").sort == word2.split(\"\").sort\nend", "def pig_it text\n text.gsub(/(\\w)(\\w+)*/, '\\2\\1ay')\nend", "def pig_it text\n text.gsub(/(\\w)(\\w+)*/, '\\2\\1ay')\nend", "def permute_word(word)\n word2 = word.clone\n if word.length > 3\n p = crp(word.length - 2, 1)\n 1.upto(word.length - 2) {|i| word2[p[i]] = word[i]}\n end\n word2\nend", "def mutation(array)\n p first_word = array[0].downcase.chars.sort.join(\" \")\n p second_word = array[1].downcase.chars.sort.join(\" \")\n p first_word.include?(second_word) ? true: false\nend", "def translate s\n \n words = s.split.map{|word|\n if %w(a e i o u).include?(word[0])\n word << \"ay\"\n elsif %w(thr sch squ).include?(word[0..2])\n word[3..-1] << (word[0..2] << \"ay\")\n elsif %w(ch qu br th).include?(word[0..1])\n word[2..-1] << word[0..1] << \"ay\" \n else\n word[1..-1] << (word[0] << \"ay\") \n end\n }\n words.join(\" \")\n\nend", "def strs\n gets.split\n end", "def spell_out(string)\n string.downcase.split(\"\").join(\"-\")\nend", "def crunch(words)\n words.split.map! 
do|word|\n word.squeeze\n end.join(' ')\nend", "def puts_first_word(words)\n word = words.shift() #Error 11: Again, no such method as 'poop', using inbuilt string method shift on words.\n puts word\nend", "def pig_it(str)\n str.gsub(/(\\w)(\\w+)*/, '\\2\\1ay')\nend", "def pig_it_ms_two(str)\n pig = []\n str.split.each do |w|\n pig << w.chars.rotate.join + \"ay\" if w =~ /\\w/\n pig << w if w =~ /\\W/\n end\n pig.join(\" \")\nend", "def spinWords(string)\n string.split.map { |word| word.length >= 5 ? word.reverse : word }.join(' ')\nend", "def print_first_word(words)\n word = words.shift\n puts wor\n end", "def words(*args); data(2, *args); end", "def encrypt_this(text)\n text.split.map { |word|\n word[1], word[-1] = word[-1], word[1] if word.size > 2\n word[0] = word[0].ord.to_s\n word\n } .join(' ')\nend", "def third_anagram?(word1, word2)\n a, b = word1.split(''), word2.split('')\n\n a.sort == b.sort\nend", "def bitwise_and(a, b)\n\tresult = ''\n\ta.each_char.with_index do |val, index|\n\t\tif val == b[index]\n\t\t\tresult.concat '1'\n\t\telse\n\t\t\tresult.concat '0'\n\t\tend\n\tend\n\treturn result\nend", "def spinWords(string)\n string.split.map { |word| word.length >= 5 ? word.reverse : word }.join(\" \")\nend", "def oxfordize(parts)\n case parts.size\n when 0..1\n parts.first\n when 2\n parts.join(' and ')\n else\n \"#{parts.slice(0..-2).join(', ')}, and #{parts.slice(-1)}\"\n end\nend" ]
[ "0.6176229", "0.57886636", "0.57886636", "0.5743057", "0.5568961", "0.5372955", "0.5369312", "0.5365232", "0.5332327", "0.5262724", "0.5248385", "0.524807", "0.52473235", "0.5227573", "0.52124655", "0.521071", "0.5202199", "0.51969796", "0.5196516", "0.5187681", "0.51863533", "0.51788646", "0.5164539", "0.516023", "0.5126262", "0.5126053", "0.51257443", "0.5114203", "0.5103874", "0.5103874", "0.5103375", "0.51012725", "0.50964624", "0.5095504", "0.5080147", "0.50797004", "0.5071516", "0.5061304", "0.505863", "0.5056219", "0.50412947", "0.5040667", "0.5038032", "0.5037246", "0.50371665", "0.5035677", "0.50290406", "0.50261587", "0.5015989", "0.50145817", "0.5012717", "0.501232", "0.5011084", "0.5011084", "0.5007573", "0.5004386", "0.5001686", "0.49909553", "0.49894446", "0.49887085", "0.49861708", "0.49856305", "0.49856305", "0.4982859", "0.49818558", "0.49792135", "0.49774143", "0.49772933", "0.49760225", "0.49725437", "0.4964377", "0.49605516", "0.4958186", "0.49575713", "0.49575156", "0.49537975", "0.495156", "0.4951337", "0.49461785", "0.49460492", "0.49458036", "0.49453956", "0.4944197", "0.4944197", "0.49421093", "0.49381822", "0.49309647", "0.49303922", "0.4928245", "0.49272543", "0.49267414", "0.49257275", "0.49246952", "0.4918923", "0.49167436", "0.49157587", "0.49155703", "0.49134403", "0.4913435", "0.49120817", "0.49084172" ]
0.0
-1
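Analogously, the trigram tallies in `@trifreq` can be normalized by the bigram tallies to estimate c(word_3 | word_1 word_2). A sketch under the same space-joined key convention used by `trigram_count` and `bigram_count` above; the helper name is hypothetical:

# Hypothetical helper: P(word_3 | word_1 word_2) from the trigram/bigram counts.
def trigram_probability(word_1, word_2, word_3)
  context_count = @bifreq[word_1 + " " + word_2].to_f
  return 0.0 if context_count.zero?   # unseen bigram context
  @trifreq[word_1 + " " + word_2 + " " + word_3] / context_count
end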
return shortest path (as Array) from sp to ep; return nil if no path exists
def AStar(sp, ep, walkable)
  nodes = Hash.new {|hash, key| hash[key] = nil}
  for pos in walkable
    h = (pos.row - ep.row).abs + (pos.col - ep.col).abs
    g = @m * @n
    nodes[pos] = Node.new(g, h, nil)
  end
  nodes[sp].g = 0
  open_list = { sp => nodes[sp] }
  close_list = {}
  # searching
  until open_list.empty?
    key, node = open_list.min {|a, b| (a[1].g + a[1].h) <=> (b[1].g + b[1].h) }
    open_list.delete(key)
    close_list[key] = node
    break if key == ep
    # current
    r, c = key
    [[0,1],[0,-1],[1,0],[-1,0]].each do |dr,dc|
      nr, nc, nkey = r+dr, c+dc, [r+dr, c+dc]
      # ignore
      if (not walkable.include?(nkey)) or close_list.has_key?(nkey)
        next
      end
      # adjacent
      next_g, next_d = node.g + 1, [dr, dc]
      next_node = nodes[nkey]
      # already in openlist
      if open_list.has_key?(nkey)
        if next_g < next_node.g
          next_node.g = next_g
          next_node.d = next_d
        end
      end
      # not in openlist
      if not open_list.has_key?(nkey)
        next_node.g = next_g
        next_node.d = next_d
        open_list[nkey] = next_node
      end
    end
  end
  # no shortest path
  return nil if not close_list.include?(ep)
  # find shortest path
  pos, node = Array.new(ep), close_list[ep]
  ret = []
  loop do
    ret << Array.new(pos)
    break if pos == sp
    pos.col -= node.d.col
    pos.row -= node.d.row
    node = nodes[pos]
  end
  return ret.reverse!
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def shortest_path\n initial_position_obj = { position: start_position, source: {} }\n\n knights_path = [initial_position_obj]\n\n while knights_path.present?\n current_position = knights_path.shift\n\n position = current_position[:position]\n\n if position == end_position\n return path_to_destination(current_position, initial_position_obj)\n end\n\n add_possible_destination(position, current_position, knights_path)\n end\n end", "def shortest_path\n pa = AI::AStarAlgorithm.new($map.grid, $map.gen_coordinates)\n pa.astar\n end", "def call\n return nil unless on_the_graph? # Prevents a stack overflow in the gem\n return shortest_path[1..-1] if shortest_path.length > 1\n end", "def shortest_path_to(node)\n return nil if @previous_nodes[node].nil?\n\n nodes = [node]\n while previous_node = @previous_nodes[nodes[0]] do\n nodes.unshift(previous_node)\n end\n\n nodes\n end", "def shortest_path\n dist, previous = Hash.new(Infinity), {}\n dist[@source] = 0.0\n queue = @graph.vertex_set.dup\n\n until queue.empty?\n u = queue.min { |a,b| dist[a.name] <=> dist[b.name] }\n break if dist[u.name].infinite?\n queue.delete(u)\n\n u.each_edge do |e, v|\n alt = dist[u.name] + e.weight\n if alt < dist[v.name]\n dist[v.name] = alt\n previous[v.name] = u.name\n end\n end\n end\n\n path = []\n u = @dest\n until previous[u].nil?\n path.unshift(u)\n u = previous[u]\n end\n\n path.unshift(@source)\n end", "def shortest_paths(s)\n @source = s\n dijkstra s\n puts \"Source: #{@source}\"\n @nodes.each do |dest|\n puts \"\\nTarget: #{dest}\"\n print_path dest\n if @d[dest] != @INFINITY\n puts \"\\nDistance: #{@d[dest]}\"\n else\n puts \"\\nNO PATH\"\n end\n end\n end", "def compute_shortest_path\n update_distance_of_all_edges_to(Float::INFINITY)\n @distance_to[@source_node] = 0\n\n # The prioriy queue holds a node and its distance from the source node.\n @pq.insert(@source_node, 0)\n while @pq.any?\n node = @pq.remove_min\n node.adjacent_edges.each do |adj_edge|\n relax(adj_edge)\n end\n end\n end", "def shortest_paths(s)\n\t\t@source = s\n\t\tdijkstra s\n\t\tputs \"Source: #{@source}\"\n\t\[email protected] do |dest|\n\t\t\tputs \"\\nTarget: #{dest}\"\n\t\t\tprint_path dest\n\t\t\tif @d[dest] != @INFINITY\n\t\t\t\tputs \"\\nDistance: #{@d[dest]}\"\n\t\t\telse\n\t\t\t\tputs \"\\nNO PATH\"\n\t\t\tend\n\t\tend\n\tend", "def shortest_path_to(dest_node)\n return unless has_path_to?(dest_node)\n path = []\n while (dest_node != @node) do\n path.unshift(dest_node)\n dest_node = @edge_to[dest_node]\n end\n path.unshift(@node)\n end", "def shortest_path(start_coord, destination_coord)\n queue = Queue.new\n queue << [start_coord]\n seen = Set.new([start_coord])\n while queue\n begin\n path = queue.pop(non_block = true)\n rescue ThreadError\n return nil\n end\n x, y = path[-1]\n if [x, y] == destination_coord\n return path\n end\n for x2, y2 in [[x + 1, y], [x - 1, y], [x, y + 1], [x, y - 1]]\n if (0 <= x2 && x2 < @map[0].length) && (0 <= y2 && y2 < @map.length) && (@map[y2][x2] != @WALL && @map[y2][x2] != @PERMANENT_WALL) && !seen.include?([x2, y2])\n queue << (path + [[x2, y2]])\n seen.add([x2, y2])\n end\n end\n end\n end", "def get_path(start, stop)\n @graph.dijkstra_shortest_path(@weight_map, start, stop)\n end", "def shortest_path(start, finish)\n queue << [start, 0]\n loop do\n break if queue.empty?\n vertex, d = queue.pop\n graph[*vertex] = d\n break if vertex == finish\n enqueue_neighbours(*vertex, d + 1)\n end\n queue.clear\n !blank?(finish) ? 
build_path(start, finish) : []\n end", "def shortest_paths(source, dest)\n @graph_paths=[]\n @source = source\n dijkstra source\n @path=[]\n find_path dest\n actual_distance=if @distance[dest] != INFINITY\n @distance[dest]\n else\n \"no path\"\n end\n \"Shortest route and distance : #{@path.join(\"-->\")}, #{actual_distance} km\"\n end", "def shortest_path(nodes, starting, ending)\n queue = [starting]\n previous = {}\n previous[starting] = nil\n while !queue.empty?\n p queue\n last_node = queue.pop\n if last_node == ending\n path = []\n while previous[last_node]\n path.unshift(last_node)\n last_node = previous[last_node]\n end\n path.unshift(starting)\n return path\n end\n if neighbors = nodes[last_node]\n neighbors.each do |neighbor|\n unless previous.has_key?(neighbor)\n queue.unshift(neighbor) \n previous[neighbor] = last_node\n end\n end\n end\n end\nend", "def shortest_walk(arr)\n short_path=[]\n # outer loop continues until we can't eliminate any more directions\n done = false\n while !done do\n done = true\n i=0\n while (i < arr.length) \n if ((arr[i] == \"NORTH\" and arr[i+1] == \"SOUTH\") or (arr[i] == \"SOUTH\" and arr[i+1] == \"NORTH\") or (arr[i] == \"EAST\" and arr[i+1] == \"WEST\") or (arr[i] == \"WEST\" and arr[i+1] == \"EAST\"))\n # if adjacent directions are opposite, skip them both and reset done to false\n # because we may need to make another pass of the resulting array\n i+=2\n done = false\n else\n # Otherwise, store this direction in our shortest path and move to the next direction\n short_path << arr[i]\n i+=1\n end \n end\n # Set our arr in the loop to the shortest path. We'll check it again if we've\n # removed anything from the previous iteration\n arr = short_path\n # Empty the shortest path array for the next round if needed\n short_path = [] \n end\n # now arr stores the shortest path\n return arr\n end", "def shortest_path(start_node, end_node, graph)\n adjacent_edges = graph.select{ | edge | edge[NODES].include?(start_node) }\n remaining_edges = graph - adjacent_edges\n shortest_path = Path.new\n adjacent_edges.each do | edge |\n path = Path.new [edge]\n neighbor_node = (edge[NODES] - [start_node])[0] # ['A', 'B'] - ['A'] => ['B']\n unless neighbor_node == end_node\n path_ahead = shortest_path(neighbor_node, end_node, remaining_edges)\n (path_ahead.empty?)? path.clear : path.concat(path_ahead)\n end \n shortest_path = path if path.distance < shortest_path.distance\n end\n shortest_path\n end", "def shortest_paths(src, destinations)\n return [] if destinations.empty?\n\n paths = []\n visited = Set.new([src])\n queue = Containers::MinHeap.new\n queue.push([1, [src]])\n\n until queue.empty?\n _, path = queue.pop\n\n # Not going to find shorter paths than current best, return.\n break if paths.any? 
&& paths[0].size < path.size\n\n cur = path.last\n paths << path if destinations.include?(cur)\n\n neighbors(cur).each do |pos|\n next if visited.include?(pos) || occupied?(pos)\n\n visited.add(pos)\n new_path = Array.new(path.size) { |i| path[i].dup }\n new_path << pos\n queue.push([new_path.size, new_path])\n end\n end\n\n paths\n end", "def shortest_paths(dest)\n position = dest\n final = {}\n analisados = {}\n route = []\n route << dest\n @previous['a'] = -1\n\n @nodes.each do |n|\n analisados[n] = false\n end\n analisados[position] = true\n\n while analisados(analisados)\n adyacentes(position, analisados).each do |n|\n if @distance[n] == (@distance[position] - graph[n][position])\n @previous[position] = n\n position = n\n route << n\n end\n analisados[n] = true\n end\n\n end\n route << 'a'\n route\n end", "def plan(s1, s2)\r\n if s1 == s2\r\n return []\r\n end\r\n\r\n condensed_path = Array.new\r\n full_path = Array.new\r\n temp = BFS.new(graph, find_node(s1)).shortest_path_to(find_node(s2))\r\n\r\n temp.each {|x| full_path.push(x.to_s)}\r\n condensed_path.push(full_path.first)\r\n condensed_path = condensed_path + transfer_stations(full_path)\r\n \r\n if condensed_path.last != full_path.last #need to test this more\r\n condensed_path << full_path.last\r\n end\r\n\r\n return condensed_path\r\n end", "def shortest_path_from(from, check=nil)\n dirs = [ [1,0], [0,1], [0,-1], [-1,0] ]\n \n #return [1,0]\n \n unless @cache and @cached_for == towers.keys+[check]\n\tmarked = {}\n\tmarked.default = false\n\n\tq = [Config.monsters_end_at]\n\tfirst = 0\n\t\n\tmarked[Config.monsters_end_at] = true\n\t\n\twhile first < q.size\n\t v = q[first]\n\t first += 1\n\t for i in dirs\n\t w = [v[0]+i[0], v[1]+i[1]]\n\t next if w != Config.monsters_start_at and w != Config.monsters_end_at and\n\t\t (w[0] < 0 or w[1] < 0 or w[0] >= Config.map_size[0] or w[1] >= Config.map_size[1])\n\t next if marked[w] or w == check or towers[w]\n\t marked[w] = [-i[0], -i[1] ]\n\t q << w\n\t end\n\tend\n\t\n\t@cached_for = towers.keys+[check]\n\t@cache = marked\n end\n \n return @cache[from]\n end", "def shortest_way(source, dest)\n\t\t@source = source\n dijkstra source\n \n if @distances[dest] != @infinity\n return @distances[dest]\n end\n\tend", "def shortest_paths(source, dest)\n\t\t\t@source = source\n\t\t\tdijkstra source\n\t\t\tprint_path dest\n\t\t\treturn @distance[dest]\n\t\tend", "def find_shortest_path(rolling_node)\n\n @backtrack = []\n @backtrack << @goal_node\n\n # iterate until we arrive at the start node\n while rolling_node[:prev] != nil do\n temp_node = @node_list.find { |hash| hash[:id] == rolling_node[:prev] }\n @backtrack << temp_node[:id]\n rolling_node = temp_node\n end\n\n # create a table with the 1d and the 2d array node values\n @shortest_path = []\n\n @backtrack.each do |p|\n @shortest_path << [p, @table_convert[p]]\n @shortest_path_coords << @table_convert[p][1]\n end\n end", "def shortest_paths(source)\n init(source)\n relax_edges\n PathBuilder.new(source, @visitor.parents_map).paths(@graph.vertices)\n end", "def get_path(s, e, maze)\n if maze[s[0]][s[1]] == 1 || # found a wall or visited area\n s[0] < 0 || s[1] < 0 || # top or left edge\n s[0] >= maze.length || s[1] >= maze[0].length # bottom or right edge\n\n path = nil\n\n elsif s[0] == e[0] && s[1] == e[1] # exit spot\n\n path = [s]\n\n else\n\n maze[s[0]][s[1]] = 1 # visit current node\n path =\n get_path([s[0] - 1, s[1]], e, maze) ||\n get_path([s[0], s[1] + 1], e, maze) ||\n get_path([s[0], s[1] - 1], e, maze) ||\n get_path([s[0] + 1, 
s[1]], e, maze)\n\n path << s unless path.nil?\n\n end\n \n return path\nend", "def endpoints\n connectors = {\n :NW => [2, 2],\n :SE => [98, 98],\n :NE => [98, 2],\n :SW => [2, 98],\n :N => [50, 0],\n :S => [50, 100],\n :E => [100, 50],\n :W => [0, 50]\n }\n shortest_path = 1000000\n generated_endpoints = []\n connectors.each do |from_key, from_connector|\n ep_from = [from.x + from_connector[0], from.y + from_connector[1]]\n connectors.each do |to_key, to_connector|\n ep_to = [to.x + to_connector[0], to.y + to_connector[1]]\n path = Math.sqrt(((ep_from[0] - ep_to[0]).abs ** 2) + ((ep_from[1] - ep_to[1]).abs ** 2)).to_i\n if path < shortest_path + 25\n shortest_path = path\n generated_endpoints = [ep_from, ep_to]\n end\n end\n end\n return generated_endpoints\n end", "def shortest_path(from_x, from_y, to_x, to_y)\n @visited = Array.new(@matrix.size) { Array.new(@matrix.first.size) { false } }\n @farthest_node = nil\n queue = Queue.new\n queue << Node.new(from_x, from_y, 0)\n\n while !queue.empty? do\n node = queue.pop\n\n if !@farthest_node || node.dist > @farthest_node.dist\n @farthest_node =node\n end\n\n if node.x == to_x && node.y == to_y\n # We pathed to the target\n target_node = node\n break\n end\n [[-1,0],[1,0],[0,1],[0,-1]].each do |dir|\n x = node.x + dir[0]\n y = node.y + dir[1]\n if is_valid?(x, y)\n @visited[y][x] = true\n queue.push(Node.new(x, y, node.dist + 1, node))\n end\n end\n end\n\n # We didn't find a path to the target\n return nil unless target_node\n\n # Trace back the journey\n journey = []\n journey.push [node.x,node.y]\n while !node.parent.nil? do\n node = node.parent\n journey.push [node.x,node.y]\n end\n journey.reverse.drop(1)\n end", "def return_shortest_path(from)\r\n\r\n queue = Queue.new\r\n queue << from\r\n from.distance = 0\r\n while(!queue.empty?)\r\n v= queue.pop\r\n count=0\r\n adjDir = find_adjacent_rooms(v.roomObject)\r\n while(count < adjDir.length)\r\n w = @vertices[v.roomObject.return_title(adjDir[count])]\r\n\r\n if(w.distance==Float::INFINITY)\r\n w.distance = v.distance + 1\r\n w.path = v.path + \" \" + adjDir[count].to_s()\r\n queue << w\r\n end\r\n count = count + 1\r\n end\r\n count=0\r\n end\r\n\r\n end", "def find_shortest_path(start_node, end_node)\n\n\t\tif (!start_node || !end_node)\n\t\t\traise \"start and end nodes must be specified\"\n\t\tend\n\n\t\tqueue = Hash[@edges.keys.map { |k| [k, nil] }]\n\t\tqueue[start_node] = 0\n\n\t\tdistances = queue.dup\n\t\tcrumbs = {}\n\n\t\twhile queue.size > 0\n\n\t\t\texpanded_node = get_min(queue)\n\n\t\t\t# Check if the current path to each neighbor of the expanded_node\n\t\t\t# is shorter than the path currently stored on the distances hash\n\t\t\t@edges[expanded_node].each do |node, edge|\n\n\t\t\t\tif distances[expanded_node]\n\t\t\t\t\n\t\t\t\t\tcurrent_path_distance = distances[expanded_node] + edge.weight\n\n\t\t\t\t\t# The distance to node is shorter via the current path or the distance to node hasn't yet been computed.\n\t\t\t\t\t# Either way, the distance from start_node->node is updated with the current distance (since it is shorter)\n\t\t\t\t\tif (!distances[node] || current_path_distance < distances[node])\n\t\t\t\t\t\tdistances[node], queue[node] = current_path_distance, current_path_distance\n\t\t\t\t\t\tcrumbs[node] = expanded_node\n\t\t\t\t\tend\n\n\t\t\t\tend\n\n\t\t\tend\n\n\t\t\tqueue.delete(expanded_node)\n\n\t\tend\n\n\t\t# List of edges representing the shortest path from start_node to end_node\n\t\tshortest_path = []\n\t\tcurrent_node = end_node\n\n\t\twhile 
(current_node && current_node != start_node && crumbs.size > 0)\n\t\t\tprevious_node = crumbs[current_node]\n\t\t\tif (previous_node)\n\t\t\t\tshortest_path << @edges[previous_node][current_node]\n\t\t\t\tcrumbs.delete(current_node)\n\t\t\tend\n\t\t\tcurrent_node = previous_node\n\t\tend\n\n\t\treturn shortest_path.reverse\n\n\tend", "def next_step_to_shortest_path(from_x, from_y, to_x, to_y)\n move = shortest_path(from_x, from_y, to_x, to_y)&.first\n return nil unless move\n if move[0] == from_x && move[1] == from_y + 1\n return 'S'\n elsif move[0] == from_x && move[1] == from_y - 1\n return 'N'\n elsif move[0] == from_x + 1 && move[1] == from_y\n return 'E'\n elsif move[0] == from_x - 1 && move[1] == from_y\n return 'W'\n end\n raise 'This should not happen'\n end", "def shortest_path(src, dst = nil)\n distances = {}\n previouses = {}\n self.each do |node|\n distances[node] = nil\n previouses[node] = nil\n end\n distances[src] = 0\n nodes = self.clone\n until nodes.empty?\n nearest_node = nodes.inject do |a, b|\n next b unless distances[a]\n next a unless distances[b]\n next a if distances[a] < distances[b]\n b\n end\n break unless distances[nearest_node] # Infinity\n if dst and nearest_node == dst\n return distances[dst]\n end\n neighbors = nodes.neighbors_from(nearest_node)\n neighbors.each do |node|\n alt = distances[nearest_node] + nodes.distance(nearest_node, node)\n if distances[node].nil? or alt < distances[node]\n distances[node] = alt\n previouses[node] = nearest_node\n # decrease-key v in Q # ???\n end\n end\n nodes.delete nearest_node\n end\n if dst\n return nil\n else\n return distances\n end\n end", "def shortest_path( dest, exclusions = [] )\n exclusions ||= []\n previous = shortest_paths( exclusions )\n s = []\n u = dest.hex\n while previous[ u ]\n s.unshift u\n u = previous[ u ]\n end\n s\n end", "def shortest_path(v,w)\n raise ArgumentError unless path?(v,w) \n to_edge = []\n bfs(w) { |v1,v2| to_edge[v2] = v1 }\n result = []\n x = v\n while x != w\n result << x\n x = to_edge[x]\n end\n result << x\n end", "def find_path(start, goal)\n raise \"loc1 must not be the same as loc2\" if start == goal\n\n # Using A* path-finding algorithm\n # See pseudocode here: https://en.wikipedia.org/wiki/A*_search_algorithm\n # https://www.redblobgames.com/pathfinding/a-star/introduction.html\n # NOTE that this is overkill for this problem...\n open_set = Set.new([start])\n came_from = {}\n\n # Default value of \"Infinity\", but we can just use nil\n g_score = {}\n g_score[start] = 0\n\n # f_score = g_score[node] + h_score[node]\n # This uses both current best path (g score) aka similar to Djikstra's algorithm,\n # plus the heuristic score.\n f_score = {}\n # g_score[start] is 0, so not included here\n f_score[start] = h_score(start, goal)\n\n # Note that we add d_score as the weight of the edge, but in our\n # case, we consider all edges equally, so hardcode 1\n d_score = 1\n\n until open_set.empty? do\n # Node in open set with lowest f score (would ideally use PriorityQueue)\n current = open_set.min_by { |node| f_score[node] }\n\n if current == goal\n return reconstruct_path(came_from, current)\n end\n\n open_set.delete(current)\n\n valid_neighbours(current).each do |neighbour_loc|\n tentative_g_score = g_score[current] + d_score\n if g_score[neighbour_loc].nil? || tentative_g_score < g_score[neighbour_loc]\n # This path to neighbor is better than any previous one. 
Record it!\n came_from[neighbour_loc] = current\n g_score[neighbour_loc] = tentative_g_score\n f_score[neighbour_loc] = g_score[neighbour_loc] + h_score(neighbour_loc, goal)\n if !open_set.include?(neighbour_loc)\n open_set << neighbour_loc\n end\n end\n end\n end\n\n raise \"error, no path found!\"\n end", "def shortest_paths( exclusions = [] )\n # Initialization\n exclusions ||= []\n source = hex\n dist = Hash.new\n previous = Hash.new\n q = []\n @game.map.each do |h|\n if not exclusions.include? h\n dist[ h ] = INFINITY\n q << h\n end\n end\n dist[ source ] = 0\n \n # Work\n while not q.empty?\n u = q.inject { |best,h| dist[ h ] < dist[ best ] ? h : best }\n q.delete u\n @game.map.hex_neighbours( u ).each do |v|\n next if exclusions.include? v\n alt = dist[ u ] + entrance_cost( v )\n if alt < dist[ v ]\n dist[ v ] = alt\n previous[ v ] = u\n end\n end\n end\n \n # Results\n previous\n end", "def shortest_path_between_nodes(initial, destination)\n initial.distance = 0\n\n current = initial\n loop do\n # at the destination node, stop calculating\n break if current == destination\n\n unvisited.delete(current)\n\n calculate_neighbor_shortest_distances(current)\n\n return nil if no_reachable_nodes\n\n current = unvisited.min_by(&:distance)\n end\n\n destination.path\n end", "def find_shortest_path(initial_node, final_node)\n\t\tunless @nodes.include?(initial_node) && @nodes.include?(final_node)\n\t\t raise(\"Either of the nodes not found in the Graph\") \n\t\tend\n\t\tdistance = {}\n\t previous = {}\n\t\tdistance[initial_node] = 0 # Distance from initial_node to initial_node\n\t previous[initial_node] = nil\n\t\tnodes_counted = @nodes\n\t\t\t\n\t\tnodes_counted.each do |n|\n\t\t if n != initial_node \n\t\t\t distance[n] = Float::INFINITY # Unknown distance function from initial_node to final_node\n\t\t\t previous[n] = nil \t # Previous node in optimal path from initial_node\n\t\t\tend\n\t\tend\n\n\t\tuntil nodes_counted.empty? 
\n\t\t\n\t\t\tu = distance.select{|k,v| nodes_counted.include?(k)}.min_by{|k,v| v}.first # Source node in first case\n\t\t\tbreak if (distance[u] == Float::INFINITY)\n\t\t\tnodes_counted.delete(u)\n\t\t\t\n\t\t\t@paths[u].keys.each do |v|\n\t\t\t\talt = distance[u] + @paths[u][v]\n\t\t\t\tif alt < distance[v] # A shorter path to v has been found\n\t\t\t\t\tdistance[v] = alt\n\t\t\t\t\tprevious[v] = u\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t \n\t\tpath = []\n\t\tcurrent = final_node\n\t\twhile current\n\t\t\tpath.unshift(current)\n\t\t\tcurrent = previous[current]\n\t\tend\n \n\t\treturn distance[final_node], path\n\n\tend", "def get_route(start_arr, end_arr)\n root = Position.new(start_arr[0], start_arr[1])\n target = Position.new(end_arr[0], end_arr[1])\n solution = get_target_value(target, root)\n\n route = []\n route.unshift([target.x, target.y])\n location = solution.parent\n until location == nil\n route.unshift [location.x, location.y]\n location = location.parent\n end\n return route\nend", "def computeSP\n @graph.nodes.each do |node|\n @dist_to[node]=Float::INFINITY\n end\n @dist_to[@source] = 0\n \n #node and it's distance to source (source -> source = 0)\n @queue.insert(@source, 0)\n \n #while elements exist in the queue\n while @queue.any?\n #check the lowest distance (closest)\n close = @queue.remove_min\n #check each adjacent edge\n close.adjacent_edges.each do |adjacent|\n relax(adjacent)\n end\n end\n end", "def shortest_path_to_all_nodes(initial)\n initial.distance = 0\n\n current = initial\n loop do\n unvisited.delete(current)\n\n calculate_neighbor_shortest_distances(current)\n\n return graph.vertices if no_reachable_nodes\n\n current = unvisited.min_by(&:distance)\n end\n end", "def shortest_path(root, dest)\n priority = Hash.new\n visited = Hash.new\n previous = Hash.new\n q = PriorityQueue.new\n @vertices.each do |k, up|\n if up\n priority[k] = @@infinity\n visited[k] = false\n previous[k] = nil\n end\n end\n priority[root] = 0\n q[root] = 0\n until q.empty?\n u = q.delete_min\n visited[u[0]] = true\n break if u[1] == @@infinity\n @edges_cost.each_key do |s|\n @edges_cost[s].each_key do |d|\n if edges_up[s].fetch(d) and vertices[s] and vertices[d]\n if !visited[d] and priority[s] + @edges_cost[s].fetch(d) < priority[d]\n previous[d] = s\n priority[d] = priority[s] + @edges_cost[s].fetch(d)\n q[d] = priority[d]\n end\n end\n end\n end\n end\n prior = dest\n out = \"#{prior} \"\n while previous[prior]\n out = \"#{previous[prior]} \" + out\n prior = previous[prior]\n end\n out += \"#{priority[dest]}\\n\"\n print out\n priority[dest]\n end", "def find_path(goal = @maze.find_end)\n path = [goal]\n spot = goal\n until @branching_paths[spot] == nil\n path << @branching_paths[spot]\n spot = @branching_paths[spot]\n end\n path\n end", "def search_jps\n open_list = [@nodes[@route.start_id]]\n close_list = []\n goal = @nodes[@route.goal_id]\n\n until open_list.empty?\n n = open_list.min_by { |node| @route.estimated_cost(node) }\n if n == goal\n @route.found = true\n break\n end\n\n close_list.push( open_list.delete(n) )\n\n adjacents_of_n = n.pruned_neighbors(@route.parent(n))\n adjacents_of_n.keys.each do |m|\n j = jump(n, clamp(m.x - n.x, -1, 1), clamp(m.y - n.y, -1, 1))\n next if j == nil or close_list.include?(j)\n h = @heuristic.call(j, goal)\n new_real_cost_j = @route.real_cost(n) + Math.sqrt((n.x-j.x)**2 + (n.y-j.y)**2) # g\n new_estimated_cost_j = new_real_cost_j + h # f = g + h\n if open_list.include?(j)\n # If estimated costs are equal then use real costs for more precise 
comparison (or we may get less optimal path).\n next if new_estimated_cost_j > @route.estimated_cost(j)\n next if new_estimated_cost_j == @route.estimated_cost(j) && new_real_cost_j >= @route.real_cost(j)\n @route.record(j, n, new_real_cost_j, h)\n else\n open_list.push(j)\n @route.record(j, n, new_real_cost_j, h)\n end\n @visited << j.id unless @visited.include? j.id # stats\n end\n @search_iter += 1 # stats\n end\n end", "def next_step_from(paths)\n paths.map do |path|\n possible_neighbours(path.most_recent_step.coordinate, @max_x, @max_y) \n .map { |c| coord_to_step_on_path(c, path) }\n .reject { |p| p == nil }\n end\n .flatten\n end", "def shortest_route visit_path, dst, condition=nil\n return route(visit_path, dst).first.cost\n end", "def get_path(double_array, path_cost, path_time)\n double_array.each do |path|\n correct_path = []\n cost = 0\n time = 0\n count = 0\n until count == path.size - 1\n correct_edge = @edge_array.find { |i| \n i.source == path[count] && i.destination == path[count+1] \n }\n cost += correct_edge.cost\n time += correct_edge.time\n count += 1\n end\n if cost == path_cost && time == path_time\n return path\n end\n end\n end", "def find_path()\n visited = Array.new(8) {Array.new(8)}\n return [] if @destination == @currentPosition\n paths = [[@currentPosition]]\n visited[@currentPosition[0]][@currentPosition[1]] = true\n\n until paths.empty?\n new_paths = []\n paths.each do |path|\n next_positions = possibleMoves(path.last, visited)\n next_positions.each do |move|\n newpath = path.dup << move\n if move == @destination #if we reached our destination stop and return the path\n return newpath\n end\n visited[move[0]][move[1]] = true\n new_paths.push(newpath)\n end\n end\n paths = new_paths\n end\n end", "def path(start,predicate)\n array = Array.new\n start.each { |elem|\n array.push(elem)\n if predicate.call(elem)\n return array\n end\n }\n nil\n end", "def solve_dijkstra\n\n unvisited_set = @unvisited_set.dup\n\n # create a queue for nodes to check\n @queue = []\n current_node = @start_node\n @queue << current_node\n\n # Stop If there are no unvisited nodes or the queue is empty\n while unvisited_set.size > 0 && @queue.size > 0 do\n\n # set the current node as visited and remove it from the unvisited set\n current_node = @queue.shift\n\n # remove visited node from the list of unvisited nodes\n unvisited_set.delete(current_node)\n\n # find the current node's neighbours and add them to the queue\n rolling_node = @node_list.find { |hash| hash[:id] == current_node }\n rolling_node[:neighs].each do |p|\n # only add them if they are unvisited and they are not in the queue\n if unvisited_set.index(p) && [email protected]?(p)\n @queue << p\n # set the previous node as the current for its neighbours\n change_node = @node_list.find { |hash| hash[:id] == p }\n change_node[:prev] = current_node\n # increase the distance of each node visited\n change_node[:dist] = rolling_node[:dist] + @step\n end\n end\n\n if current_node == @goal_node\n find_shortest_path(rolling_node)\n break\n end\n end\n return @shortest_path_coords\n end", "def select_possible_path(possible_paths)\n vertex, data = possible_paths.min_by do |vertex, data|\n data[:cost]\n end\n vertex\nend", "def get_closest_edge_vertices(lat, lon)\n #Override this function in the corresponding extension (tiger and osm initially)\n return nil\n end", "def dijkstra_shortest_path(start, finish)\n visited, unvisited = Array.new, Array.new\n distances = Hash.new\n\n distances[start] = 0\n unvisited << start\n\n # find the 
distance\n while not unvisited.empty?\n curr_node = unvisited.pop\n visited << curr_node\n get_edges(curr_node).each do |edge| \n if visited.find_index(edge.out_vertex) == nil\n unvisited.unshift(edge.out_vertex) if unvisited.find_index(edge.out_vertex) == nil\n curr_distance, min_distance = distances[curr_node], distances[edge.out_vertex] || 1.0 / 0.0\n if curr_distance + edge.distance < min_distance\n distances[edge.out_vertex] = curr_distance + edge.distance\n end\n end\n end\n end\n\n # figure out the path\n previous = finish\n path = Array.new() \n path << previous\n while distances[previous] != 0\n get_edges(previous).each do |edge|\n if previous != edge.in_vertex && distances[edge.in_vertex] + edge.distance == distances[previous]\n previous = edge.in_vertex\n path << previous\n break\n end\n end\n end\n \n return distances[finish], path.reverse\n end", "def shortest_path_to_user(start_coord)\n queue = Queue.new\n queue << [start_coord]\n seen = Set.new([start_coord])\n while queue\n begin\n path = queue.pop(non_block = true)\n rescue ThreadError\n return nil\n end\n x, y = path[-1]\n if @map[y][x] == @USER\n return path\n end\n for x2, y2 in [[x + 1, y], [x - 1, y], [x, y + 1], [x, y - 1]]\n if (0 <= x2 && x2 < @map[0].length) && (0 <= y2 && y2 < @map.length) && (@map[y2][x2] != @WALL && @map[y2][x2] != @PERMANENT_WALL) && !seen.include?([x2, y2])\n queue << (path + [[x2, y2]])\n seen.add([x2, y2])\n end\n end\n end\n end", "def shortest_paths(source)\n level = 0\n nextlevel = [source]\n seen = { source => level }\n pred = { source => [] }\n until nextlevel.empty?\n level += 1\n thislevel = nextlevel\n nextlevel = []\n thislevel.each do |v|\n neighbors_of(v).each do |w|\n next if (seen.keys.include? w) && (seen[w] != level)\n unless seen.keys.include? w\n pred[w] = []\n seen[w] = level\n nextlevel << w\n end\n pred[w] << v\n end\n end\n end\n [pred, seen]\n end", "def get_spaces_to_pass\n return nil unless @start_space\n return nil unless @end_space\n\n dijkstra = Dijkstra.new(SpaceConnection.all)\n path = dijkstra.shortest_path(@start_space.id, @end_space.id)\n path\n end", "def shortest_circuit()\n shortest_cir = Array.new\n shortest_weight = 99999\n\n vert = vertex_list()\n start_point = vert[0]\n vert.delete_at(0)\n\n vert_perm = vert.permutation.to_a()\n\n vert_perm.each{ |x|\n x.insert(0,start_point)\n x.insert(x.length,start_point)\n weight = path_weight(x)\n \n if weight == nil\n weight = 99999\n end\n\n if weight < shortest_weight\n shortest_weight = path_weight(x)\n shortest_cir = x\n end\n }\n return \"Shortest Circuit = \",shortest_cir.inspect, \"\\nWeight = \", shortest_weight\n\n end", "def best_path(start, target)\n queue = []\n path = []\n targetX = target[0]\n targetY = target[1] \n update_possible_moves(start)\n path << [@x, @y]\n until @x == targetX && @y == targetY\n @moves.each do |valid_move|\n queue << valid_move unless out_of_bounds?(valid_move) \n end\n #shift because we want bread-first search\n next_move = queue.shift\n update_possible_moves(next_move)\n path << [@x, @y] \n end\n # Filter out the best path and present it\n best_possible_path = filter_path(path)\n puts \"You made it in #{best_possible_path.length} moves! 
The path is:\\n#{best_possible_path}\"\n end", "def bfs_shortest_path(node1, node2)\n distance, route = breadth_first_search(node1)\n step = distance[node2]\n node = node2\n path = [ node2 ]\n while node != node1 and route[node]\n node = route[node]\n path.unshift(node)\n end\n return step, path\n end", "def dijkstras([email protected][0],goal=nil)\n\t\t# Set of visited nodes\n\t\tvisited = []\n\n\t\t# Step 1 \n\t\t# - Start node weighs 0\n\t\t# - Set all other to infinity its done in constructor already\n\t\t@vertices[start] = 0\n\n\t\t# Step 2 \n\t\t# - Set initial node as current\n\t\t# - Mark all nodes unvisited except start node\n\t\tunvisited = @vertices.keys - [start]\n\t\tcurrent = start\n\n\t\twhile(!unvisited.empty?)\n\t\t\t# Step 3\n\t\t\t# - Consider all neighbors of current node\n\t\t\t# - Calculate distance cost: current path weight + edge weight\n\t\t\t# - Update distance if this distance is less than recorded distance\n\t\t\t\n\t\t\t@map[current].each do |neighbor|\n\t\t\t\tnext if visited.include?(neighbor.to)\n\t\t\t\tweight = @vertices[current] + neighbor.weight.to_i\n\t\t\t\tif weight < @vertices[neighbor.to]\n\t\t\t\t\t@vertices[neighbor.to] = weight\n\t\t\t\t\t@prev[neighbor.to] = current\n\t\t\t\tend\n\t\t\tend\n\n\t\t\t# Step 4\n\t\t\t# - Add current node to visited\n\t\t\t# - Remove current node from unvisited\n\t\t\tvisited << current\n\t\t\tunvisited -= [current]\n\n\t\t\t# Find the smallest weighted node, could use a PQueue instead\n\t\t\tsmallest = @infinity\n\t\t\tcurrent = @infinity\n\t\t\tunvisited.each do |node|\n\t\t\t\tif @vertices[node] < smallest\n\t\t\t\t\tsmallest = @vertices[node]\n\t\t\t\t\t# Step 6\n\t\t\t\t\t# - Set smallest weighted node as current node, continue\n\t\t\t\t\tcurrent = node\n\t\t\t\tend\n\t\t\tend\n\n\t\t\t# Step 5\n\t\t\t# - If goal is in visited, stop\n\t\t\t# - If full traversal without a goal? \n\t\t\t# \tCheck for infinity weighted node\n\t\t\tif visited.include? 
goal\t\t\n\t\t\t\tpath(goal)\n\t\t\t\treturn\n\t\t\tend\n\t\t\tbreak if current == @infinity\n\t\tend\n\t\t\n\t\t# Print all shortest paths\n\t\tputs \"Initial Node: #{start}\"\n\t\tvisited.each do |x|\n\t\t\tpath(x)\n\t\t\tputs\n\t\tend\n\tend", "def dijkstra_shortest_path(source, destination, graph)\n distances, previouses = Hash.new(Float::INFINITY), Hash.new\n distances[source] = 0\n\n vertices = graph.vertices.dup\n\n while !vertices.empty?\n closest_vertex = vertices.min_by { |v| distances[v] }\n vertices.delete closest_vertex\n\n if closest_vertex == destination\n return path(previouses, destination, [])\n end\n\n closest_vertex.adjacent_edges.each do |e|\n neighbor = e.destination\n distance = e.weight + distances[closest_vertex]\n\n if distance < distances[neighbor]\n distances[neighbor] = distance\n previouses[neighbor] = closest_vertex\n end\n end\n end\n\n return [] #no path to destination\nend", "def calc_path\n endpoint = grid.target\n while endpoint\n search.path[endpoint] = true\n endpoint = search.came_from[endpoint]\n end\n end", "def min_path\n if @layers.count == 2\n return [ top ] + [ @layers[1].min ]\n end\n\n left_min_path = left_child.min_path\n left_min_path_sum = left_min_path.inject(0, :+)\n right_min_path = right_child.min_path\n right_min_path_sum = right_min_path.inject(0, :+)\n\n if left_min_path_sum < right_min_path_sum\n [ top ] + left_child.min_path \n else\n [ top ] + right_child.min_path\n end\n end", "def shortest_length(start, finish)\n infinity = (2**(0.size * 8 - 2) - 1) # max Fixnum integer value\n distances = {} # smallest distance from starting vertex to this one\n previous = {}\n cyclic = start == finish # true if starting vertex = ending vertex\n loops = 0 # useful for cyclic path\n vertex_pq = PriorityQueue.new\n\n adj_lists.each do |vertex|\n vname = vertex.name\n if vname == start\n distances[vname] = 0\n vertex_pq.enq(vname, 0)\n else\n distances[vname] = infinity\n vertex_pq.enq(vname, infinity)\n end\n previous[vname] = nil\n end\n\n while vertex_pq\n loops += 1\n # if cyclic, pretend starting vertex is unvisited. put it back in queue.\n if cyclic && loops == 2\n vertex_pq.enq(start, infinity)\n distances[start] = infinity\n end\n # vertex currently being checked. 
picks closest one first.\n current = vertex_pq.deq\n vname = current.keys.first\n\n # if we've arrived at final vertex, return shortest distance\n # if cyclic, skip this code during first loop\n if vname == finish && loops > 1\n shortest_path = []\n vname2 = vname\n while previous[vname2]\n shortest_path << vname2\n vname2 = previous[vname2]\n previous[start] = nil if cyclic # pretend starting vertex is unvisited\n end\n shortest_path = [start] + shortest_path.reverse\n print \"Shortest path: #{shortest_path}, Length = #{path_length(shortest_path)}\\n\"\n return distances[finish]\n end\n\n # leave if we never get to final vertex\n break if vname == nil || distances[vname] == infinity\n\n adj_vertices(vname, adj_lists).each do |vertex|\n alt_distance = distances[vname] + dist(vname, vertex)\n # if total distance to neighbor < last minimum total distance\n # to neighbor, replace it with this new distance\n if alt_distance < distances[vertex]\n distances[vertex] = alt_distance\n previous[vertex] = vname\n vertex_pq.enq(vertex, alt_distance)\n end\n end\n end\n\n end", "def knight_path(from, to)\r\n\topen_queue = [PositionPath.new( from, [copy(from)] )]\r\n\tputs open_queue.inspect\r\n\tputs open_queue.empty?\r\n\tdiscovered = [from]\r\n\r\n\tuntil open_queue.empty?\r\n\t\tcurrent = open_queue.shift\r\n\t\tputs current.inspect\r\n\r\n\t\treturn current.path if current.position == to\r\n\t\tvalid_moves(current.position).each do |move|\r\n\t\t\tputs \"ruch #{move} jest ok\"\r\n\t\t\tunless discovered.include?(move)\r\n\t\t\t\tputs \"tego ruchu jeszce nie bylo = #{move}\"\r\n\t\t\t\tdiscovered << move\r\n\t\t\t\topen_queue.push(make_position_path(current, move)) \r\n\t\t\t\tputs \"open_queue = #{open_queue.size}\"\r\n\t\t\tend\r\n\t\tend\r\n\tend\r\n\t\r\nend", "def build_path(start, finish)\n path = [finish]\n loop do\n vertex = path.last\n d = graph[*vertex]\n neighbours = get_neighbours(*vertex)\n next_vertex = neighbours.select{|n_vert| graph[*n_vert] == d - 1}.first\n path << next_vertex if next_vertex\n break if vertex == start\n end\n path\n end", "def find_path(start_node, end_node, grid)\n start_node = sanitize(start_node)\n end_node = sanitize(end_node)\n if grid.nil?\n [start_node]\n else\n _max_x = grid.max_x\n _max_y = grid.max_y\n\n @current_grid = grid.inner_grid.clone\n\n raise 'max_x & max_y required' unless _max_x && _max_y\n\n _start_node = start_node.clone\n _end_node = end_node.clone\n\n heuristic = @heuristic.new(_end_node, @weight)\n\n _start_node[:f] = 0 # sum of g and h\n _start_node[:g] = 0 # steps to start node\n _start_node[:h] = nil # steps to end node\n _start_node[:opened] = true\n\n # use heap or tree for better perf\n open = []\n open.push _start_node\n\n while !open.empty? do\n _current_node = open.pop\n\n _current_node[:closed] = true\n @current_grid[node_to_a(_current_node)] = _current_node\n\n if node_to_a(_current_node) == node_to_a(_end_node)\n return final_path(_current_node)\n end\n\n new_g = _current_node[:g] + 1\n\n x = _current_node[:x]\n y = _current_node[:y]\n\n neighbors = []\n\n neighbors << [x-1, y] if x > 0\n neighbors << [x, y-1] if y > 0\n neighbors << [x+1, y] if x < _max_x-1\n neighbors << [x, y+1] if y < _max_y-1\n\n _neighbors = neighbors.map do |position|\n node = @current_grid[position]\n if node.nil? 
|| node[:walkable]\n node ||= {}\n @current_grid[position] = node.merge({\n x: position.first,\n y: position[1],\n closed: false,\n opened: false\n })\n end\n end.compact\n\n _neighbors.each do |neighbor|\n if (!neighbor[:opened] || new_g < neighbor[:g])\n neighbor[:g] = new_g\n neighbor[:h] ||= heuristic.h(neighbor)\n neighbor[:f] = neighbor[:g] + neighbor[:h]\n neighbor[:parent] = node_to_a(_current_node)\n\n if (!neighbor[:opened])\n open.push neighbor\n neighbor[:opened] = true\n else\n # ???\n puts \"got here some how!!!\"\n end\n end\n end\n\n open.sort_by! {|i| [-i[:f], -i[:h]]}\n # grid_p\n end\n end\n end", "def find_shortest_path(exclude_path_with_tile = nil)\n node = find_end_tile(exclude_path_with_tile)\n return [@start_tile, @end_tile] if node.nil?\n expand_path(node)\n end", "def as_solution()\n idx = @tour.index(0)\n path = @tour[idx, @tour.length] + @tour[0, idx]\n path.push(0)\n end", "def path_to(node)\n return unless has_path_to?(node)\n path = []\n current_node = node\n\n while(current_node != @source_node) do\n path.unshift(current_node)\n current_node = @edge_to[current_node]\n end\n\n path.unshift(@source_node)\n end", "def path_to(node)\n return unless has_path_to?(node)\n path = []\n current_node = node\n\n while(current_node != @source_node) do\n path.unshift(current_node)\n current_node = @edge_to[current_node]\n end\n\n path.unshift(@source_node)\n end", "def dijkstras([email protected][0],goal=nil)\n\t\t# Set of visited nodes\n\t\tvisited = []\n\n\t\t# Step 1 \n\t\t# - Start node weighs 0\n\t\t# - Set all other to infinity its done in constructor already\n\t\t@vertices[start] = 0\n\n\t\t# Step 2 \n\t\t# - Set initial node as current\n\t\t# - Mark all nodes unvisited except start node\n\t\tunvisited = @vertices.keys - [start]\n\t\tcurrent = start\n\n\t\twhile(!unvisited.empty?)\n\t\t\t# Step 3\n\t\t\t# - Consider all neighbors of current node\n\t\t\t# - Calculate distance cost: current path weight + edge weight\n\t\t\t# - Update distance if this distance is less than recorded distance\n\t\t\t\n\t\t\t@map[current].each do |neighbor|\n\t\t\t\tnext if visited.include?(neighbor.to)\n\t\t\t\tweight = @vertices[current] + neighbor.weight.to_i\n\t\t\t\tif weight < @vertices[neighbor.to]\n\t\t\t\t\t@vertices[neighbor.to] = weight\n\t\t\t\t\t@prev[neighbor.to] = current\n\t\t\t\tend\n\t\t\tend\n\n\t\t\t# Step 4\n\t\t\t# - Add current node to visited\n\t\t\t# - Remove current node from unvisited\n\t\t\tvisited << current\n\t\t\tunvisited -= [current]\n\n\t\t\t# Find the smallest weighted node\n\t\t\tcurrent = unvisited.collect { |node| [@vertices[node],node] }\n\t\t\tcurrent.empty? ? current = @infinity : current = current.min[1]\n\n\t\t\t# Step 5\n\t\t\t# - If goal is in visited, stop\n\t\t\t# - If full traversal without a goal? \n\t\t\t# \tCheck for infinity weighted node\n\t\t\tif visited.include? goal\t\t\n\t\t\t\tpath(goal)\n\t\t\t\treturn\n\t\t\tend\n\t\t\tbreak if current == @infinity\n\t\tend\n\t\t\n\t\t# Print all shortest paths\n\t\tputs \"Initial Node: #{start}\"\n\t\tvisited.each do |x|\n\t\t\tpath(x)\n\t\t\tputs\n\t\tend\n\tend", "def path(start, goals, mode=:move_to, ignore={})\n start = start.map(&:to_i)\n goals = goals.flatten.size == 3 ? 
[goals.flatten] : goals\n\n if goals.reject { |goal| (start[0] - goal[0]).abs + (start[1] - goal[1]).abs + (start[2] - goal[2]).abs > MAX_PATH_SIZE }.empty?\n puts \"target too far away\"\n return nil\n elsif mode == :move_to && goals.select { |g| allowed?(*g) }.empty?\n puts \"can't go there...\"\n return nil\n # elsif mode == :next_to && goals.map { |g| available(*g, :next_to).any? { |l| allowed?(*l) } }.empty?\n # puts \"nothing to move next to anymore...\"\n # return nil\n end\n visited = {}\n next_to = {}\n examined = 0\n\n heap = Heap.new { |a, b| a.cost <=> b.cost }\n heap.add Path.new(start, goals, [])\n\n while !heap.empty?\n point = heap.next\n\n if point.path.size > MAX_PATH_SIZE\n puts \"examined #{examined} paths before giving up\"\n return nil\n end\n\n next if visited[point.point]\n visited[point.point] = point\n\n examined += 1\n\n case mode\n when :move_to\n if goals.include?(point.point)\n final_path = point.path + [point.point]\n final_path.shift # don't need the start point, we're already there\n # puts \"examined #{examined} paths\"\n return final_path\n end\n\n when :away_from\n above = point.point.dup\n above[1] += 1\n if !goals.include?(point.point) && !goals.include?(above)\n final_path = point.path + [point.point]\n return final_path\n end\n\n when :next_to\n next_to[point.point] ||= available(*point.point, :build)\n available_for_building = next_to[point.point]\n if available_for_building.any? { |a| goals.include? a }\n final_path = point.path + [point.point]\n final_path.shift # don't need the start point, we're already there\n # puts \"examined #{examined} paths\"\n return final_path\n end\n\n else\n raise \"unknown pathfinding mode: #{mode.inspect}\"\n end\n\n next_available = available(*point.point, :move, ignore).each do |test|\n next if visited[test]\n heap.add Path.new(test, goals, point.path + [point.point])\n end\n end\n nil\n end", "def a_star pitch, start, goal\n\t\t# The set of nodes already evaluated.\n\t\tclosedset = []\n\t\t# The set of tentative nodes to be evaluated.\n\t\topenset = []\n\t\t# Visited nodes\n\t\tfrontier = []\n\t\topenset << start\n\t\t# The map of navigated nodes.\n\t\tcame_from = { }\n\t\t# Distance from start along optimal path.\n\t\tg_score, h_score, f_score = { }, { }, { }\n\t\tg_score[start] = 0\n\t\th_score[start] = dist start, goal, :manhattan\n\t\t# Estimated total distance from start to goal through y.\n\t\tf_score[start] = h_score[start]\n\n\t\t# Main loop\n\t\twhile not openset.empty?\n\t\t\t# Fetching the node among openset with the least f_score\n\t\t\tx, _value = [], 1_000_000\n\t\t\topenset.each do |key|\n\t\t\t\tx, _value = key, f_score[key] if f_score[key] < _value\n\t\t\tend\n\n\t\t\tbreak if x == goal # We reached target point and thus finished looking for it !!\n\n\t\t\t# Moving x from openset to closedset\n\t\t\topenset.delete x\n\t\t\tclosedset << x\n\n\t\t\t(-1..1).each do |i|\n\t\t\t\t(-1..1).each do |j|\n\t\t\t\t\ty = [x[0] + i, x[1] + j]\n\t\t\t\t\tunless i == 0 and y == 0\n\t\t\t\t\t\tif pitch[y].nil? # We only want to explore neighbours\n\t\t\t\t\t\t\tnext if closedset.include? y # If already in closedset, we skip it\n\n\t\t\t\t\t\t\tbetter = false\n\t\t\t\t\t\t\th = dist x, y, :manhattan\n\t\t\t\t\t\t\tg = g_score[x] + h\n\n\t\t\t\t\t\t\tif not openset.include? y then\n\t\t\t\t\t\t\t\treturn [] if frontier.include? 
y\n\t\t\t\t\t\t\t\tfrontier << y\n\t\t\t\t\t\t\t\topenset << y # Adding current neighbours to openset\n\t\t\t\t\t\t\t\tbetter = true\n\t\t\t\t\t\t\telsif g < g_score[y]\n\t\t\t\t\t\t\t\tbetter = true\n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\tbetter = false\n\t\t\t\t\t\t\tend\n\n\t\t\t\t\t\t\t# Updating what needs to be\n\t\t\t\t\t\t\tif better then\n\t\t\t\t\t\t\t\tcame_from[y] = x\n\t\t\t\t\t\t\t\tg_score[y] = g\n\t\t\t\t\t\t\t\th_score[y] = dist y, goal, :manhattan # heuristic estimate of distance (y, coords)\n\t\t\t\t\t\t\t\tf_score[y] = g_score[y] + h_score[y]\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\t\t# Finally assembling path and returning it\n\t\tpath = []\n\t\t_cur = goal\n\t\twhile _cur != start do\n\t\t\tpath << _cur\n\t\t\t_cur = came_from[_cur]\n\t\tend\n\n\t\treturn path.reverse\n\tend", "def find_path(source, target, map)\n @main_loop_count = 0\n\n max_y = map.size - 1\n max_x = map[0].size - 1\n target_x = target[0]\n target_y = target[1]\n # target heuristic is 0\n target = [target_x, target_y, 0]\n\n # Sets up the search to begin from the source\n source = source.dup.push((target_x - source[0]).abs + (target_y - source[1]).abs)\n came_from = {}\n came_from[source] = nil\n frontier = [source]\n\n # Until the target is found or there are no more cells to explore from\n until came_from.has_key?(target) || frontier.empty?\n @main_loop_count += 1\n\n # Take the next frontier cell\n new_frontier = frontier.shift\n\n # Find the adjacent neighbors\n adjacent_neighbors = []\n\n # Gets all the valid adjacent_neighbors into the array\n # From southern neighbor, clockwise\n nfx = new_frontier[0]\n nfy = new_frontier[1]\n adjacent_neighbors << [nfx , nfy - 1, (target_x - nfx).abs + (target_y - nfy + 1).abs] unless nfy == 0\n adjacent_neighbors << [nfx - 1, nfy - 1, (target_x - nfx + 1).abs + (target_y - nfy + 1).abs] unless nfx == 0 || nfy == 0\n adjacent_neighbors << [nfx - 1, nfy , (target_x - nfx + 1).abs + (target_y - nfy).abs] unless nfx == 0\n adjacent_neighbors << [nfx - 1, nfy + 1, (target_x - nfx + 1).abs + (target_y - nfy - 1).abs] unless nfx == 0 || nfy == max_y\n adjacent_neighbors << [nfx , nfy + 1, (target_x - nfx).abs + (target_y - nfy - 1).abs] unless nfy == max_y\n adjacent_neighbors << [nfx + 1, nfy + 1, (target_x - nfx - 1).abs + (target_y - nfy - 1).abs] unless nfx == max_x || nfy == max_y\n adjacent_neighbors << [nfx + 1, nfy , (target_x - nfx - 1).abs + (target_y - nfy).abs] unless nfx == max_x\n adjacent_neighbors << [nfx + 1, nfy - 1, (target_x - nfx - 1).abs + (target_y - nfy + 1).abs] unless nfx == max_x || nfy == 0\n\n new_neighbors = adjacent_neighbors.select do |neighbor|\n # That have not been visited and are not walls\n unless came_from.has_key?(neighbor) || map[neighbor[1]][neighbor[0]] != '.'\n # Add them to the frontier and mark them as visited\n # frontier << neighbor\n came_from[neighbor] = new_frontier\n end\n end\n\n # Sort the frontier so cells that are close to the target are then prioritized\n if new_neighbors.length > 0\n new_neighbors = merge_sort(new_neighbors)\n if frontier.length > 0 && new_neighbors[0][2] >= frontier[0][2]\n frontier = merge_sort(new_neighbors.concat(frontier))\n else\n frontier = new_neighbors.concat(frontier)\n end\n end\n end\n\n # If the search found the target\n if came_from.has_key?(target)\n # Calculates the path between the target and star for the greedy search\n # Only called when the greedy search finds the target\n path = []\n next_endpoint = came_from[target]\n while 
next_endpoint\n path << [next_endpoint[0], next_endpoint[1]]\n next_endpoint = came_from[next_endpoint]\n end\n path\n else\n return nil\n end\n end", "def shortest_distance_to(node)\n @distances[node]\n end", "def findPath(startPoint, endPoint)\n # Initialize node array\n # This array will host all the map points we are going to visit\n # On each visit the point will be removed and marked as visited\n q = []\n # This is tie visited array that keeps the visit index\n @visited = Array.new(@width * @height, 0)\n \n # Add first point to visited\n @visited[self.getXYIndex(startPoint)] = 1\n \n # Add current point (the start point) to the search pool\n currentPoint = XYLoc.new(startPoint.x, startPoint.y)\n q.push(currentPoint)\n\n # Search until all points are searched\n while (q.count() > 0)\n # Get first item (and remove from the pool)\n pnext = q.shift\n \n # Get neighbors/children\n succList = self.getNeighbors(pnext)\n for succ in succList\n # Check if point already visited\n if (@visited[self.getXYIndex(succ)] >= 1)\n next\n end\n # Set visited index as the current visited index + 1\n @visited[self.getXYIndex(succ)] = @visited[self.getXYIndex(pnext)] + 1\n \n # Check if the end point is found\n if (succ.x == endPoint.x && succ.y == endPoint.y)\n # Extract path\n return self.extractPath(endPoint)\n end\n \n # Point is not the goal point.\n # Push the point into the search pool.\n q.push(succ)\n end\n end\n \n # The search pool is empty and the goal point hasn't\n # been reached. Return empty path.\n return []\n end", "def decode_path(goal_node)\n path = []\n until goal_node.nil?\n path.unshift goal_node\n goal_node = goal_node.previous\n end\n return path\nend", "def grue_path(grue_room, player_room, edges)\n shortest_path = []\n\n edges_to_check = edges.select do |arr|\n arr.first == grue_room\n end\n\n unchecked_edges = edges - edges_to_check\n\n edges_to_check.each do |e|\n path = [e]\n if e[1] != player_room\n\n next_node = (e - [grue_room]).first\n\n remaining_path = grue_path(next_node, player_room, unchecked_edges)\n\n if remaining_path.empty?\n path.clear\n else\n path += remaining_path\n end\n else\n path\n end\n\n if path_distance(path) < path_distance(shortest_path)\n shortest_path = path\n end\n end\n\n shortest_path\n end", "def min_cost(cost, inf)\n # dist[i] stores minimum cost to reach station i from station 0.\n dist = {};\n\n i = 0\n n = cost.length\n while i < n do\n dist[i] = inf\n i += 1\n end\n dist[0] = 0\n\n # Go through every station and check if using it as an intermediate station gives better path\n i = 0\n while i < n do\n j = i + 1\n while j < n do\n c = dist[i] + cost[i][j]\n dist[j] = c if dist[j] > c\n j += 1\n end\n i += 1\n end\n return dist[n-1];\nend", "def path_from_src_to_dest(graph, src=0, dest=0)\n\t\t# Update source and destination\n\t\t@source, @destination = src, dest\n\n\t\t# Check if source is undefined, if so return empty path\n\t\tif @source == 0\n\t\t\treturn []\n\t\tend\n\n\t\t# Generate a connections hash based on graph edges\n\t\toutgoing = Hash.new()\n\t\tnodes = graph.nodes.keys\n\t\tresult = Array.new()\n\n\t\tgraph.nodes.keys.each {|key| outgoing[key] = Hash.new() }\n\t\tgraph.edges.values.each do |edge|\n\t\t\t# Is it possible for any two issues to have multiple links\n\t\t\t# between them?\n\t\t\toutgoing[edge.a.id][edge.b.id] = edge\t\t\n\t\tend\n\n\t\t# If an edge already exists in the graph from source to destination\n\t\tif 
outgoing[@source].has_key?(@destination)\n\t\t\tresult.push(outgoing[@source][@destination].id)\n\t\t\treturn result\n\t\tend\n\t\t\t\n\t\t# Compute all paths from source\n\t\tpaths_tracer, paths_distances, relationships_on_paths = compute_paths_from_source(outgoing, nodes)\n\t\t\n\t\t# Find the shortest path through the graph between source and destination\n\t\tif destination != 0\n\t\t\treturn trace_path_src_to_dest(outgoing, paths_tracer)\n\t\tend\n\n\t\t# This happens only if the destination is 0, as it would have returned otherwise.\n\t\t# Return available relationships, distances, \n\t\treturn important_relationships_from_source(paths_tracer, paths_distances, relationships_on_paths)\n\tend", "def dijkstra\n # Intialise the algorithom if first run\n init_dijkstra if empty_path?\n\n # Stop the execution if all the nides have been reached\n return path if completed_path?\n\n # Make sure that all the weights are updated\n current_node[:node].adjacent_nodes.values.each do |node|\n @pool << node.merge(\n from: current_node[:node],\n weight: node[:weight] + current_node[:weight]\n )\n end\n\n # Sort the pool of nodes/edges by weight so to be able to grab the smallest\n # weight.\n pool.sort_by! { |edge| edge[:weight] }\n\n # Pick the next untouched node by shifting the nodes in the pool starting\n # from the smallest to the greatest.\n next_node = nil\n loop do\n next_node = pool.shift\n break unless values_in_path.include?(next_node[:node].value)\n end\n\n # Push the next step (from -> to) in the path\n @path << \"#{next_node[:from].value} ==> #{next_node[:node].value}\"\n\n # Track the node as seen\n @values_in_path += [next_node[:node].value, current_node[:node].value]\n\n # Update the current node\n @current_node = next_node\n\n # Keep the execution going\n dijkstra\n end", "def build_paths(start)\n step = 0\n visited = []\n unvisited = [[board_node_by_location(start),step]]\n \n while !unvisited.empty?\n node = unvisited[0][0]\n step = unvisited[0][1] + 1\n \n node.neighbors.each do |x|\n if not_visited(board_node_by_location(x),visited, unvisited)\n unvisited << [board_node_by_location(x),step]\n end\n end\n visited << unvisited.shift\n end\n return visited\nend", "def path_to(node)\n return unless @visited.include?(node)\n path = []\n while(node != @node) do\n path.unshift(node) \n node = @edge_to[node]\n end\n path.unshift(@node)\n end", "def shortest_single_flight\n\n min_distance = @largest_integer\n flight = \"\"\n @query.get_graph.each_key do |city|\n route_dict = get_outgoing_routes(city)\n route_dict.each do |dest, dist|\n if dist < min_distance\n min_distance = dist\n flight = \"#{get_city_info(city,\"name\")}-#{dest}\"\n end\n end\n end\n\n return flight\n\n end", "def dijkstra(src, target = nil)\n frontier = PriorityQueue.new\n shortest_paths = {src => 0}\n frontier[src] = 0\n\n until frontier.empty?\n v, c = frontier.pop_min # much faster\n\n return c if target == v\n shortest_paths[v] = c\n\n v.outer_edges.each do |e|\n v2, c2 = e.to, e.cost\n next if shortest_paths[v2]\n\n frontier.insert([v2, c + c2]) # faster\n end\n end\n\n shortest_paths\nend", "def findShortestPathsFromNode(root)\n # Clear graph for search\n @nodes.each do |node|\n node.setVisited false\n end\n\n # But (of course) we start with a node, so let's say it's been visited\n\n root.setVisited true\n\n # Set up the root of the tree\n treeRoot = TElement.new(root, nil)\n # Set up parents vector (just treeRoot) and leaves (empty)\n parents = []\n parents.push treeRoot\n leaves = []\n # indicates whether to 
keep going\n continuing = true\n # todo: this is going to look rather different in Ruby! double-check w/ original java\n while (continuing)\n children = []\n continuing = false\n parents.each do |parent| \n parentNode = parent.node\n puts \"Doing parent: #{parentNode} \" \n parentNode.getEdges.each do |edge|\n puts \"Doing edge(s): #{edge} \" \n childNode = edge.getOtherNode parentNode\n if childNode.notVisited\n \t childNode.setVisited true \n puts \"visited: #{childNode}\"\n child = TElement.new (childNode, parent, edge)\n leaves.push child\n children.push child\n continuing = true\n end\n\t parents = children\n\tend\n end\n end\n #puts \"returning: #{leaves}\"\n return leaves \n end", "def find_any_path_between_vertices(source_vertex, destination_vertex)\n validate_integer(source_vertex, destination_vertex)\n return nil if @vertices[source_vertex].nil? || @vertices[destination_vertex].nil?\n return path_between_vertices(source_vertex, destination_vertex)\n end", "def knight_path(from, to)\n\topen_queue = [PositionPath.new( from, [copy(from)] )]\n\tdiscovered = [from]\n\n\tuntil open_queue.empty?\n\t\tcurrent = open_queue.shift\n\n\t\treturn current.path if current.position == to\n\t\tvalid_moves(current.position).each do |move|\n\t\t\tunless discovered.include?(move)\n\t\t\t\tdiscovered << move\n\t\t\t\topen_queue.push(make_position_path(current, move)) \n\t\t\tend\n\t\tend\n\tend\n\t\nend", "def all_paths_source_target(graph)\n current_path = []\n results = []\n\n dfs(graph, results, 0, current_path)\n return results\nend", "def pathfind begTile, endTile\n @traveled_tiles = [begTile]\n @current_tiles = [begTile]\n @next_tiles = Array.new\n #iterate through the maze one movement at a time, hard stop when all tiles have been exhausted\n while (!@current_tiles.include? endTile) && @traveled_tiles.length < @maze.size\n @current_tiles.each do |tile|\n (get_adjacent_floors tile).each do |next_tile|\n #makes sure no tiles are double counted, the first to hit will always be the shortest\n if (next_tile.is_floor) && (!@next_tiles.include? next_tile) && (!@traveled_tiles.include? next_tile)\n @next_tiles.push next_tile\n next_tile.previous_tile tile\n end\n end\n end\n @traveled_tiles.concat @next_tiles\n @current_tiles = @next_tiles.dup\n @next_tiles.clear\n end\n endTile.get_path\n end", "def paths\n end_verts = ends\n paths = []\n vertices.each do |v|\n end_verts.each do |e|\n x = path?(v.id, e.id)\n if x.is_a?(Array)\n x[1] << v.data\n paths << x[1]\n end\n end\n end\n end_verts.each { |e| paths << e.data }\n paths\n end", "def astar pos, phi, current_road, goal\n fringe = PriorityQueue.new\n # figure out which node of our road we're facing\n facing, other = [current_road.n0, current_road.n1].sort_by{|n|\n ((n.pos - pos).dir - phi).abs\n }\n fringe.add(AStarNode.new(facing, AStarNode.new(other, nil)))\n closed_states = Set.new [other]\n nodes_expanded = 0\n until fringe.isEmpty\n current = fringe.remove\n next if closed_states.include? 
current.state\n return current if current.state == goal\n\n expanded = current.expand\n \n closed_states << current.state\n # since we have a bunch of nodes that only go to a single\n # node, it seems kind of silly to count those as expansions\n nodes_expanded += 1 if expanded.size > 1\n \n if nodes_expanded > MAX_NODES_EXPANDED\n puts \"Reached max expansion depth\"\n return nil\n end\n expanded.each{|successor|\n successor.g = current.g + current.state.pos.dist(successor.state.pos)\n successor.h = successor.state.pos.dist goal.pos\n fringe.add(successor)\n }\n end\n puts \"Failed to find solution after #{nodes_expanded} expansions\"\n return nil\n end", "def unused_paths(start_node, end_node, graph)\n graph - shortest_path(start_node, end_node, graph)\n end", "def get_path(start, finish) \n @retreat_algorithm.set_graph(memory.construct_grid) \n @retreat_algorithm.run(start, finish)\n end", "def shortest_path_to(other, options = {:method => :djikstra})\n latch = options[:method] == :breadth_first ? 2 : 1\n self.class.shortest_path(latch, id, other.id)\n end", "def solution\n @solution ||= cells.first.shortest_path_to(cells.last)\n end", "def build_path(start, end_pos)\n node = Node.new(start[0], start[1])\n target = Node.new(end_pos[0], end_pos[1])\n visited_nodes = []\n next_moves = [node]\n until next_moves.empty? do\n node = next_moves.shift\n puts \"Current node: #{node.x}, #{node.y}\"\n if node.x == target.x && node.y == target.y \n return node\n end\n visited_nodes.push(node)\n node.moves = get_moves(node)\n node.moves.reject do |square|\n visited_nodes.include?(square)\n end\n node.moves.each do |move| \n next_moves.push(move)\n end\n end\n return node\nend", "def find_path(char)\n # get pixel movement rate\n pix = $BlizzABS.pixel\n # use request\n request = @request[char]\n # if no nodes to test\n if request.open.size == 0\n # abort testing for this character\n @request.delete(char)\n # resets state\n char.ai.state = (char.ai.state == Invalid ? Return : Ready)\n # stop execution\n return []\n end\n # found\n found = false\n # find minimal key\n key = request.open.keys.min {|a, b|\n Math.hypot(a[0] - request.tx, a[1] - request.ty) <=>\n Math.hypot(b[0] - request.tx, b[1] - request.ty)}\n # this node is now logged as checked\n request.closed[key[0], key[1]] = request.open[key]\n # remove this node from the pending array\n request.open.delete(key)\n # iterate through all possible directions with relative offsets\n Cache::PathDirs.each {|dir|\n # coordinates of new position\n kx, ky = key[0] + dir[0], key[1] + dir[1]\n # if new coordinates are destination\n if kx == request.tx && ky == request.ty\n # the new node was checked\n request.closed[kx, ky] = dir[2]\n # path was found\n found = true\n # stop checking\n break\n # if new node not checked yet and coordinates are passable\n elsif request.closed[kx, ky] == 0 &&\n char.passable?(key[0] * pix, key[1] * pix, dir[2])\n # add new node to be checked\n request.open[[kx, ky]] = dir[2]\n end}\n # stop execution except if found path\n return nil unless found\n # backtrack the path\n result = request.backtrack\n # finish testing for this character\n @request.delete(char)\n # resets state\n char.ai.state = (char.ai.state == Invalid ? Return : Ready)\n # return movement command array\n return result\n end", "def shortest\n '-shortest'\n end", "def find_path(start, target)\n node = build_path(start, target)\n path = [node]\n until node.next_node.nil? 
do\n node = node.next_node\n path.push(node)\n end\n path = path.reverse\n puts \"You made it in #{path.length} moves. Here is your path: \"\n path.each do |node|\n puts \"[#{node.x}], [#{node.y}]\"\n end\nend" ]
[ "0.69755864", "0.68290687", "0.67324525", "0.6677658", "0.6673822", "0.65540713", "0.65273005", "0.6516278", "0.650917", "0.64267683", "0.64223987", "0.6387411", "0.63870925", "0.63182175", "0.6293437", "0.6283225", "0.62775296", "0.61802834", "0.61568344", "0.61428314", "0.6131014", "0.612855", "0.608041", "0.6067917", "0.6053945", "0.60158384", "0.59584945", "0.5957835", "0.5914683", "0.5902349", "0.5889913", "0.5862854", "0.5843382", "0.5831181", "0.58284086", "0.58263576", "0.5824623", "0.5741525", "0.57210803", "0.5713959", "0.570877", "0.57085925", "0.5692507", "0.5692127", "0.5691396", "0.5683034", "0.5677604", "0.5664127", "0.56510866", "0.56470054", "0.56344587", "0.5627318", "0.5625503", "0.5614698", "0.5602651", "0.5590016", "0.5559838", "0.5536103", "0.54905975", "0.548095", "0.5474247", "0.54502785", "0.5443353", "0.54429334", "0.5440169", "0.5440022", "0.54366463", "0.5426116", "0.5423008", "0.5423008", "0.54164296", "0.5414494", "0.5401554", "0.5399728", "0.5393717", "0.53922266", "0.5381082", "0.537835", "0.5377015", "0.5373443", "0.53699946", "0.536713", "0.53617275", "0.5347982", "0.5343354", "0.5335642", "0.5334911", "0.533094", "0.53186154", "0.531538", "0.53026104", "0.5281871", "0.52683556", "0.52585983", "0.52584267", "0.5257932", "0.5238553", "0.5227983", "0.5225533", "0.52099925" ]
0.60134226
26
put a bomb explosion on position pos, regardless of walls. return explosion range as a set
def exposion_range_without_wall(pos) range = Set.new @template.each do |orig_pos| trans_pos = Array.new(orig_pos) trans_pos.row += pos.row trans_pos.col += pos.col if (0...n).cover?(trans_pos.row) and (0...m).cover?(trans_pos.col) range << trans_pos end end return range end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def explosion_range(bomb_pos)\n\t\tcandidate = exposion_range_without_wall(bomb_pos) - @wall\n\t\telect = Set.new\n\t\twalkable = Set.new(candidate)\n\t\t\n\t\tloop do\n\t\t\tbreak if candidate.empty?\n\t\t\ts, e = bomb_pos, candidate.first\n\t\t\t\n\t\t\tpath = AStar(s, e, walkable)\n\t\t\t\n\t\t\t#no shortest path\n\t\t\tif not path\n\t\t\t\tcandidate.delete(s)\n\t\t\t\tcandidate.delete(e)\n\t\t\t\telect << s\n\t\t\t\tnext\n\t\t\tend\n\t\t\t\n\t\t\t#find shortest path\n\t\t\tpath.each_index do |step|\n\t\t\t\tpos = path[step]\n\t\t\t\tcandidate.delete(pos)\n\t\t\t\telect << pos if step <= @d # bomb on bom_pos can reach pos\n\t\t\tend\n\t\tend\n\t\treturn elect\n\tend", "def bomb_explosion_range(action = :increment)\n @bomb_manager.explosion_range action\n end", "def make_particles_stay_in_bounds scale\n # TODO: Better boundary conditions (THESE ARE A LAME WORKAROUND)\n particles.each do |particle|\n if particle.position.x >= scale - 0.01\n particle.position.x = scale - (0.01 + 0.1*rand)\n particle.velocity.x = 0\n elsif particle.position.x < 0.01\n particle.position.x = 0.01 + 0.1*rand\n particle.velocity.x = 0\n end\n\n if particle.position.y >= scale - 0.01\n particle.position.y = scale - (0.01+rand*0.1)\n particle.velocity.y = 0\n elsif particle.position.y < 0.01\n particle.position.y = 0.01 + rand*0.1\n particle.velocity.y = 0\n end\n end\n end", "def map_in_range\n clean_grid\n UNITS.each do |unit|\n position = MAP[unit.y][unit.x]\n position.occupied = true\n if unit.team == 'G'\n position.goblin_in_grid = true\n position.up.goblin_in_range = true if position.up.type == '.'\n position.right.goblin_in_range = true if position.right.type == '.'\n position.down.goblin_in_range = true if position.down.type == '.'\n position.left.goblin_in_range = true if position.left.type == '.'\n else\n position.elf_in_grid = true\n position.up.elf_in_range = true if position.up.type == '.'\n position.right.elf_in_range = true if position.right.type == '.'\n position.down.elf_in_range = true if position.down.type == '.'\n position.left.elf_in_range = true if position.left.type == '.'\n end\n end\nend", "def drop_bomb(x,y)\n return nil if out_of_range?(x,y) || !spot_empty?(x,y)\n self[x,y] = spot_unoccupied?(x,y) ? 
MISS_MARK : HIT_MARK\n end", "def range(pos = @move_pos) \n if attack?\n max, min, bow, line, field, proj = battler.weapon_range\n if bow\n action_range = battler.calc_pos_bow(max, min, [pos])\n else\n action_range = battler.calc_pos_attack(max, min, [pos])\n end\n elsif skill?#skill\n max, field, line, exclude, min, v_range = battler.skill_range(item.id)\n if line\n action_range = battler.calc_pos_attack(max, min, [pos])\n else\n action_range = battler.calc_pos_spell(max, min, [pos], v_range)\n end\n elsif item?\n max, field, skill_id, v_range = battler.item_range(item.id)\n action_range = battler.calc_pos_spell(max, 0, [pos], v_range)\n else#item/guard \n action_range = []\n end\n return action_range\n end", "def is_a_bomb?(pos)\n tile = @grid[pos[0]][pos[1]]\n tile.value == BOMB\n end", "def set_end_postion(pos)\n actor_pos = []\n dir = $game_player.direction\n for i in 0...$game_party.actors.size\n if i == 0\n actor_pos[i] = pos\n else\n cat = $game_player.caterpillar[i - 1]\n case dir\n when 2 then actor_pos[i] = [pos[0], pos[1] - cat.member]\n when 4 then actor_pos[i] = [pos[0] + cat.member, pos[1]]\n when 6 then actor_pos[i] = [pos[0] - cat.member, pos[1]]\n when 8 then actor_pos[i] = [pos[0], pos[1] + cat.member]\n end\n end\n end\n return actor_pos\n end", "def calculate_aoe( range_max, range_min, x = @x, y = @y, v_range_aoe = 0) # mod MGC\n return [] if range_max == nil \n return [[x, y]] if range_max == 0\n\n #prevents items from being pushed if height change between core and outer is less than range/2\n positions = []\n #save position for later processing \n \n #prevents items from being pushed if height change between core and outer is less than range/1.5\n for i in 0..range_max\n it = range_max - i \n #test all couples (it, oy) like: 0 <= it+oy <= range_max\n for oy in 0..i\n next if it+oy < range_min \n for ux, uy in[ [x - it, y - oy], [x - it, y + oy], [x + it, y + oy], [x + it, y - oy] ]\n positions.push([ux, uy]) if valid_new_aoe_pos(ux, uy, x , y, v_range_aoe) and not positions.include?([ux, uy]) # mod MGC\n end\n end\n end\n return positions\n end", "def salmon_around(position, range=10)\n (objects_around(position, range)[1]).find_all {|fish| fish.type == Salmon}\n end", "def pos=(pos)\n x, y, step_x = pos[0], pos[1], FOUNDATION['indent']\n\n piles.each do |pile|\n pile.pos = [x, y]\n pile.background = Ray::Sprite.new image_path('foundation_bg')\n x += step_x # Margin between piles along the axis X.\n end\n end", "def position(val)\n @inside = true\n return val if (low..high).cover? val\n @inside = false\n return constrain(val, low, high)\n end", "def create_explosion\n explosion = ParticleFactory.buildParticles(\"big\", 80).set! 
:emission_direction => Vector3f.new(0, 1, 0),\n :maximum_angle => FastMath::PI, :speed => 1, :minimum_life_time => 600, \n :start_size => 3, :end_size => 7, \n :start_color => ColorRGBA.new(1, 0.312, 0.121, 1), \n :end_color => ColorRGBA.new(1, 0.24313726, 0.03137255, 0), \n :control_flow => false,\n :initial_velocity => 0.02, :particle_spin_speed => 0, :repeat_type => Controller::RT_CLAMP\n\n explosion.warmUp 1000\n explosion.render_state(@ts, @bs, @zstate)\n\n @explosions << explosion\n explosion\n end", "def attack(start_pos, end_pos)\n unless self[end_pos].color == self[start_pos].color\n self[start_pos].position = end_pos\n self[end_pos].position = start_pos\n self[end_pos] = NullPiece.new unless self[end_pos].is_a?(NullPiece)\n self[end_pos], self[start_pos] = self[start_pos], self[end_pos]\n end\n end", "def place_bombs\n range = ([email protected]).to_a\n total_bombs = (@grid.length ** 2) / 4\n @grid[range.sample][range.sample].value = BOMB until @grid.flatten.map(&:value).count(BOMB) == total_bombs\n end", "def food_around(position, range=100)\n objects_around(position, range)[0]\n end", "def minmax_position(passes)\n # It's somewhere in the positions\n all_positions = positions(passes)\n\n # between the minimum and maximum seat\n min_pos, max_pos = all_positions.minmax\n\n # So we just eliminate any seat that's taken. Whatever is left over\n # is ours\n (min_pos..max_pos).to_a - all_positions\nend", "def pos_out_of_range(position)\n Error \"Position out of range! (#{position}) given, \" \\\n \"but the minefield is #{@num_cells} long.\"\n end", "def touched_by bullet\n ([@width, @height].max/3).times do\n p_vx = rand(100)/10.0 - 5\n p_vy = rand(100)/10.0 - 5\n #@window.particles << Particle.new(@window, @x + rand(@width) - @width/2, @y + rand(@height) - @height/2, p_vx, p_vy)\n @window.particles << Live_Particle.new(@window, bullet.x + rand(@width) - @width/2, bullet.y + rand(@height) - @height/2, p_vx, p_vy, @colors.rand_in, 1 + rand)\n end\n damage bullet.damage\n end", "def probable_enemy_occupations(options = {})\n fail 'Position not given!' unless options[:pos]\n # starting with the rightmost...leftmost. 
Then bottommost...topmost\n occupation_points = []\n center = Vector[*options[:pos]]\n # horizontally, until ship goes too left\n head = center.dup\n until head == center + Vector[-(options[:ship_length]), 0]\n body = body_from_head(head, options[:ship_length], ACROSS)\n occupation_points << body if valid?(body, options[:board])\n head -= ACROSS\n end\n # vertically, again until ship goes too far up\n head = center.dup\n until head == center + Vector[0, -(options[:ship_length])]\n body = body_from_head(head, options[:ship_length], DOWN)\n occupation_points << body if valid?(body, options[:board])\n head -= DOWN\n end\n points = remove_invalid_shots!(occupation_points.flatten, options[:board])\n EdenPlayer.prob_board_matrix points\n end", "def enemies_in_range(range)\n return case attacking_direction\n when :right\n @game_state.enemies.select do |e| \n (e.x > @x && e.x < (self.right + range)) && (e.bottom > @y || e.y < self.bottom)\n end\n when :left\n @game_state.enemies.select do |e| \n (e.right < @x && e.right > (@x - range)) && (e.bottom > @y || e.y < self.bottom)\n end\n when :up\n @game_state.enemies.select do |e| \n (e.bottom < @y && e.bottom > (@y - range)) && (e.right > @x && e.x < self.right)\n end\n when :down\n @game_state.enemies.select do |e| \n (e.y > self.bottom && e.y < (self.bottom + range)) && (e.right > @x && e.x < self.right)\n end\n end\n end", "def place_mines(num_mines, pos)\n coords = Array.new(height * width) { |i| [i / width, i % width] }\n coords.delete(pos)\n coords.shuffle!\n @mines = Array.new(height) { Array.new(width, false) }\n num_mines.times { |i| @mines[coords[i][0]][coords[i][1]] = true }\n nil\n end", "def set_screen_move_postion(pos)\n max_x = ($game_map.width - 20) * 128\n max_y = ($game_map.height - 15) * 128\n pos_x = [0, [pos[0] * 128 - $game_player.center_x, max_x].min].max\n pos_y = [0, [pos[1] * 128 - $game_player.center_y, max_y].min].max\n return [pos_x, pos_y]\n end", "def collisions\n [collided_bombs, collided_rubies]\n end", "def check_collisions!\n 1000.times do\n @particles.values.map(&:tick)\n position_groups = @particles.values.group_by(&:position)\n collisions = position_groups.select { |pos, particles| particles.size > 1 }\n collisions.values.flatten.collect(&:num).map { |num| @particles.delete(num) }\n end\n @particles\n end", "def getMinMax(array,pos)\n minimum = 1000000\n maximum = -1000000\n for player in array\n if player[4] == pos\n minimum = [player[player.length-1],minimum].min\n maximum = [player[player.length-1],maximum].max\n end\n end\n return [minimum,maximum]\nend", "def death\n death1 = Explosion.new(@x, @y, 0, @map, @color)\n death2 = Explosion.new(@x, @y, Math::PI / 2, @map, @color)\n death3 = Explosion.new(@x, @y, Math::PI, @map, @color)\n death4 = Explosion.new(@x, @y, Math::PI * 3 / 2, @map, @color)\n\n @map.EobjectArray.push(death1)\n @map.EobjectArray.push(death2)\n @map.EobjectArray.push(death3)\n @map.EobjectArray.push(death4)\n\n @map.PobjectArray.delete(self)\n end", "def create_drops\n drops = []\n @number_of_drop_points.times do\n number_in_this_drop = rand(@max_particles - @min_particles) +\n @min_particles\n drops.push(Array.new(number_in_this_drop,\n Particle.new(@particle_stability_radius)))\n end\n\n drops\n end", "def kick_bomb(bomb)\n bomb_manager.getBomb(bomb).moveTo(@new_x, @new_y)\n #or bomb.moveTo(:direction)\n end", "def eva\n [[super, 0].max, 100].min\n end", "def extremes\n left = @position.x - @radius\n right = @position.x + @radius\n top = @position.y - @radius\n bottom = @position.y + 
@radius\n [left, right, top, bottom]\n end", "def updatePosition\n super\n\n Bullet.BulletInstances.each { |bullet|\n if collision?(bullet)\n bullet.destroy\n explode\n end\n }\n\n Ship.ShipInstances.each { |ship|\n if collision?(ship)\n ship.destroy\n explode\n end\n }\n end", "def get_mines(positions)\n positions.select { |pos| mine?(pos) }\n end", "def set_actors_screen_postion(pos)\n base_pos = [pos[0] - $game_player.x, pos[1] - $game_player.y]\n pos_x = $game_player.screen_x + (base_pos[0] * 32)\n pos_y = $game_player.screen_y + (base_pos[1] * 32)\n return [pos_x, pos_y]\n end", "def seed_grid\n @BOMBS.times do\n bomb_placed = false\n until bomb_placed\n pos = [rand(@Y_DIM), rand(@X_DIM)]\n unless self[pos].bomb\n self[pos].bomb = true\n bomb_placed = true\n end\n end\n end\n end", "def extremes\n point = @points[0]\n #puts \"next\"\n #p [point.x, point.y]\n left = right = point.x # @position.x + point.x\n top = bottom = point.y # @position.y + point.y\n @points[1..-1].each do |point|\n #p [point.x, point.y]\n x, y = point.to_a # (@position + point).to_a\n left = x if x < left\n right = x if x > right\n top = y if y < top\n bottom = y if y > bottom\n end\n #p [left, right, top, bottom]\n [left, right, top, bottom]\n end", "def pos=(pos)\n x, y = pos[0], pos[1]\n step_x, step_y = TABLEAU['indent']['x'], TABLEAU['indent']['y']\n\n piles.each { |pile|\n pile.pos = [x, y]\n x += step_x # Margin between piles along the axis X.\n y2 = 0 # Y position of the first card.\n\n pile.cards.each_with_index do |card, i|\n card.sprite.y += y2\n y2 += step_y # Y axis margin.\n card.face_down unless pile.last_card?(card)\n end\n }\n end", "def destroy!\n (rand(3) + 5).times { Explosion.new(@x + rand(BoomOffset * 2) - BoomOffset, @y + rand(BoomOffset * 2) - BoomOffset) }\n kill!\n end", "def destroy!\n (rand(3) + 5).times { Explosion.new(@x + rand(BoomOffset * 2) - BoomOffset, @y + rand(BoomOffset * 2) - BoomOffset) }\n kill!\n end", "def pos=(pos); end", "def make_damage(a_point)\n \tmsj = 'hit'\n\tship = ship_at(a_point)\n\tship.get_hit(a_point)\n\tif(ship.state() == 'sink')\n\t\tremove_to_the_fleet(ship)\n\t\tmsj = 'sink'\n\tend\n\tmsj\t\n end", "def handle_explosions_to_spaceships\n return if @spaceships.empty? || @explosions.empty?\n @spaceships.product(@explosions).select {|pair| Collision.detect(*pair)}.each do |spaceship, explosion|\n spaceship.damage!(explosion.projectile)\n end\n end", "def set_pos(base_pos, base_screen, set_pos)\n diff_x = (base_screen[0] + ((set_pos[0] - base_pos[0]) * 32))\n diff_y = (base_screen[1] + ((set_pos[1] - base_pos[1]) * 32))\n return [diff_x, diff_y]\n end", "def projectile\n end", "def collided_bombs\n bomb_layers.map do |bomb|\n bomb if bomb.collide_with?(GameData.player_layer.rect_version)\n end.compact\n end", "def collision(damage)\n end", "def get_hit(a_point)\n\toccupied_points.reject! 
{ |point| point.is_equal(a_point) }\n\tset_state('damage') \n\tcheck_if_is_sink()\n end", "def Inrange?\n ## check if enemy is in meele range (ahead of player or ahead and one step left or right)\n ## d : Direction (2,4,6,8) up left right buttom\n xmax = $game_player.real_x * 32 + 16\n xmin = $game_player.real_x * 32 - 16\n ymax = $game_player.real_y * 32 + 16\n ymin = $game_player.real_y * 32 - 16\n\n ## set hit range in relation to player facing direction\n if $game_player.direction == 8 then ## facing up\n xmin -= 32\n xmax += 32\n ymin -= 32\n ymax -= 32\n end\n \n if $game_player.direction == 2 then ## facing bottom\n xmin -= 32\n xmax += 32\n ymin += 32\n ymax += 32\n end\n \n if $game_player.direction == 4 then ## facing left\n xmin -= 32\n xmax -= 32\n ymin -= 32\n ymax += 32\n end \n \n if $game_player.direction == 6 then ## facing right\n xmin += 32\n xmax += 32\n ymin -= 32\n ymax += 32\n end\n \n ## check if enemy is in hit range\n if self.mapx < xmax && self.mapx > xmin && \\\n self.mapy < ymax && self.mapy > ymin then\n return true\n else\n return false \n end\n end", "def relative_range_era(points)\n relative_range = []\n player = $game_player\n x,y = player.x, player.y\n points.each do |v|\n relative_range.push(Vertex.new(v.x + x, v.y + y))\n end\n relative_range\n end", "def reveal_tile(pos = self.cursor.pos)\n raise \"spot taken\" if revealed?(pos)\n reveal_bombs if bomb?(pos)\n cascade(pos) unless bomb?(pos)\n end", "def insert_mines\n @random_spots = []\n @num_of_mine.times do\n\n while @random_spots.length < @num_of_mine\n rand_num = Random.rand(@num_of_tiles**2)\n\n if !@random_spots.include?(rand_num)\n @random_spots << rand_num\n end\n\n end\n end\nend", "def start_projectile\n subj = (@setup[PROJ_REVERSE] ? @target : @subject)\n subj = @subject if @target.is_a?(Array)\n ypos = 0\n xpos = 0\n if subj.is_a?(Array)\n size = subj.size\n xpos = subj.inject(0) {|r,battler| r + battler.screen_x}/size \n ypos = subj.inject(0) {|r,battler| r + battler.screen_y}/size \n xpos += @setup[PROJ_STARTPOS][0]\n else\n spr_subj = subj.sprite\n case @setup[PROJ_START]\n when PROJ_POSITION_HEAD; ypos = subj.y - spr_subj.height\n when PROJ_POSITION_MID; ypos = subj.y - spr_subj.height/2\n when PROJ_POSITION_FEET; ypos = subj.y\n when PROJ_POSITION_NONE; ypos = xpos = 0\n else; ypos = subj.y;\n end\n xpos = subj.x + @setup[PROJ_STARTPOS][0]\n end\n ypos += @setup[PROJ_STARTPOS][1]\n @angle = (self.mirror ? 360 - @setup[PROJ_ANGLE] : @setup[PROJ_ANGLE])\n set_point(xpos, ypos)\n @point.continue = @setup[PROJ_PIERCE] \n @afterimage_opac = @setup[PROJ_AFTOPAC]\n @afterimage_rate = @setup[PROJ_AFTRATE]\n @anim_top = @setup[PROJ_ANIMPOS]\n if @setup[PROJ_ANIMSTART]\n if @setup[PROJ_ANIMSTART] == PROJ_ANIMDEFAULT\n anim = $data_animations[item.animation_id] \n else\n anim = $data_animations[@setup[PROJ_ANIMSTART]]\n end\n @anim_start.start_animation(anim,subj.flip)\n end\n @anim_start.target_sprite = [subj.sprite] if @setup[PROJ_FLASH_REF][0]\n apply_item(target, target.is_a?(Array)) if @setup[PROJ_DAMAGE_EXE] == -1\n make_aim(@dur, @jump)\n end", "def is_bomb(x,y)\n @board[x,y].is_bomb?\n end", "def pick(position)\n\t\[email protected] do |entity|\n\t\t\tentity.hit?(position)\n\t\tend\n\tend", "def pos(type = @type)\n @respawns[type] ||= []\n end", "def makeLiving(points)\n points.map do |p|\n getCell(p[0],p[1]).spawn\n end\n end", "def in_bounds(pos)\n pos.all? 
{ |coord| coord.between?(0, 7) }\n end", "def possibleEggMoves\n v=MultipleForms.call(\"possibleEggMoves\",self)\n return v if v!=nil\n pbRgssOpen(\"Data/eggEmerald.dat\",\"rb\"){|f|\n f.pos=(self.species-1)*8\n offset=f.fgetdw\n length=f.fgetdw\n if length>0\n bob=[]\n f.pos=offset\n i=0; loop do break unless i<length\n atk=f.fgetw\n bob.push(atk)\n i+=1\n end\n return bob\n else\n return []\n end\n }\n end", "def make_shoot(a_point)\n\tmsj = 'water'\n\tif(!is_empty(a_point))\n\t\tmsj = make_damage(a_point)\n\tend\n\tmsj\n end", "def build_obstruction_array(x_end, y_end)\n y_change = y_position - y_end\n x_change = x_position - x_end\n\n # Build array squares which piece must move through\n obstruction_array = []\n if x_change.abs == 0 # If it's moving vertically\n (1..(y_change.abs-1)).each do |i|\n obstruction_array << [x_position, y_position - (y_change/y_change.abs) * i]\n end\n elsif y_change.abs == 0 # If horizontally\n (1..(x_change.abs-1)).each do |i| # 7 times do (0..6).each do\n obstruction_array << [x_position - (x_change/x_change.abs) * i, y_position]\n end\n elsif y_change.abs == x_change.abs #if diagonally\n (1..(y_change.abs-1)).each do |i|\n obstruction_array << [x_position - (x_change/x_change.abs) * i, y_position - (y_change/y_change.abs) * i]\n end\n end\n obstruction_array\n end", "def touch_end_explosion\n vector = @current_point - @selected.position\n magnitude = Math.sqrt(vector.x**2 + vector.y**2)\n @delegate.explosion = magnitude\n @delegate.close_modal_view\n end", "def set_actor_postions(actors_pos)\n $game_temp.battle_move = true\n $game_temp.actors_start_position = []\n @moving_actors = []\n for i in 0...$game_party.actors.size\n $game_temp.actors_position[i] = set_actors_screen_postion(actors_pos[i])\n @moving_actors[i] = i == 0 ? 
$game_player : $game_player.caterpillar[i-1]\n @moving_actors[i].move_update.clear if i > 0\n end\n loop do\n for i in 0...$game_party.actors.size\n set_battle_position(@moving_actors[i], actors_pos[i])\n $game_temp.actors_position[i] = set_actors_screen_postion(actors_pos[i])\n end\n update_basic(false, true, true)\n break if all_in_postion(actors_pos)\n end\n $game_temp.battle_move = false\n wait(10)\n end", "def alien_hit(alien, bullet)\r\n # Deleting the alien and the bullet object\r\n @aliens.delete alien\r\n @bullets.delete bullet\r\n @explode = true\r\n @player.update_score()\r\n # Setting the coordinates for the explosion\r\n @explode_x = alien.x\r\n @explode_y = alien.y\r\n @explosion_sound.bmp.play \r\n end", "def startcondense(pos)\n set = [pos]\n numfound = 0\n\n numfound = condense(set,pos,numfound)\n piecetype = @board[set[0]].identity\n\n #find bottommost piece\n endpoint = set.max\n\n #find bottommost/leftmost piece\n for i in (0..BRDSZ-1)\n if !set.include?(endpoint-1) and !@board[endpoint].isleft\n break\n else\n endpoint -= 1\n end\n end\n\n #cleanup\n for i in (0..numfound-1)\n if(set[i] != endpoint)\n @board[set[i]].assiden(\" \")\n end\n end\n @board[endpoint].next\n end", "def get_available_positions\n\t\tpositions = []\n\t\tfor i in (1..9) do\n\t\t\tx = ((i - 0.1) / 3).truncate\n\t\t\ty = (i - 1) % 3\n\t\t\tif self.is_valid?(x, y)\n\t\t\t\tpositions.push(i)\n\t\t\tend\n\t\tend\n\t\t\n\t\treturn positions\n\tend", "def repos_enemy\n @enemies.each do |i|\n i.x -= @delta_x\n i.y -= @delta_y\n end\n @ground.x -= @delta_x\n @ground.y -= @delta_y\n end", "def shoot\n\t\tt = (Rubygame::Time.get_ticks() - @parent.stamp[:p])/1000.0\n\t\tv = Vector.new(1,0).rotate(@parent.angle)\n\t\tv.magnitude += 150\n\t\tp = @parent.project(t)\n\t\[email protected][0].push(@type.new(p,v,@lifespan))\n\tend", "def shots_on_target\n self.positions.where(water: false, shooted: true).count\n end", "def set_battle_position(actor, pos)\n return if actor.moving? 
or pos == [actor.x, actor.y]\n if actor.x == pos[0] and actor.y > pos[1]\n actor.y -= 1\n actor.turn_up\n elsif actor.x == pos[0] and actor.y < pos[1]\n actor.y += 1\n actor.turn_down\n elsif actor.x > pos[0] and actor.y == pos[1]\n actor.x -= 1\n actor.turn_left\n elsif actor.x < pos[0] and actor.y == pos[1]\n actor.x += 1\n actor.turn_right\n elsif actor.x < pos[0] and actor.y > pos[1]\n actor.turn_up\n actor.y -= 1\n actor.x += 1\n elsif actor.x > pos[0] and actor.y < pos[1]\n actor.turn_down\n actor.y += 1\n actor.x -= 1\n elsif actor.x > pos[0] and actor.y > pos[1]\n actor.turn_left\n actor.y -= 1\n actor.x -= 1\n elsif actor.x < pos[0] and actor.y < pos[1]\n actor.turn_right\n actor.y += 1\n actor.x += 1\n end\n actor.increase_steps\n end", "def set_range\n ref_gene = ref_genes.first\n [ref_gene.txStart, ref_gene.txEnd]\n end", "def spawn_smulg\r\n smulg = spawn_monster(\"Smulg\", 30, 15, 65, 50, rand(3..5), (3..10))\r\nend", "def ranges\n x_start, y_start = absolute(0, 0)\n x_end, y_end = absolute(@diameter - 1, @diameter - 1)\n\n [x_start..x_end, y_start..y_end]\n end", "def update\n if width > Utility::EXPLOSION_SPEED * 30 || height > Utility::EXPLOSION_SPEED * 30\n @decreasing = true\n decrease\n elsif @decreasing && (width > 2 && height > 2)\n decrease\n elsif @decreasing\n level.remove_explosion(self)\n else\n increase\n end\n end", "def create_ScriptMoveto_set(script)\n comm = SAC.new(\"MOVE\", [\"SCRIPT_TO\", script])\n set = SACS.new()\n set.list << comm\n set.skip_procs << get_defScript(\"MOVING\")\n str = %Q(\n data = character.dungeon_obj.get_next_tile()\n data = [-1, -1] if data.nil?\n character.pos?(data[0], data[1])\n )\n set.break_procs << SCR.new([\"character\"], str)\n set.type = :loop\n return set\n end", "def planted_bombs_limit(action = :increment)\n @bomb_manager.planted_bombs_limit action\n end", "def generate_bombs\n until @bomb_locations.length == 10\n coords = [rand(0..@NUM_OF_ROWS-1), rand(0..@NUM_OF_COLUMNS-1)]\n if !@bomb_locations.include?(coords) && (!self[coords] || self[coords].value != \"O\")\n @bomb_locations.add(coords)\n self[coords] = Square.new(\"O\")\n end\n end\n end", "def move_piece!(start_pos,end_pos)\n piece=self[start_pos]\n raise 'Piece cannot move like that' unless piece.moves.include?(end_pos)\n self[end_pos]=piece\n self[start_pos]=sentinel\n piece.pos=end_pos\n nil\n end", "def select_moves(position, posibles)\n moves = []\n posibles.each { |move| moves << [position[0] + move[0], position[1] + move[1]] }\n moves = moves.select { |move| move[0].between?(1,8) && move[1].between?(1,8) }\nend", "def make_bomb\n @has_bomb = true\n end", "def make\n @spaces.each { |position| position.occupied = true }\n # pp \"made ship: #{@spaces}\"\n end", "def person_in_spot(pos)\n\t\tpos % @seats.size\n\tend", "def define_adyacent_bombs\n @board.length.times do |i|\n @board[i][:value] = get_total_adyacent_bombs(i) unless @board[i][:value] == @bomb\n end\n end", "def element_at target_pos\n return game_objects.find{ |object| object.position == target_pos }\n end", "def enemy_bullet_collision\n if cr.defeated == false\n $e_bullets.each do |b|\n if self.bounding_box_collision?(b)\n @life -= 1\n $hud.set_lives(life)\n clear_all_bullets\n end\n end\n end\n end", "def get_poss_moves\n x = @location[0] #x is row\n y = @location[1] #y is column\n\n move_list = [] #quarter circle forward punch\n\n if @colour == \"white\"\n move_list = white_pawn_moves(x,y)\n else\n move_list = black_pawn_moves(x,y)\n end\n\n possible_moves = move_list.select { |e|\n (e[0] >= 0) 
&& (e[0] <= 7) && (e[1] >= 0) && (e[1] <= 7)\n }\n possible_moves\n end", "def place_fleet(pos_list)\n # Try to set all ships on the field\n res = pos_list.inject(true) do |a, l|\n a && @all_ships[l[0]].set!(l[1], l[2], l[3])\n end\n\n # If success, check something???\n res = @all_ships.inject(true) { |a, ship| a && ship.coord } if res\n\n # Remove all ships otherwise\n @all_ships.each { |ship| ship.kill if ship.coord } if !res\n\n res\n end", "def collidesWithPlayer\n for x in @x.round..(@x + @width).round\n for y in (@y - @height)[email protected]\n if @map.player.containsPoint?(x, y)\n @map.PobjectArray.delete(self)\n @map.player.loseHealth\n end\n end\n end\n end", "def position_generator(pos1, pos2, length)\n # generate a position away from earth and moon\n position = rand(1..length - 1)\n if pos1.include? position or pos2.include? position\n position = position_generator(pos1, pos2, length)\n end\n return position\nend", "def isInside(_pos)\n raise \"not defined\" ;\n end", "def collidesWithEnemy\n for x in @x.round..(@x + @width).round\n for y in (@y - @height)[email protected]\n if @map.enemyContainsPoint?(x, y)\n @map.PobjectArray.delete(self)\n return\n end\n end\n end\n end", "def game_bomb_action\n if tool_canuse?(BweaponId) and not @showing_bomb\n if $game_party.has_item?($data_items[BcostItemId]) \n $game_party.consume_item($data_items[BcostItemId])\n @gamebomb.direction = @direction\n @gamebomb.moveto(@x, @y)\n @gamebomb.jump_passable?(1)[email protected]_jump(1) :@gamebomb.jump(0,0)\n @gamebomb.action_time = BombDuration * 60\n @showing_bomb = true\n @gamebomb.through = false\n RPG::SE.new(BombActionSe,90).play\n @tool_anime = 20\n else\n Sound.play_buzzer\n end\n end\n end", "def position=(point); end", "def generate_children_coordinates(x,y)\n child_coordinates = []\n MOVEMENT_DIFF.each do |dx, dy|\n move = [x+dx, y+dy]\n child_coordinates << move if within_limits?(move)\n end\n child_coordinates\n end", "def color_valid_positions\n return if @item.nil?\n \n center_color = Color.new(83,142,250)\n outer_color = Color.new(250,40,100)\n \n cx = cy = (contents.width-@grid_square_size)/@grid_square_size/2 * @grid_square_size + 1\n sq = @grid_square_size-1\n\n points = !(t = @item.tbs_spec_range).nil? ? 
t[$game_temp.tb_event.dir_to_sym_era] : simple_range\n \n return if points.nil?\n \n points.each do |v|\n offset_x, offset_y = v.x * @grid_square_size, v.y * @grid_square_size\n sz = grid_side\n px,py = cx + offset_x + sq, cy + offset_y + sq\n contents.fill_rect(px-sq,py-sq,sq,sq, outer_color) if px < sz && py < sz\n end\n contents.fill_rect(cx, cy,sq,sq, center_color) # center\n end", "def mate_around(position, range=10)\n salmon_around(position, range).find_all {|salmon| salmon.sex == Female}\n end", "def update_bomb bomb, player\r\n if bomb.isActivated == 1\r\n bomb.explotion_time -= 16.63\r\n cur_time = Gosu.milliseconds.to_i\r\n i = (cur_time - bomb.activation_time ) / bomb.time_interval\r\n if(i < bomb.bomb_images.length)\r\n bomb.cur_image = bomb.bomb_images[i]\r\n end\r\n end\r\nend", "def build_spans on_range\n @spans = Array.new @size[1]\n\n @size[1].times do |y|\n spans = []\n left = (@size[1]-y-1)*@size[0]\n start = nil\n\n @size[0].times do |x|\n d = on_range.include?(@pixels[left+x])\n\n if !start && d\n start = x\n elsif start && !d\n spans << [start, x]\n start = nil\n end\n end\n\n spans << [start, @size[0]] if start\n @spans[y] = spans\n end\n end", "def guess_pos()\n sum = TEAM_REFILL_PLAYERS_POS_TO[PLAYER_POSITION_GK]\n + TEAM_REFILL_PLAYERS_POS_TO[PLAYER_POSITION_D]\n + TEAM_REFILL_PLAYERS_POS_TO[PLAYER_POSITION_M]\n + TEAM_REFILL_PLAYERS_POS_TO[PLAYER_POSITION_S]\n pos = rand(1..sum)\n ret = {\n PLAYER_POSITION_GK => 0,\n PLAYER_POSITION_D => 0,\n PLAYER_POSITION_M => 0,\n PLAYER_POSITION_S => 0\n }\n \n if pos <= TEAM_REFILL_PLAYERS_POS_TO[PLAYER_POSITION_GK]\n ret[PLAYER_POSITION_GK] = PLAYER_POSITION_MAX\n elsif pos <= TEAM_REFILL_PLAYERS_POS_TO[PLAYER_POSITION_GK]\n + TEAM_REFILL_PLAYERS_POS_TO[PLAYER_POSITION_D]\n ret[PLAYER_POSITION_D] = PLAYER_POSITION_MAX\n elsif pos <= TEAM_REFILL_PLAYERS_POS_TO[PLAYER_POSITION_GK]\n + TEAM_REFILL_PLAYERS_POS_TO[PLAYER_POSITION_D]\n + TEAM_REFILL_PLAYERS_POS_TO[PLAYER_POSITION_M]\n ret[PLAYER_POSITION_M] = PLAYER_POSITION_MAX\n else\n ret[PLAYER_POSITION_S] = PLAYER_POSITION_MAX\n end\n \n return ret\n end", "def get_piece(pos)\n @grid[pos[0]][pos[1]]\n end", "def gravity\n @moons.combination(2).to_a.each do |moon_a, moon_b|\n if moon_a[:position][:x] < moon_b[:position][:x]\n moon_a[:velocity][:x] += 1\n moon_b[:velocity][:x] -= 1\n elsif moon_b[:position][:x] < moon_a[:position][:x]\n moon_a[:velocity][:x] -= 1\n moon_b[:velocity][:x] += 1\n end\n\n if moon_a[:position][:y] < moon_b[:position][:y]\n moon_a[:velocity][:y] += 1\n moon_b[:velocity][:y] -= 1\n elsif moon_b[:position][:y] < moon_a[:position][:y]\n moon_a[:velocity][:y] -= 1\n moon_b[:velocity][:y] += 1\n end\n\n if moon_a[:position][:z] < moon_b[:position][:z]\n moon_a[:velocity][:z] += 1\n moon_b[:velocity][:z] -= 1\n elsif moon_b[:position][:z] < moon_a[:position][:z]\n moon_a[:velocity][:z] -= 1\n moon_b[:velocity][:z] += 1\n end\n end\n end" ]
[ "0.7945777", "0.71731836", "0.5733069", "0.5717691", "0.56722784", "0.5618275", "0.56072676", "0.55668104", "0.54659235", "0.5417783", "0.5337474", "0.5304462", "0.52880955", "0.52643615", "0.5250653", "0.5230598", "0.5216091", "0.518172", "0.51293135", "0.5114803", "0.5099758", "0.5086803", "0.5070201", "0.50147194", "0.50016904", "0.49914533", "0.49901855", "0.49730274", "0.49715078", "0.49530646", "0.49516717", "0.49507907", "0.4925206", "0.49247572", "0.4910749", "0.49103028", "0.48971096", "0.4896167", "0.4896167", "0.48952484", "0.4881694", "0.48490027", "0.48468244", "0.48423502", "0.48408186", "0.4839254", "0.48388535", "0.48348588", "0.48246896", "0.48135614", "0.48097873", "0.48024893", "0.4791785", "0.4786687", "0.478557", "0.47631633", "0.47629175", "0.47530577", "0.47499102", "0.47492307", "0.47487694", "0.4745623", "0.4733069", "0.473218", "0.47267818", "0.47218847", "0.47150636", "0.4712543", "0.4709798", "0.47022334", "0.47021508", "0.4701008", "0.46999004", "0.46998858", "0.4692332", "0.46909368", "0.468611", "0.4685505", "0.46793848", "0.46789804", "0.46760798", "0.46756122", "0.46558827", "0.465507", "0.46457353", "0.46428022", "0.4642185", "0.46377045", "0.46352047", "0.46340412", "0.46326262", "0.46325174", "0.4629526", "0.4624702", "0.46244323", "0.46150908", "0.46090838", "0.46090344", "0.4607303", "0.46038616" ]
0.6469481
2
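The document code in the row above depends on instance state (@template, n, m) and on row/col accessors patched onto Array, none of which appear in this row. Below is a rough standalone sketch of the same idea, assuming plain [row, col] arrays, a plus-shaped blast template of distance 2, and a 5x5 board; the helper name and those values are illustrative assumptions, not part of the dataset.

require 'set'

# Offset each template cell by the bomb position and keep the cells that
# land on the n-by-m board; walls are deliberately ignored at this stage.
def explosion_cells_without_walls(pos, template, n, m)
  range = Set.new
  template.each do |dr, dc|
    cell = [pos[0] + dr, pos[1] + dc]
    range << cell if (0...n).cover?(cell[0]) && (0...m).cover?(cell[1])
  end
  range
end

# Plus-shaped template with blast distance 2, centred on the bomb cell.
template = [[0, 0]]
(1..2).each { |k| template.concat([[k, 0], [-k, 0], [0, k], [0, -k]]) }

p explosion_cells_without_walls([0, 1], template, 5, 5)
# prints the six in-bounds cells within distance 2 of [0, 1]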
put a bomb explosion on position pos. return explosion range as a set
def explosion_range(bomb_pos) candidate = exposion_range_without_wall(bomb_pos) - @wall elect = Set.new walkable = Set.new(candidate) loop do break if candidate.empty? s, e = bomb_pos, candidate.first path = AStar(s, e, walkable) #no shortest path if not path candidate.delete(s) candidate.delete(e) elect << s next end #find shortest path path.each_index do |step| pos = path[step] candidate.delete(pos) elect << pos if step <= @d # bomb on bom_pos can reach pos end end return elect end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bomb_explosion_range(action = :increment)\n @bomb_manager.explosion_range action\n end", "def exposion_range_without_wall(pos)\n\t\trange = Set.new\n\t\[email protected] do |orig_pos|\n\t\t\ttrans_pos = Array.new(orig_pos)\n\t\t\ttrans_pos.row += pos.row\n\t\t\ttrans_pos.col += pos.col\n\t\t\tif (0...n).cover?(trans_pos.row) and (0...m).cover?(trans_pos.col)\n\t\t\t\trange << trans_pos\n\t\t\tend\n\t\tend\n\t\treturn range\n\tend", "def range(pos = @move_pos) \n if attack?\n max, min, bow, line, field, proj = battler.weapon_range\n if bow\n action_range = battler.calc_pos_bow(max, min, [pos])\n else\n action_range = battler.calc_pos_attack(max, min, [pos])\n end\n elsif skill?#skill\n max, field, line, exclude, min, v_range = battler.skill_range(item.id)\n if line\n action_range = battler.calc_pos_attack(max, min, [pos])\n else\n action_range = battler.calc_pos_spell(max, min, [pos], v_range)\n end\n elsif item?\n max, field, skill_id, v_range = battler.item_range(item.id)\n action_range = battler.calc_pos_spell(max, 0, [pos], v_range)\n else#item/guard \n action_range = []\n end\n return action_range\n end", "def set_end_postion(pos)\n actor_pos = []\n dir = $game_player.direction\n for i in 0...$game_party.actors.size\n if i == 0\n actor_pos[i] = pos\n else\n cat = $game_player.caterpillar[i - 1]\n case dir\n when 2 then actor_pos[i] = [pos[0], pos[1] - cat.member]\n when 4 then actor_pos[i] = [pos[0] + cat.member, pos[1]]\n when 6 then actor_pos[i] = [pos[0] - cat.member, pos[1]]\n when 8 then actor_pos[i] = [pos[0], pos[1] + cat.member]\n end\n end\n end\n return actor_pos\n end", "def drop_bomb(x,y)\n return nil if out_of_range?(x,y) || !spot_empty?(x,y)\n self[x,y] = spot_unoccupied?(x,y) ? MISS_MARK : HIT_MARK\n end", "def is_a_bomb?(pos)\n tile = @grid[pos[0]][pos[1]]\n tile.value == BOMB\n end", "def pos=(pos)\n x, y, step_x = pos[0], pos[1], FOUNDATION['indent']\n\n piles.each do |pile|\n pile.pos = [x, y]\n pile.background = Ray::Sprite.new image_path('foundation_bg')\n x += step_x # Margin between piles along the axis X.\n end\n end", "def salmon_around(position, range=10)\n (objects_around(position, range)[1]).find_all {|fish| fish.type == Salmon}\n end", "def map_in_range\n clean_grid\n UNITS.each do |unit|\n position = MAP[unit.y][unit.x]\n position.occupied = true\n if unit.team == 'G'\n position.goblin_in_grid = true\n position.up.goblin_in_range = true if position.up.type == '.'\n position.right.goblin_in_range = true if position.right.type == '.'\n position.down.goblin_in_range = true if position.down.type == '.'\n position.left.goblin_in_range = true if position.left.type == '.'\n else\n position.elf_in_grid = true\n position.up.elf_in_range = true if position.up.type == '.'\n position.right.elf_in_range = true if position.right.type == '.'\n position.down.elf_in_range = true if position.down.type == '.'\n position.left.elf_in_range = true if position.left.type == '.'\n end\n end\nend", "def attack(start_pos, end_pos)\n unless self[end_pos].color == self[start_pos].color\n self[start_pos].position = end_pos\n self[end_pos].position = start_pos\n self[end_pos] = NullPiece.new unless self[end_pos].is_a?(NullPiece)\n self[end_pos], self[start_pos] = self[start_pos], self[end_pos]\n end\n end", "def make_particles_stay_in_bounds scale\n # TODO: Better boundary conditions (THESE ARE A LAME WORKAROUND)\n particles.each do |particle|\n if particle.position.x >= scale - 0.01\n particle.position.x = scale - (0.01 + 0.1*rand)\n particle.velocity.x = 0\n elsif 
particle.position.x < 0.01\n particle.position.x = 0.01 + 0.1*rand\n particle.velocity.x = 0\n end\n\n if particle.position.y >= scale - 0.01\n particle.position.y = scale - (0.01+rand*0.1)\n particle.velocity.y = 0\n elsif particle.position.y < 0.01\n particle.position.y = 0.01 + rand*0.1\n particle.velocity.y = 0\n end\n end\n end", "def food_around(position, range=100)\n objects_around(position, range)[0]\n end", "def place_bombs\n range = ([email protected]).to_a\n total_bombs = (@grid.length ** 2) / 4\n @grid[range.sample][range.sample].value = BOMB until @grid.flatten.map(&:value).count(BOMB) == total_bombs\n end", "def position(val)\n @inside = true\n return val if (low..high).cover? val\n @inside = false\n return constrain(val, low, high)\n end", "def create_explosion\n explosion = ParticleFactory.buildParticles(\"big\", 80).set! :emission_direction => Vector3f.new(0, 1, 0),\n :maximum_angle => FastMath::PI, :speed => 1, :minimum_life_time => 600, \n :start_size => 3, :end_size => 7, \n :start_color => ColorRGBA.new(1, 0.312, 0.121, 1), \n :end_color => ColorRGBA.new(1, 0.24313726, 0.03137255, 0), \n :control_flow => false,\n :initial_velocity => 0.02, :particle_spin_speed => 0, :repeat_type => Controller::RT_CLAMP\n\n explosion.warmUp 1000\n explosion.render_state(@ts, @bs, @zstate)\n\n @explosions << explosion\n explosion\n end", "def pos=(pos); end", "def place_mines(num_mines, pos)\n coords = Array.new(height * width) { |i| [i / width, i % width] }\n coords.delete(pos)\n coords.shuffle!\n @mines = Array.new(height) { Array.new(width, false) }\n num_mines.times { |i| @mines[coords[i][0]][coords[i][1]] = true }\n nil\n end", "def calculate_aoe( range_max, range_min, x = @x, y = @y, v_range_aoe = 0) # mod MGC\n return [] if range_max == nil \n return [[x, y]] if range_max == 0\n\n #prevents items from being pushed if height change between core and outer is less than range/2\n positions = []\n #save position for later processing \n \n #prevents items from being pushed if height change between core and outer is less than range/1.5\n for i in 0..range_max\n it = range_max - i \n #test all couples (it, oy) like: 0 <= it+oy <= range_max\n for oy in 0..i\n next if it+oy < range_min \n for ux, uy in[ [x - it, y - oy], [x - it, y + oy], [x + it, y + oy], [x + it, y - oy] ]\n positions.push([ux, uy]) if valid_new_aoe_pos(ux, uy, x , y, v_range_aoe) and not positions.include?([ux, uy]) # mod MGC\n end\n end\n end\n return positions\n end", "def pos=(pos)\n x, y = pos[0], pos[1]\n step_x, step_y = TABLEAU['indent']['x'], TABLEAU['indent']['y']\n\n piles.each { |pile|\n pile.pos = [x, y]\n x += step_x # Margin between piles along the axis X.\n y2 = 0 # Y position of the first card.\n\n pile.cards.each_with_index do |card, i|\n card.sprite.y += y2\n y2 += step_y # Y axis margin.\n card.face_down unless pile.last_card?(card)\n end\n }\n end", "def pick(position)\n\t\[email protected] do |entity|\n\t\t\tentity.hit?(position)\n\t\tend\n\tend", "def pos_out_of_range(position)\n Error \"Position out of range! 
(#{position}) given, \" \\\n \"but the minefield is #{@num_cells} long.\"\n end", "def create_drops\n drops = []\n @number_of_drop_points.times do\n number_in_this_drop = rand(@max_particles - @min_particles) +\n @min_particles\n drops.push(Array.new(number_in_this_drop,\n Particle.new(@particle_stability_radius)))\n end\n\n drops\n end", "def create_ScriptMoveto_set(script)\n comm = SAC.new(\"MOVE\", [\"SCRIPT_TO\", script])\n set = SACS.new()\n set.list << comm\n set.skip_procs << get_defScript(\"MOVING\")\n str = %Q(\n data = character.dungeon_obj.get_next_tile()\n data = [-1, -1] if data.nil?\n character.pos?(data[0], data[1])\n )\n set.break_procs << SCR.new([\"character\"], str)\n set.type = :loop\n return set\n end", "def death\n death1 = Explosion.new(@x, @y, 0, @map, @color)\n death2 = Explosion.new(@x, @y, Math::PI / 2, @map, @color)\n death3 = Explosion.new(@x, @y, Math::PI, @map, @color)\n death4 = Explosion.new(@x, @y, Math::PI * 3 / 2, @map, @color)\n\n @map.EobjectArray.push(death1)\n @map.EobjectArray.push(death2)\n @map.EobjectArray.push(death3)\n @map.EobjectArray.push(death4)\n\n @map.PobjectArray.delete(self)\n end", "def set_range\n ref_gene = ref_genes.first\n [ref_gene.txStart, ref_gene.txEnd]\n end", "def set_screen_move_postion(pos)\n max_x = ($game_map.width - 20) * 128\n max_y = ($game_map.height - 15) * 128\n pos_x = [0, [pos[0] * 128 - $game_player.center_x, max_x].min].max\n pos_y = [0, [pos[1] * 128 - $game_player.center_y, max_y].min].max\n return [pos_x, pos_y]\n end", "def minmax_position(passes)\n # It's somewhere in the positions\n all_positions = positions(passes)\n\n # between the minimum and maximum seat\n min_pos, max_pos = all_positions.minmax\n\n # So we just eliminate any seat that's taken. Whatever is left over\n # is ours\n (min_pos..max_pos).to_a - all_positions\nend", "def move_piece!(start_pos,end_pos)\n piece=self[start_pos]\n raise 'Piece cannot move like that' unless piece.moves.include?(end_pos)\n self[end_pos]=piece\n self[start_pos]=sentinel\n piece.pos=end_pos\n nil\n end", "def expand_position(value)\r\n return value.map { |v| expand_position(v) } if value.is_a?(Array)\r\n value = positions[value] if value.is_a?(Symbol)\r\n value = value.to_expanded_i if value.is_a?(ActiveRecord::Acts::Positionable::Types::Base)\r\n value\r\n end", "def kick_bomb(bomb)\n bomb_manager.getBomb(bomb).moveTo(@new_x, @new_y)\n #or bomb.moveTo(:direction)\n end", "def select_moves(position, posibles)\n moves = []\n posibles.each { |move| moves << [position[0] + move[0], position[1] + move[1]] }\n moves = moves.select { |move| move[0].between?(1,8) && move[1].between?(1,8) }\nend", "def set_actors_screen_postion(pos)\n base_pos = [pos[0] - $game_player.x, pos[1] - $game_player.y]\n pos_x = $game_player.screen_x + (base_pos[0] * 32)\n pos_y = $game_player.screen_y + (base_pos[1] * 32)\n return [pos_x, pos_y]\n end", "def updatePosition\n super\n\n Bullet.BulletInstances.each { |bullet|\n if collision?(bullet)\n bullet.destroy\n explode\n end\n }\n\n Ship.ShipInstances.each { |ship|\n if collision?(ship)\n ship.destroy\n explode\n end\n }\n end", "def make_damage(a_point)\n \tmsj = 'hit'\n\tship = ship_at(a_point)\n\tship.get_hit(a_point)\n\tif(ship.state() == 'sink')\n\t\tremove_to_the_fleet(ship)\n\t\tmsj = 'sink'\n\tend\n\tmsj\t\n end", "def mate_around(position, range=10)\n salmon_around(position, range).find_all {|salmon| salmon.sex == Female}\n end", "def get_hit(a_point)\n\toccupied_points.reject! 
{ |point| point.is_equal(a_point) }\n\tset_state('damage') \n\tcheck_if_is_sink()\n end", "def person_in_spot(pos)\n\t\tpos % @seats.size\n\tend", "def probable_enemy_occupations(options = {})\n fail 'Position not given!' unless options[:pos]\n # starting with the rightmost...leftmost. Then bottommost...topmost\n occupation_points = []\n center = Vector[*options[:pos]]\n # horizontally, until ship goes too left\n head = center.dup\n until head == center + Vector[-(options[:ship_length]), 0]\n body = body_from_head(head, options[:ship_length], ACROSS)\n occupation_points << body if valid?(body, options[:board])\n head -= ACROSS\n end\n # vertically, again until ship goes too far up\n head = center.dup\n until head == center + Vector[0, -(options[:ship_length])]\n body = body_from_head(head, options[:ship_length], DOWN)\n occupation_points << body if valid?(body, options[:board])\n head -= DOWN\n end\n points = remove_invalid_shots!(occupation_points.flatten, options[:board])\n EdenPlayer.prob_board_matrix points\n end", "def extend_positions\r\n cache.values.each { |p| p.extend(extension) }\r\n # We freeze each of the positions EXCEPT the :number position, since\r\n # we adjust it's value and freeze later because the value is a range.\r\n cache.values.each { |p| p.freeze unless p.value.nil? }\r\n end", "def startcondense(pos)\n set = [pos]\n numfound = 0\n\n numfound = condense(set,pos,numfound)\n piecetype = @board[set[0]].identity\n\n #find bottommost piece\n endpoint = set.max\n\n #find bottommost/leftmost piece\n for i in (0..BRDSZ-1)\n if !set.include?(endpoint-1) and !@board[endpoint].isleft\n break\n else\n endpoint -= 1\n end\n end\n\n #cleanup\n for i in (0..numfound-1)\n if(set[i] != endpoint)\n @board[set[i]].assiden(\" \")\n end\n end\n @board[endpoint].next\n end", "def relative_range_era(points)\n relative_range = []\n player = $game_player\n x,y = player.x, player.y\n points.each do |v|\n relative_range.push(Vertex.new(v.x + x, v.y + y))\n end\n relative_range\n end", "def position=(point); end", "def collisions\n [collided_bombs, collided_rubies]\n end", "def set_pos(base_pos, base_screen, set_pos)\n diff_x = (base_screen[0] + ((set_pos[0] - base_pos[0]) * 32))\n diff_y = (base_screen[1] + ((set_pos[1] - base_pos[1]) * 32))\n return [diff_x, diff_y]\n end", "def touched_by bullet\n ([@width, @height].max/3).times do\n p_vx = rand(100)/10.0 - 5\n p_vy = rand(100)/10.0 - 5\n #@window.particles << Particle.new(@window, @x + rand(@width) - @width/2, @y + rand(@height) - @height/2, p_vx, p_vy)\n @window.particles << Live_Particle.new(@window, bullet.x + rand(@width) - @width/2, bullet.y + rand(@height) - @height/2, p_vx, p_vy, @colors.rand_in, 1 + rand)\n end\n damage bullet.damage\n end", "def get_mines(positions)\n positions.select { |pos| mine?(pos) }\n end", "def draw_ranges(positions, type)\n $spriteset.show_ranges(positions.flatten.compact)\n end", "def enemies_in_range(range)\n return case attacking_direction\n when :right\n @game_state.enemies.select do |e| \n (e.x > @x && e.x < (self.right + range)) && (e.bottom > @y || e.y < self.bottom)\n end\n when :left\n @game_state.enemies.select do |e| \n (e.right < @x && e.right > (@x - range)) && (e.bottom > @y || e.y < self.bottom)\n end\n when :up\n @game_state.enemies.select do |e| \n (e.bottom < @y && e.bottom > (@y - range)) && (e.right > @x && e.x < self.right)\n end\n when :down\n @game_state.enemies.select do |e| \n (e.y > self.bottom && e.y < (self.bottom + range)) && (e.right > @x && e.x < self.right)\n end\n end\n 
end", "def handle_explosions_to_spaceships\n return if @spaceships.empty? || @explosions.empty?\n @spaceships.product(@explosions).select {|pair| Collision.detect(*pair)}.each do |spaceship, explosion|\n spaceship.damage!(explosion.projectile)\n end\n end", "def pos(type = @type)\n @respawns[type] ||= []\n end", "def check_collisions!\n 1000.times do\n @particles.values.map(&:tick)\n position_groups = @particles.values.group_by(&:position)\n collisions = position_groups.select { |pos, particles| particles.size > 1 }\n collisions.values.flatten.collect(&:num).map { |num| @particles.delete(num) }\n end\n @particles\n end", "def pos_to_slot()\n {1 => 19, 2 => 25, 3 => 31, 4 => 87, 5 => 93, 6 => 99, 7 => 155, 8 => 161, 9 => 167}\nend", "def possibleEggMoves\n v=MultipleForms.call(\"possibleEggMoves\",self)\n return v if v!=nil\n pbRgssOpen(\"Data/eggEmerald.dat\",\"rb\"){|f|\n f.pos=(self.species-1)*8\n offset=f.fgetdw\n length=f.fgetdw\n if length>0\n bob=[]\n f.pos=offset\n i=0; loop do break unless i<length\n atk=f.fgetw\n bob.push(atk)\n i+=1\n end\n return bob\n else\n return []\n end\n }\n end", "def pos() end", "def pos() end", "def pos() end", "def pos() end", "def getMinMax(array,pos)\n minimum = 1000000\n maximum = -1000000\n for player in array\n if player[4] == pos\n minimum = [player[player.length-1],minimum].min\n maximum = [player[player.length-1],maximum].max\n end\n end\n return [minimum,maximum]\nend", "def set_actor_postions(actors_pos)\n $game_temp.battle_move = true\n $game_temp.actors_start_position = []\n @moving_actors = []\n for i in 0...$game_party.actors.size\n $game_temp.actors_position[i] = set_actors_screen_postion(actors_pos[i])\n @moving_actors[i] = i == 0 ? $game_player : $game_player.caterpillar[i-1]\n @moving_actors[i].move_update.clear if i > 0\n end\n loop do\n for i in 0...$game_party.actors.size\n set_battle_position(@moving_actors[i], actors_pos[i])\n $game_temp.actors_position[i] = set_actors_screen_postion(actors_pos[i])\n end\n update_basic(false, true, true)\n break if all_in_postion(actors_pos)\n end\n $game_temp.battle_move = false\n wait(10)\n end", "def eva\n [[super, 0].max, 100].min\n end", "def new_move_positions(pos)\n possible_moves = KnightPathFinder.valid_moves(pos)\n possible_moves.reject! 
{|a_pos| @considered_position.include?(a_pos)}\n possible_moves.each {|a_pos| @considered_position << a_pos }\n possible_moves\n end", "def touch_end_explosion\n vector = @current_point - @selected.position\n magnitude = Math.sqrt(vector.x**2 + vector.y**2)\n @delegate.explosion = magnitude\n @delegate.close_modal_view\n end", "def insert_mines\n @random_spots = []\n @num_of_mine.times do\n\n while @random_spots.length < @num_of_mine\n rand_num = Random.rand(@num_of_tiles**2)\n\n if !@random_spots.include?(rand_num)\n @random_spots << rand_num\n end\n\n end\n end\nend", "def place_fleet(pos_list)\n # Try to set all ships on the field\n res = pos_list.inject(true) do |a, l|\n a && @all_ships[l[0]].set!(l[1], l[2], l[3])\n end\n\n # If success, check something???\n res = @all_ships.inject(true) { |a, ship| a && ship.coord } if res\n\n # Remove all ships otherwise\n @all_ships.each { |ship| ship.kill if ship.coord } if !res\n\n res\n end", "def alien_hit(alien, bullet)\r\n # Deleting the alien and the bullet object\r\n @aliens.delete alien\r\n @bullets.delete bullet\r\n @explode = true\r\n @player.update_score()\r\n # Setting the coordinates for the explosion\r\n @explode_x = alien.x\r\n @explode_y = alien.y\r\n @explosion_sound.bmp.play \r\n end", "def update_pos(x=0,y=0)\n\n # E.g. for a yet to be hit submarine placed at starting position (0,0) horizontally, pos will be:\n # pos [ {:x => 0, :y => 0, :hit => 0 }, {:x => 1, :y => 0, :hit => 0 } ]\n @pos << {:x => x, :y => y, :hit => 0 }\n\n end", "def update_pos(x=0,y=0)\n\n # E.g. for a yet to be hit submarine placed at starting position (0,0) horizontally, pos will be:\n # pos [ {:x => 0, :y => 0, :hit => 0 }, {:x => 1, :y => 0, :hit => 0 } ]\n @pos << {:x => x, :y => y, :hit => 0 }\n\n end", "def ranges\n x_start, y_start = absolute(0, 0)\n x_end, y_end = absolute(@diameter - 1, @diameter - 1)\n\n [x_start..x_end, y_start..y_end]\n end", "def move_piece(start_pos,end_pos)\n begin \n raise \"No piece here to move\" if grid[start_pos[0]][start_pos[1]].nil?\n raise \"Can't move to this spot\" if end_pos[0] > 7 || end_pos[0] < 0 || end_pos[1] > 7 || end_pos[1] < 0 \n self[start_pos], self[end_pos] = self[end_pos],self[start_pos]\n end\n end", "def make\n @spaces.each { |position| position.occupied = true }\n # pp \"made ship: #{@spaces}\"\n end", "def new_move_positions(pos)\n candidates = KnightPathFinder.valid_moves(pos)\n candidates = candidates.select { |e| !@considered_positions.include?(e) }\n @considered_positions.concat(candidates)\n return candidates\n #@considered_positions = (@considered_positions.concat(candidates)).uniq\n end", "def extremes\n point = @points[0]\n #puts \"next\"\n #p [point.x, point.y]\n left = right = point.x # @position.x + point.x\n top = bottom = point.y # @position.y + point.y\n @points[1..-1].each do |point|\n #p [point.x, point.y]\n x, y = point.to_a # (@position + point).to_a\n left = x if x < left\n right = x if x > right\n top = y if y < top\n bottom = y if y > bottom\n end\n #p [left, right, top, bottom]\n [left, right, top, bottom]\n end", "def exon_on_gene_position(position)\n @vulgar_block.each do |vulgar|\n if position.between?(vulgar.query_start, vulgar.query_end)\n return vulgar\n end\n end\n nil\n end", "def destroy!\n (rand(3) + 5).times { Explosion.new(@x + rand(BoomOffset * 2) - BoomOffset, @y + rand(BoomOffset * 2) - BoomOffset) }\n kill!\n end", "def destroy!\n (rand(3) + 5).times { Explosion.new(@x + rand(BoomOffset * 2) - BoomOffset, @y + rand(BoomOffset * 2) - BoomOffset) }\n kill!\n 
end", "def makeLiving(points)\n points.map do |p|\n getCell(p[0],p[1]).spawn\n end\n end", "def get_set(start, finish, interval)\n cur_val = start; result = []\n while(cur_val < finish)\n result << cur_val\n cur_val += interval\n end\n result\n end", "def seed_grid\n @BOMBS.times do\n bomb_placed = false\n until bomb_placed\n pos = [rand(@Y_DIM), rand(@X_DIM)]\n unless self[pos].bomb\n self[pos].bomb = true\n bomb_placed = true\n end\n end\n end\n end", "def set_battle_position(actor, pos)\n return if actor.moving? or pos == [actor.x, actor.y]\n if actor.x == pos[0] and actor.y > pos[1]\n actor.y -= 1\n actor.turn_up\n elsif actor.x == pos[0] and actor.y < pos[1]\n actor.y += 1\n actor.turn_down\n elsif actor.x > pos[0] and actor.y == pos[1]\n actor.x -= 1\n actor.turn_left\n elsif actor.x < pos[0] and actor.y == pos[1]\n actor.x += 1\n actor.turn_right\n elsif actor.x < pos[0] and actor.y > pos[1]\n actor.turn_up\n actor.y -= 1\n actor.x += 1\n elsif actor.x > pos[0] and actor.y < pos[1]\n actor.turn_down\n actor.y += 1\n actor.x -= 1\n elsif actor.x > pos[0] and actor.y > pos[1]\n actor.turn_left\n actor.y -= 1\n actor.x -= 1\n elsif actor.x < pos[0] and actor.y < pos[1]\n actor.turn_right\n actor.y += 1\n actor.x += 1\n end\n actor.increase_steps\n end", "def projectile\n end", "def move_piece!(start_pos, end_pos)\n piece = self[start_pos]\n raise 'piece cannot move like that' unless piece.moves.include?(end_pos)\n\n self[end_pos] = piece\n self[start_pos] = sentinel\n piece.pos = end_pos\n\n nil\n end", "def move_piece!(start_pos, end_pos)\n piece = self[start_pos]\n raise 'piece cannot move like that' unless piece.moves.include?(end_pos)\n\n self[end_pos] = piece\n self[start_pos] = sentinel\n piece.pos = end_pos\n\n nil\n end", "def reveal_tile(pos = self.cursor.pos)\n raise \"spot taken\" if revealed?(pos)\n reveal_bombs if bomb?(pos)\n cascade(pos) unless bomb?(pos)\n end", "def _stack_add_attack(skill_index: nil, target_position: nil, target_list: nil, launcher: nil)\n if target_list\n target = target_list\n target_list.each_with_index do |pokemon, i|\n target_list[i] = get_pokemon_o(pokemon)\n end\n else\n target = -target_position - 1\n end\n launcher = get_pokemon_o(launcher)\n # Dirty Mega evolution add\n if BattleEngine.can_pokemon_mega_evolve?(launcher, get_bag(launcher))\n BattleEngine.prepare_mega_evolve(launcher, get_bag(launcher))\n end\n @results << [0, skill_index, target, launcher]\n end", "def in_bounds(pos)\n pos.all? 
{ |coord| coord.between?(0, 7) }\n end", "def shoot\n\t\tt = (Rubygame::Time.get_ticks() - @parent.stamp[:p])/1000.0\n\t\tv = Vector.new(1,0).rotate(@parent.angle)\n\t\tv.magnitude += 150\n\t\tp = @parent.project(t)\n\t\[email protected][0].push(@type.new(p,v,@lifespan))\n\tend", "def pos=(pos)\n @pos = pos\n end", "def pos\n [posx, posy]\n end", "def collision(damage)\n end", "def build_spans on_range\n @spans = Array.new @size[1]\n\n @size[1].times do |y|\n spans = []\n left = (@size[1]-y-1)*@size[0]\n start = nil\n\n @size[0].times do |x|\n d = on_range.include?(@pixels[left+x])\n\n if !start && d\n start = x\n elsif start && !d\n spans << [start, x]\n start = nil\n end\n end\n\n spans << [start, @size[0]] if start\n @spans[y] = spans\n end\n end", "def get_available_positions\n\t\tpositions = []\n\t\tfor i in (1..9) do\n\t\t\tx = ((i - 0.1) / 3).truncate\n\t\t\ty = (i - 1) % 3\n\t\t\tif self.is_valid?(x, y)\n\t\t\t\tpositions.push(i)\n\t\t\tend\n\t\tend\n\t\t\n\t\treturn positions\n\tend", "def d_range\n (d...dn_position).to_a\n end", "def d_range\n (d...dn_position).to_a\n end", "def pos_to_coords(pos)\n x = (pos % size)\n y = size - 1 - (pos / size).to_i\n\n Point.new x, y\n end", "def get_piece(pos)\n @grid[pos[0]][pos[1]]\n end", "def damage\n @points -= [10, 20, 30].sample\n end", "def collided_bombs\n bomb_layers.map do |bomb|\n bomb if bomb.collide_with?(GameData.player_layer.rect_version)\n end.compact\n end", "def forbidden_positions=(forbidden_positions)\n forbidden_positions.each do |position|\n @forbidden_positions << Position.new(position)\n end\n end", "def explore(pos)\n end", "def extremes\n left = @position.x - @radius\n right = @position.x + @radius\n top = @position.y - @radius\n bottom = @position.y + @radius\n [left, right, top, bottom]\n end" ]
[ "0.7093686", "0.6117279", "0.5506248", "0.5503385", "0.54874474", "0.5395577", "0.53761524", "0.53741246", "0.53574306", "0.5285964", "0.5251987", "0.52062863", "0.5205123", "0.5119077", "0.5111693", "0.50999427", "0.5072266", "0.5032305", "0.50094706", "0.49615857", "0.49483663", "0.49132416", "0.48860234", "0.4883632", "0.48723114", "0.48487717", "0.48344734", "0.48123837", "0.48087558", "0.48043808", "0.48034915", "0.47883242", "0.4777419", "0.47731432", "0.4772614", "0.47643173", "0.4761351", "0.4760048", "0.4751065", "0.4750432", "0.47498137", "0.47493544", "0.47487137", "0.47474205", "0.47328582", "0.473046", "0.47218305", "0.47165957", "0.47155416", "0.47107837", "0.47091305", "0.4703332", "0.46969348", "0.46958464", "0.46958464", "0.46958464", "0.46958464", "0.4693196", "0.4680832", "0.4672901", "0.46537602", "0.46518695", "0.4649554", "0.46443093", "0.46383297", "0.46293336", "0.46293336", "0.46250898", "0.4624176", "0.4622205", "0.46215373", "0.46193105", "0.4613019", "0.4611937", "0.4611937", "0.4607609", "0.4607377", "0.4605928", "0.4601678", "0.45980716", "0.45953807", "0.45953807", "0.45939925", "0.4588214", "0.45871747", "0.45762902", "0.45658162", "0.45611858", "0.45567515", "0.45565403", "0.45560727", "0.45517114", "0.45517114", "0.4544033", "0.45439565", "0.45371923", "0.45340753", "0.45337704", "0.45279616", "0.45278615" ]
document_score: 0.7783826
document_rank: 0
Remove the following if you're only using portrait
def will_animate_rotate(orientation, duration) find.all.reapply_styles end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def portrait?; end", "def landscape?; end", "def set_portrait\n @orientation = 1\n end", "def shouldAutorotateToInterfaceOrientation interfaceOrientation\n # // Return YES for supported orientations.\n # //return interfaceOrientation == UIInterfaceOrientationPortrait;\n true\n end", "def portrait?\n height > width\n end", "def set_landscape\n @orientation = 0\n end", "def portrait?\n width < height\n end", "def portrait\n filter << 'Face:Portrait'\n self\n end", "def landscape?\n xy_ratio >= 1\n end", "def orientation\n if landscape?\n :landscape\n elsif portrait?\n :portrait\n else\n :square\n end\n end", "def supportedInterfaceOrientations\n UIInterfaceOrientationMaskLandscape\n end", "def shouldAutorotateToInterfaceOrientation(toInterfaceOrientation)\n toInterfaceOrientation == UIInterfaceOrientationLandscapeLeft || toInterfaceOrientation == UIInterfaceOrientationLandscapeRight\n end", "def shouldAutorotateToInterfaceOrientation(toInterfaceOrientation)\n toInterfaceOrientation == UIInterfaceOrientationLandscapeLeft || toInterfaceOrientation == UIInterfaceOrientationLandscapeRight\n end", "def landscape\n config\n end", "def orientation\n System::get_property('screen_orientation').to_s\n end", "def kiosk_mode_allow_screen_rotation\n return @kiosk_mode_allow_screen_rotation\n end", "def portrait_format?\n return (self.image_width < self.image_height) ? true : false\n end", "def orientation_name\n return Orientation::PORTRAIT if size[0] < size[1]\n return Orientation::LANDSCAPE\n end", "def clear_device_orientation_override\n {\n method: \"Page.clearDeviceOrientationOverride\"\n }\n end", "def clear_device_orientation_override\n {\n method: \"DeviceOrientation.clearDeviceOrientationOverride\"\n }\n end", "def landscape?\n width > height\n end", "def required_rotation_for_upright_display\n required_rotation_orientation_in_degrees = (360 - self.orientation) % 360\n return required_rotation_orientation_in_degrees\n end", "def wantsFullScreenLayout\n true\n end", "def supportedInterfaceOrientations\n UIInterfaceOrientationMaskAll\n end", "def supportedInterfaceOrientations\n UIInterfaceOrientationMaskAll\n end", "def supportedInterfaceOrientations\n UIInterfaceOrientationMaskAll\n end", "def supportedInterfaceOrientations\n UIInterfaceOrientationMaskAll\n end", "def supportedInterfaceOrientations\n UIInterfaceOrientationMaskAll\n end", "def supportedInterfaceOrientations\n UIInterfaceOrientationMaskAll\n end", "def supportedInterfaceOrientations\n UIInterfaceOrientationMaskAll\n end", "def supportedInterfaceOrientations\n UIInterfaceOrientationMaskAll\n end", "def landscape?\n width > height\n end", "def kiosk_mode_allow_screen_rotation=(value)\n @kiosk_mode_allow_screen_rotation = value\n end", "def orientation(img)\n (img.rows > img.columns) ? \"landscape\" : \"portrait\"\n end", "def portrait1_dimensions_string\n \"#{PORTRAIT1_WIDTH}x#{PORTRAIT1_HEIGHT}\"\n end", "def full_screen; end", "def getOrientation()\n if `#{$orientationCmd}` =~ $orientationRE\n return $1 == '' ? 'normal' : $1\n else\n raise \"Could not determine orientation of #{$screen} from #{$orientationCmd}\"\n end\nend", "def auto_orient\n manipulate! 
do |image|\n o = image.get('exif-Orientation').to_i rescue nil\n o ||= image.get('exif-ifd0-Orientation').to_i rescue 1\n case o\n when 1\n # Do nothing, everything is peachy\n when 6\n image.rot270\n when 8\n image.rot180\n when 3\n image.rot90\n else\n raise('Invalid value for Orientation: ' + o.to_s)\n end\n image.set('exif-Orientation', '')\n image.set('exif-ifd0-Orientation', '')\n end\n end", "def fix_exif_rotation\n manipulate! do |img|\n img.tap(&:auto_orient)\n end\n end", "def isFlipped\n true\n end", "def autorotateMask\n device = UIDevice.currentDevice.userInterfaceIdiom\n if view.stylesheet and view.stylesheet.is_a?(Teacup::Stylesheet) and view.stylename\n properties = view.stylesheet.query(view.stylename, self, nil)\n\n orientations = 0\n if properties.supports?(:portrait) or properties.supports?(:upside_up)\n orientations |= UIInterfaceOrientationPortrait\n end\n\n if device == UIUserInterfaceIdiomPhone\n # :portrait does not imply upside_down on the iphone\n if properties.supports?(:upside_down)\n orientations |= UIInterfaceOrientationPortraitUpsideDown\n end\n else\n # but does on the ipad\n if properties.supports?(:portrait) or properties.supports?(:upside_down)\n orientations |= UIInterfaceOrientationPortraitUpsideDown\n end\n end\n\n if properties.supports?(:landscape) or properties.supports?(:landscape_left)\n orientations |= UIInterfaceOrientationLandscapeLeft\n end\n\n if properties.supports?(:landscape) or properties.supports?(:landscape_right)\n orientations |= UIInterfaceOrientationLandscapeRight\n end\n\n if orientations == 0\n orientations |= UIInterfaceOrientationPortrait\n end\n return orientations\n end\n\n # returns the system default\n if device == UIUserInterfaceIdiomPhone\n return UIInterfaceOrientationMaskAllButUpsideDown\n else\n return UIInterfaceOrientationMaskAll\n end\n end", "def wantsFullScreenLayout\n\t\ttrue\n\tend", "def orientation=(orientation='h')\n if orientation == 'h' || orientation == 'horizontal'\n self.horizontal = true\n elsif orientation == 'v' || orientation == 'vertical'\n self.horizontal = false\n end\n end", "def set_perspective\n # do nothing\n end", "def orientation\n @orientation ||= :column\n end", "def autorotateToOrientation(orientation)\n device = UIDevice.currentDevice.userInterfaceIdiom\n if view.stylesheet and view.stylesheet.is_a?(Teacup::Stylesheet) and view.stylename\n properties = view.stylesheet.query(view.stylename, self, orientation)\n\n # check for orientation-specific properties\n case orientation\n when UIInterfaceOrientationPortrait\n # portrait is \"on\" by default, must be turned off explicitly\n if properties.supports?(:portrait) == nil and properties.supports?(:upside_up) == nil\n return true\n end\n\n return (properties.supports?(:portrait) or properties.supports?(:upside_up))\n when UIInterfaceOrientationPortraitUpsideDown\n if UIDevice.currentDevice.userInterfaceIdiom == UIUserInterfaceIdiomPhone\n # iphone must have an explicit upside-down style, otherwise this returns\n # false\n return properties.supports?(:upside_down)\n else\n # ipad can just have a portrait style\n return (properties.supports?(:portrait) or properties.supports?(:upside_down))\n end\n when UIInterfaceOrientationLandscapeLeft\n return (properties.supports?(:landscape) or properties.supports?(:landscape_left))\n when UIInterfaceOrientationLandscapeRight\n return (properties.supports?(:landscape) or properties.supports?(:landscape_right))\n end\n\n return false\n end\n\n # returns the system default\n if device == 
UIUserInterfaceIdiomPhone\n return orientation != UIInterfaceOrientationPortraitUpsideDown\n else\n return true\n end\n end", "def auto_orient\n manipulate! do |image|\n o = image.get('exif-Orientation').to_i rescue nil\n o ||= image.get('exif-ifd0-Orientation').to_i rescue 1\n case o\n when 1\n # Do nothing, everything is peachy\n when 6\n image.rot270\n when 8\n image.rot180\n when 3\n image.rot90\n else\n raise('Invalid value for Orientation: ' + o.to_s)\n end\n image.set_type GObject::GSTR_TYPE, 'exif-Orientation', ''\n image.set_type GObject::GSTR_TYPE, 'exif-ifd0-Orientation', ''\n end\n end", "def screen_z\n super\n end", "def screen_z\n super\n end", "def landscape_format?\n return (self.image_width > self.image_height) ? true : false\n end", "def orient_to_m(orientation)\n if orientation.eql? :across\n [1,0]\n else\n [0,1]\n end\n end", "def handle_offscreen_right() end", "def handle_offscreen_right() end", "def hide_footer?\n is_onboarding? || is_fullscreen_page?\n end", "def _conditional_layout?; end", "def portrait(size)\n unless File.exists? \"#{RAILS_ROOT}/public/system/characters/#{self.id}_#{size.to_i}.jpg\"\n begin\n File.open(\"#{RAILS_ROOT}/public/system/characters/#{self.id}_256.jpg\",\"wb\") do |f|\n Net::HTTP.start(\"image.eveonline.com\") do |http|\n resp = http.get(\"/Character/#{self.id}_256.jpg\")\n f << resp.body\n end\n end\n # Now use MiniMagick to bake some 16/32 images from the larger source\n \n image = MiniMagick::Image.from_file(\"#{RAILS_ROOT}/public/system/characters/#{self.id}_256.jpg\")\n image.resize \"16x16\"\n image.write(\"#{RAILS_ROOT}/public/system/characters/#{self.id}_16.jpg\")\n \n image = MiniMagick::Image.from_file(\"#{RAILS_ROOT}/public/system/characters/#{self.id}_256.jpg\")\n image.resize \"32x32\"\n image.write(\"#{RAILS_ROOT}/public/system/characters/#{self.id}_32.jpg\")\n \n image = MiniMagick::Image.from_file(\"#{RAILS_ROOT}/public/system/characters/#{self.id}_256.jpg\")\n image.resize \"64x64\"\n image.write(\"#{RAILS_ROOT}/public/system/characters/#{self.id}_64.jpg\")\n rescue\n return \"/images/logo_not_found_#{size}.jpg\"\n end\n end\n return \"/system/characters/#{self.id}_#{size.to_i}.jpg\"\n end", "def max_tilt_angle; end", "def landscape_orientation?\n driver.orientation.to_s.upcase == LANDSCAPE\n rescue NameError, Selenium::WebDriver::Error::UnknownError\n Applitools::EyesLogger.debug 'driver has no \"orientation\" attribute. 
Assuming: portrait.'\n false\n end", "def toggle_fullscreen\n self.fullscreen = !fullscreen?\n end", "def transparency_mode\n super\n end", "def default_flip\n return false\n end", "def default_flip\n return false\n end", "def screen_y\n return 464\n end", "def set_portrait\n @portrait = Portrait.find(params[:id])\n end", "def FT_HAS_VERTICAL(face)\n face.face_flags & FaceFlag[:VERTICAL]\n end", "def no_layout?; end", "def force_tablet_html\n session[:tablet_view] = false\n end", "def unfullscreen_height\n\n return unless @window\n\n @window.decorated = true\n @window.set_resizable(true)\n\n # Resize the width of the window\n width = @window.screen.width\n height = @window.screen.height\n\n # We need to change the minimum size of the window\n min_width = width / 4\n min_height = height / 4\n @window.set_size_request(min_width, min_height)\n #puts \"height: #{width} / #{height}\"\n\n @window.unfullscreen\n\n # then we can resize to a smaller size\n new_height = height / 2\n @window.move(0, 0)\n @window.resize(width, new_height)\nend", "def orientation=(value)\n\t\t\tvalue = GrabzIt::Utility.nil_check(value).capitalize\n\t\t\t@orientation = value\n\t\tend", "def height\n a = self.orientation\n return 0.inch if not a\n a[0].z\nend", "def _screen_width\n\t\t80\n\tend", "def always_face_camera?\n end", "def hide_header?\n @web_view_mode || is_fullscreen_page?\n end", "def use_section_planes?\n end", "def unfullscreen_width\n\n return unless @window\n\n @window.decorated = true\n @window.set_resizable(true)\n\n # Resize the width of the window\n width = @window.screen.width\n height = @window.screen.height\n \n # We need to change the minimum size of the window\n min_width = width / 4\n min_height = height / 4\n @window.set_size_request(min_width, min_height)\n #puts \"width : #{width} / #{height}\"\n\n @window.unfullscreen\n\n # then we can resize to a smaller size\n new_width = width / 2 \n @window.move(0, 0)\n @window.resize( new_width , height)\nend", "def set_device_orientation_override(alpha:, beta:, gamma:)\n {\n method: \"Page.setDeviceOrientationOverride\",\n params: { alpha: alpha, beta: beta, gamma: gamma }.compact\n }\n end", "def fix_exif_rotation\n manipulate! do |img|\n img.auto_orient\n img = yield(img) if block_given?\n img\n end\n end", "def fix_exif_rotation\n manipulate! do |img|\n img.auto_orient\n img = yield(img) if block_given?\n img\n end\n end", "def fix_exif_rotation\n manipulate! do |img|\n img.auto_orient\n img = yield(img) if block_given?\n img\n end\n end", "def orientation=(value)\n if value\n if value == :landscape\n value = :landscape_left\n elsif !value.is_a?(Symbol)\n value = ORIENTATIONS[value] || :unknown\n end\n end\n @custom_orientation = value\n end", "def set_board_pin_mode\n\n @axis_x.set_pin_mode()\n @axis_y.set_pin_mode()\n @axis_z.set_pin_mode()\n\n end", "def is_traitable\n true\n end", "def turn_right\n @orientation == 3 ? 
@orientation = 0 : @orientation += 1\n end", "def set_device_orientation_override(alpha:, beta:, gamma:)\n {\n method: \"DeviceOrientation.setDeviceOrientationOverride\",\n params: { alpha: alpha, beta: beta, gamma: gamma }.compact\n }\n end", "def use_camera?\n end", "def exempt_from_layout?\n false\n end", "def confirm_plate_orientation\n 'MAKE SURE THAT THE PLATE IS IN THE CORRECT ORIENTATION'\n end", "def confirm_plate_orientation\n 'MAKE SURE THAT THE PLATE IS IN THE CORRECT ORIENTATION'\n end", "def camera_reposition_case?\n $imported[:TSBS_Camera] && @animation.position != 3\n end", "def camera_reposition_case?\n $imported[:TSBS_Camera] && @animation.position != 3\n end", "def portrait_url(id, full = false)\n if id\n if full\n \"http://#{self.region.downcase}.battle.net/static-render/#{self.region.downcase}/#{id.sub!('avatar', 'profilemain')}\"\n else\n \"http://#{self.region.downcase}.battle.net/static-render/#{self.region.downcase}/#{id}\"\n end\n end\n end", "def meta_viewport\n \t\"width=device-width, initial-scale=1\"\n \tend", "def confirm_plate_orientation()\n \"MAKE SURE THAT THE PLATE IS IN THE CORRECT ORIENTATION\"\n end", "def layout_full_center\n nil\n end", "def screen_width()\n end", "def willAnimateRotationToInterfaceOrientation(orientation, duration:duration)\n view.restyle!(orientation)\n end", "def retrofy\r\n Gosu::enable_undocumented_retrofication\r\n self\r\n \r\n #\r\n # The below code depends on the bad opengl gem\r\n # And it could affect other images anyhow... \r\n # So let's use Gosu::enable_undocumented_retrofication until further notice.\r\n #\r\n \r\n #glBindTexture(GL_TEXTURE_2D, self.gl_tex_info.tex_name)\r\n #glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST)\r\n #glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST)\r\n #self\r\n end", "def needs_rotate?\n false\n end", "def profile_mode\n super\n end", "def profile_mode\n super\n end", "def attacking_horizontal?\n false\n end" ]
[ "0.8359375", "0.7413271", "0.7157937", "0.68576723", "0.6791786", "0.6665831", "0.65481746", "0.64277995", "0.6297777", "0.61366683", "0.61036855", "0.6103258", "0.6103258", "0.60684985", "0.60276175", "0.60117775", "0.59446466", "0.5876145", "0.58523935", "0.57331425", "0.5705937", "0.5704452", "0.56921375", "0.5614063", "0.5614063", "0.5614063", "0.5614063", "0.5614063", "0.5614063", "0.5614063", "0.5613455", "0.5562241", "0.5513298", "0.5495666", "0.5484447", "0.5479459", "0.5466109", "0.54652655", "0.5446315", "0.5428117", "0.53924286", "0.53578675", "0.5326544", "0.53060997", "0.5265186", "0.5255082", "0.5241974", "0.5231725", "0.5231725", "0.5218316", "0.5181293", "0.51741046", "0.51741046", "0.5169215", "0.51585346", "0.515502", "0.51222795", "0.5094753", "0.5078161", "0.50743055", "0.5073916", "0.5073916", "0.5056731", "0.5050899", "0.50398564", "0.50345117", "0.5033081", "0.5016945", "0.5008732", "0.49974257", "0.49970123", "0.49802366", "0.4959645", "0.49503833", "0.4948062", "0.494732", "0.49314067", "0.49281272", "0.49269348", "0.49245265", "0.4923152", "0.49081355", "0.49067345", "0.48987004", "0.48971248", "0.48853767", "0.48848078", "0.48848078", "0.4872365", "0.4872365", "0.48691434", "0.4862655", "0.48618212", "0.48530996", "0.4848531", "0.48483828", "0.4840107", "0.48359138", "0.48340076", "0.48340076", "0.4821199" ]
document_score: 0.0
document_rank: -1
Subject can be set in your I18n file at config/locales/en.yml with the following lookup: en.mod_submission_mailer.mail_to_admin.subject
def mail_to_admin(mod_submission) @mod_submission = mod_submission mail to: "[email protected]" , subject: "One New Module Submission Available For Review" end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def subject\n @mail.subject\n end", "def subject (recipient)\n subject_variables = alert_variables[:subject].dup\n subject_variables.merge!(recipient_details(recipient))\n subject = \"#{I18n.t(\"#{recipient_type.to_s}_subject_#{alert_name.to_s}\", subject_variables)}\"\n subject\n end", "def message_subject=(value)\n @message_subject = value\n end", "def subject\n self['subject'] || msg['subject']\n end", "def set_EmailSubject(value)\n set_input(\"EmailSubject\", value)\n end", "def set_EmailSubject(value)\n set_input(\"EmailSubject\", value)\n end", "def subject\n @subject ||= Envelope::MessageTools.normalize(message.subject || '')\n end", "def translate(mapping, key)\n I18n.t(:\"#{mapping.name}_subject\", :scope => [:devise, :mailer, key],\n :default => [:subject, key.to_s.humanize])\n end", "def set_EmailSubject(value)\n set_input(\"EmailSubject\", value)\n end", "def set_EmailSubject(value)\n set_input(\"EmailSubject\", value)\n end", "def subject=(string)\n set('Subject', string)\n end", "def subject=(subject); @message_impl.setSubject subject; end", "def setSubject(subject)\n @fields['subject'] = subject\n self\n end", "def setSubject(subject)\n @fields['subject'] = subject\n self\n end", "def setSubject(subject)\n @fields['subject'] = subject\n self\n end", "def setSubject(subject)\n @fields['subject'] = subject\n self\n end", "def translate(mapping, key)\n I18n.t(:\"notifications_subject\", :scope => [:eventifier, :notifications, key],\n :default => [:subject, key.to_s.humanize])\n end", "def subject_name=(value)\n @subject_name = value\n end", "def formatted_subject(text)\n name = PersonMailer.global_prefs.app_name\n label = name.blank? ? \"\" : \"[#{name}] \"\n \"#{label}#{text}\"\n end", "def set_Subject(value)\n set_input(\"Subject\", value)\n end", "def set_Subject(value)\n set_input(\"Subject\", value)\n end", "def set_Subject(value)\n set_input(\"Subject\", value)\n end", "def set_Subject(value)\n set_input(\"Subject\", value)\n end", "def set_Subject(value)\n set_input(\"Subject\", value)\n end", "def set_Subject(value)\n set_input(\"Subject\", value)\n end", "def set_Subject(value)\n set_input(\"Subject\", value)\n end", "def set_Subject(value)\n set_input(\"Subject\", value)\n end", "def subject_name\n return @subject_name\n end", "def subject() self.headers[\"Subject\"] || \"[NO SUBJECT]\" end", "def email_subject(form)\n \"#{form.type_of_enquiry} - #{reference}\"\n end", "def subject\n @subject ||= \"(sans sujet)\"\n if @no_header_subject.nil?\n \"#{header_subject}#{@subject}\"\n else\n @subject\n end\n end", "def subject\n self['subject']\n end", "def subject_name\n subject_full_name\n end", "def subject=(value)\n @subject = value\n end", "def subject=(value)\n @subject = value\n end", "def subject=(value)\n @subject = value\n end", "def subject=(value)\n @subject = value\n end", "def subject=(value)\n @subject = value\n end", "def subject=(value)\n @subject = value\n end", "def set_subject(subject)\n\t\tend", "def SetSubject(subject)\n\t\t#Subject of document\n\t\t@subject = subject\n\tend", "def subject_for(template, attributes = {})\n subject = EmailTemplate.subject_for(template)\n subject = I18n.t(\"email_templates.#{template}.default_subject\") if subject.nil?\n subject = \"No Subject\" if subject.nil?\n Florrick.convert(subject, add_default_attributes(attributes))\n end", "def get_email_subject(email_type)\n email_subject = email_type\n case(email_type)\n when \"welcome\"\n email_subject = \"Welcome to Aspera Files\"\n when \"reset\"\n email_subject = 
\"Password Reset\"\n end\n return email_subject\n end", "def message_subject\n return @message_subject\n end", "def subject\n message.subject\n end", "def subject\n title \n end", "def subject\n @options.fetch(:subject) { \"Invitation\" }\n end", "def subject_titles\n @subject_titles ||= sw_subject_titles\n end", "def normalize_subject_name\n self.subject = subject.downcase.titleize\n end", "def email_subject\n sponsor_name = @config.plan.sponsor_name\n display_date = @date.to_s()\n if @config.div_id.present?\n email_subject = \"Payroll report for #{sponsor_name} for division #{@config.division_name}: #{display_date}\"\n else\n email_subject = \"Payroll report for #{sponsor_name}: #{display_date}\"\n end\n return email_subject\n end", "def choose_subject(action, params = {})\n scope = [:mailers, mailer_name, action]\n key = :subject\n experiment_name = \"#{mailer_name}_mailer_#{action}_subject\".to_sym\n if experiment_active?(experiment_name)\n scope << key\n key = ab_test(experiment_name)\n end\n params.merge!(scope: scope)\n I18n.t(key, params)\n end", "def compose_email\n @title = t 'conclusion_draft_review.send_by_email'\n end", "def set_title\n @title = t(:message_2, :scope => [:controller, :exams])\n end", "def custom_mail( user, subject, title, contents )\n @user = user\n @host = GogglesCore::AppConstants::WEB_MAIN_DOMAIN_NAME\n @contents = contents\n @title = title\n #subject: \"[#{ GogglesCore::AppConstants::WEB_APP_NAME }@#{ @host }] #{ subject }\",\n mail(\n subject: \"#{ subject } [#{GogglesCore::AppConstants::WEB_APP_NAME}]\",\n to: user.email,\n date: Time.now\n )\n end", "def subject; @message_impl.getSubject; end", "def email_subject(&blk)\n @email_subject_block = blk if blk\n @email_subject_block\n end", "def get_subject_name\n subject_name = subject_header.text.sub! 
'Subject:', ''\n subject_name = subject_name.strip!\n subject_name\n end", "def get_subject\n\t\tend", "def deliver_invitation(options = {})\n super(options.merge(subject: _('A Data Management Plan in %{application_name} has been shared with you') % {application_name: Rails.configuration.branding[:application][:name]}))\n end", "def editcompany_email(company)\n @company = company\n @message = t('mailers.company.updated')\n \n emails = AdminUser.all.collect(&:email).join(\",\")\n\n mail(:to => emails, :subject => \"#{t('site_title')}: #{@message}\")\n end", "def subject_names\n @subject_names ||= sw_subject_names\n end", "def subject(options = {})\n options = { :capitalize => true, :case => Grammar::Case::SUBJECT }.merge(options)\n pronoun_or_noun(@subject, @audience, options)\n end", "def translation_scope\n \"mailers.#{mailer_name.tr(\"/\", \".\").sub(\"_mailer\", \"\")}.#{action_name}\"\n end", "def subject_alternative_name\n extensions[R509::Cert::Extensions::SubjectAlternativeName]\n end", "def notify_company_admin(admin, task_submission)\n @submitter = task_submission.submitter\n\n I18n.with_locale(admin.locale) do\n mail(to: admin.email, subject: I18n.t(\"tskz.notifier.requesting_approval_of_tasks\", name: @submitter.full_name), track_opens: true)\n end\n end", "def admin_notified(error_text_for_admin)\r\n @error_text_for_admin = error_text_for_admin\r\n mail to: 'admin@fancy_gifts_shop.net', subject: 'Error from the site www.depot.com !'\r\n end", "def headers\n { subject: \"#{I18n.t('cms.contact_form.subject_prefix')}: #{reason}: #{subject}\",\n to: Account.current.preferred_support_email,\n from: Account.current.preferred_support_email,\n reply_to: %(\"#{name}\" <#{email}>) }\n end", "def admin_mail_params\n params.require(:admin_mail).permit(:user_name, :subject, :content)\n end", "def admin_email\n ContactMailer.admin_email\n end", "def subject\n map_field(:subject)&.map { |a| a.gsub(/ \\$[a-z] /, '--') }\n end", "def mmm_test_subj_call\n ->(candidate) { I18n.t('email.test_monthly_mail_subject_initial_input', candidate_account_name: candidate.account_name) }\n end", "def set_subject\n @subject = Subject.friendly.find(params[:id])\n end", "def getEmailDefaults(subject, toEmail, ccEmail = nil)\n if Rails.env.eql? 
'development'\n subject = \"[BASL-DEV] #{subject}\"\n toEmail = '[email protected]'\n ccEmail = toEmail\n else\n subject = \"[BASL] #{subject}\"\n end\n mailInfo = {\n :to => toEmail,\n :subject => subject,\n :cc => ccEmail\n }\n mailInfo\n end", "def subject\n @subject=EzCrypto::Name.new(@cert.subject) unless @subject\n @subject\n end", "def admin_broadcast(desired_locale, emails_as_string, subject, body)\n ActionMailer::Base.smtp_settings = APP_CONFIG[:admin_mail]\n set_locale( desired_locale )\n\n @content_type = \"text/html\"\n @recipients = \"[email protected]\"\n @bcc = emails_as_string\n @from = head_encode(\"\\\"Kroogi (No Reply)\\\"\".t) + \" <[email protected]>\"\n @subject = head_encode(\"[Kroogi] \" + subject)\n @sent_on = Time.now\n @body[:url] = \"http://#{APP_CONFIG[:hostname]}/\"\n @body[:body] = body\n end", "def konsalt_mail params\n build_params params\n send_email t('emails.konsalta_mail.subject')\n end", "def data_subject=(value)\n @data_subject = value\n end", "def newcompany_email(company)\n @company = company\n @message = t('mailers.company.created')\n \n emails = AdminUser.all.collect(&:email).join(\",\")\n\n mail(:to => emails, :subject => \"#{t('site_title')}: #{@message}\")\n \n end", "def set_subject_and_message(form, subject, message)\n raise Impostor::MissingTemplateMethodError.new(\"set_subject_and_message must be implemented\")\n end", "def set_title\n @title = t(:message_0, :scope => [:controller, :scholarships])\n end", "def subject\n @subject\n end", "def default_sender_address\n address = Mail::Address.new(Gitlab.config.gitlab.email_from)\n address.display_name = \"GitLab\"\n address\n end", "def subject\n @subject\n end", "def subject\n @subject\n end", "def subject_topic\n map_field(:subject_topic)&.map { |a| a.gsub(/ \\$[a-z] /, '--') }\n end", "def setup_admin_email(official)\n @recipients = Array.new\n @recipients << 'Jeremy Driscoll <[email protected]>'\n do_not_deliver! 
if @recipients.empty?\n @from = AppConfig.admin_email\n @subject = \"[Q2016] \"\n @sent_on = Time.now\n @body[:official] = official\n end", "def subject=(subject)\n self.subjects = [subject]\n end", "def community_member_email(sender, recipient, email_subject, email_content, community)\n @email_type = \"email_from_admins\"\n set_up_layout_variables(recipient, community, @email_type)\n with_locale(recipient.locale, community.locales.map(&:to_sym), community.id) do\n @email_content = email_content\n @no_recipient_name = true\n premailer_mail(:to => recipient.confirmed_notification_emails_to,\n :from => community_specific_sender(community),\n :subject => email_subject,\n :reply_to => \"\\\"#{sender.name(community)}\\\"<#{sender.confirmed_notification_email_to}>\")\n end\n end", "def subject(*args)\n subject = \"#{Settings['application']['name']} | \"\n subject << args.join(' ') if args.present?\n subject\n end", "def subject(*extra)\n subject = \"\"\n subject << \"#{@project.name} | \" if @project\n subject << extra.join(' | ') if extra.present?\n subject\n end", "def set_subject\n @subject = Subject.find(params[:subject_id])\n end", "def set_subject\n\t\t\t@subject = Subject.find(params[:id])\n\t\tend", "def submission_graded_email(submission)\n ActsAsTenant.without_tenant do\n @course = submission.assessment.course\n end\n @recipient = submission.creator\n @assessment = submission.assessment\n @submission = submission\n\n I18n.with_locale(@recipient.locale) do\n mail(to: @recipient.email,\n subject: t('.subject', course: @course.title, assessment: @assessment.title))\n end\n end", "def subject\n return @subject\n end", "def subject\n return @subject\n end", "def subject\n return @subject\n end", "def subject\n return @subject\n end", "def subject\n return @subject\n end", "def subject\n return @subject\n end", "def subject\n 'Report a problem'.freeze\n end" ]
[ "0.7019005", "0.6982073", "0.69610137", "0.6836579", "0.6824567", "0.68236315", "0.68006516", "0.6797426", "0.67960495", "0.67943686", "0.6647366", "0.6613251", "0.65677065", "0.65677065", "0.65677065", "0.65677065", "0.64939994", "0.6483839", "0.6471161", "0.6432198", "0.6432198", "0.6432198", "0.6432198", "0.6432198", "0.6432198", "0.6432198", "0.6432198", "0.6387766", "0.6337453", "0.63154364", "0.63118637", "0.6274479", "0.62633467", "0.62536526", "0.62536526", "0.62536526", "0.62536526", "0.62536526", "0.62536526", "0.6232536", "0.6204891", "0.6201015", "0.6195865", "0.6176447", "0.6152482", "0.613019", "0.6115117", "0.60818213", "0.6036141", "0.6031365", "0.6028493", "0.6003259", "0.5960343", "0.59484076", "0.59005445", "0.5894945", "0.58927035", "0.58598065", "0.58517575", "0.58514166", "0.5820101", "0.5819849", "0.5815407", "0.5806221", "0.579046", "0.57836944", "0.5781255", "0.5775535", "0.5771487", "0.5767961", "0.57631314", "0.5759916", "0.5754575", "0.5744764", "0.5731424", "0.57217306", "0.56589824", "0.5651278", "0.56438416", "0.56251985", "0.5622615", "0.5600838", "0.5586011", "0.5586011", "0.5561379", "0.5558118", "0.5558082", "0.5556132", "0.55526674", "0.55436397", "0.5533923", "0.55235", "0.55003643", "0.5495009", "0.5495009", "0.5495009", "0.5495009", "0.5495009", "0.5495009", "0.5493328" ]
document_score: 0.60462695
document_rank: 48
GET /orders GET /orders.json
def index @orders = Order.by_state(params[:state]).search(params[:keyword]).page params[:page] respond_to do |format| format.html format.json format.csv { send_data @orders.to_csv } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getorders(args={})\n {\n :method=> \"GetOrders\"\n }.to_json\n end", "def orders\n params = { command: 'account_orders' }\n get('/json.php', params)\n end", "def index\n @orders = Order.all\n render json: @orders\n end", "def index\n @orders = Order.all\n render json: @orders, status: 200\n end", "def orders\n authenticated_post(\"orders\").body\n end", "def orders\n authenticated_post(\"auth/r/orders\").body\n end", "def index\n @orders = Order.order(\"id\").all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = Order.all\n respond_to do |format|\n format.html\n format.json { render :json => @orders }\n end\n end", "def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def get_all_orders() \n\tputs \"Getting all orders\"\n\tresponse = request_get(\"/api/order\")\n\tputs response.body\nend", "def index\n #@orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def get_orders\n return ShopifyAPI::Order.all\n end", "def index\n\t\t@orders = Order.all\n\t\trespond_with @orders\n\tend", "def listOrders()\n if(!authenticateAdmin(params[:admin_id], params[:admin_auth_key]))\n render json: {status: false, reason: \"Authentication Failed\", data: \"\"}\n return\n end\n ret = []\n Order.find_each do |order|\n ret << {id: order.id, product_id: order.product_id, product_name: Product.find(order.product_id).name, user_id: order.user_id, quantity: order.quantity, price_per_unit: order.price_per_unit, int_status: order.order_status}\n end\n render json: {data: ret.to_json, reason: '', status: true}\n end", "def index\n @orders = Order.all\n if @orders.count >= 1\n json_response(@orders)\n else\n json_response({ Message: Message.no_data }, :not_found)\n end\n end", "def recent_orders()\n\t\twith_auth nil do |options|\n\t\t\tself.class.get(\"/api/v1/orders\", options)\n \tend\n\tend", "def index\n @orders = Order.all\n render json: { status: 'SUCCESS', message: 'Loaded posts', data: @orders }\n end", "def show\n render json: @order\n end", "def show\n render json: @order\n end", "def index\n @orders = Order.paginate page: params[:page], order: 'created_at desc', \n per_page: 10\n \n respond_to do |format|\n format.html\n format.json { render json: @orders } \n end\n end", "def index\n @power_orders = PowerOrder.all\n render json: @power_orders\n end", "def show\n order = Order.find(params[:id])\n render json: order\n end", "def index\n @order_line_items = @order.order_line_items\n\n render json: @order_line_items\n end", "def index\n @orders = Order.order(\"id DESC\").page(params[:page])\n\n if params[:user_id]\n @orders = @orders.where(:user_id 
=> params[:user_id])\n end\n\n if Rails.configuration.orders_status.select{|k, v| v[:real]}.keys.include? params[:status]\n @orders = @orders.where(:status => params[:status])\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @admin_orders = Order.page(params[:page]).per(10)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @admin_orders }\n end\n end", "def orders(params = {})\n get('/market/orders', params)\n .map { |data| build_persisted_order data }\n end", "def show\n @order = Order.find(params[:id])\n\n render json: @order\n end", "def index\n #data = HTTParty.get(\"http://localhost:8081/customers.json\")\n #p data.parsed_response[0]['email']\n if params[:customerId].present?\n @orders = Order.where(\"customerId\": params[:customerId].to_i)\n render json: @orders, status: 200\n elsif params[:id].present?\n @orders = Order.find_by id: params[:id]\n render json: @orders, status:200\n elsif params[:email].present?\n res = HTTParty.get(\"http://localhost:8081/customers/?email=#{params['email']}\")\n p res\n res = res.parsed_response\n @orders = Order.where(\"customerId\": res['id'].to_i)\n render json: @orders, status:200\n else\n @orders = Order.all\n end\n end", "def all_orders(options)\n request :account, :get, 'allOrders', options\n end", "def index\n @orders = @group.orders\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index # see model > order.rb\n # if current_user\n @orders = current_user.orders\n render \"index.json.jb\"\n # else\n # render json: [], status: :unauthorized\n # end\n end", "def get_orders\n orders\n end", "def index\n @orders = order.all\n end", "def show\n @order = Order.find(params[:id])\n\n render json: @order, status: :ok\n\n end", "def get_order(order_id)\n\tputs \"Getting order: \" + order_id\n\tresponse = request_get(\"/api/order/\" + order_id)\n\tputs response.body\nend", "def index\n @orders = @orders.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def 
index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = @branch.orders.limit(100).paginate(:page=>params[:page], :per_page=>20)\n logger.debug @orders.inspect\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = index_resources\n respond_with(@orders)\n end", "def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show_orders\n @user = User.find_by_sql [\"select * from users where email = ? and users.status = 'admin'\", params[:email]]\n if @user.count != 0\n @orders = UsersOrder.find_by_sql [\"select users_orders.id, users.email, books.title, books.genre from users_orders\n LEFT JOIN users ON users_orders.user_id = users.id\n LEFT JOIN books ON users_orders.book_id = books.id where users_orders.status = ? ORDER BY users_orders.created_at ASC\", params[:status]]\n end\n render json: @orders\n end", "def index\n @line_items = @order.line_items\n\n render json: @line_items\n end", "def index\n @orders = Order.all\n end", "def orders\n resp = API.connection.get 'api/orders', {\n user_email: @email,\n user_token: @token\n }\n\n case resp.status\n when 200\n resp.body.map { |order_hash| Order.new(order_hash) }\n when 401\n raise Teachable::AuthError, resp.body['error']\n else\n raise Teachable::Error, 'Unknown response.'\n end\n end", "def customer_single_orders\n @orders = Order.where(customer_id: current_user.customer_id, category: :single)\n render json: @orders, status: 200\n\n end", "def order(id, orderid = '')\n get(\"/accounts/#{id}/orders\")['orders']\n end", "def index\n @orders = Order.all\n respond_to do |format|\n format.html { render layout: \"account\" }\n format.json { render :json => @orders }\n end\n end", "def show\n respond_to do |format|\n format.html\n format.json { render :json => @order }\n end\n end", "def show\n respond_to do |format|\n format.html\n format.json { render :json => @order }\n end\n end", "def index\n @orders = Order.all\n end", "def orders(id, orderid = nil)\n get(\"/accounts/#{id}/orders#{orderid ? \"/#{orderid}\" : ''}\")['orders']\n end", "def show\n \n @order = Order.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n \n end \n end", "def index \n @orders = Order.all \n end", "def searchByOrder\n \torderId = params['id']\n\t order = Order.where(id: orderId)\n\t render json: order, status: 200\n\tend", "def index # class method\n @orders = Order.all\n end", "def index # class method\n @orders = Order.all\n end" ]
[ "0.83267236", "0.815406", "0.80041754", "0.7947603", "0.7846487", "0.7823147", "0.78028905", "0.76992875", "0.76980406", "0.7682792", "0.7682792", "0.7682792", "0.7682792", "0.76827645", "0.75880665", "0.7574349", "0.756171", "0.7531827", "0.74860185", "0.7432208", "0.7369724", "0.73326814", "0.73287463", "0.73287463", "0.7286546", "0.72852486", "0.7270603", "0.7269312", "0.7264207", "0.72581947", "0.72464883", "0.72452116", "0.7238898", "0.7233591", "0.7217645", "0.72125256", "0.7156275", "0.7141881", "0.7086836", "0.70671874", "0.70659137", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70640796", "0.70595765", "0.70301193", "0.70267373", "0.70247364", "0.70047027", "0.69968164", "0.6988875", "0.6972612", "0.69694895", "0.6966311", "0.696538", "0.696538", "0.6957052", "0.69567907", "0.6946388", "0.6917995", "0.69107455", "0.69074893", "0.69074893" ]
document_score: 0.0
document_rank: -1
GET /orders/1 GET /orders/1.json
def show end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getorders(args={})\n {\n :method=> \"GetOrders\"\n }.to_json\n end", "def index\n @orders = Order.all\n render json: @orders\n end", "def index\n @orders = Order.all\n render json: @orders, status: 200\n end", "def index\n @orders = Order.order(\"id\").all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = Order.all\n if @orders.count >= 1\n json_response(@orders)\n else\n json_response({ Message: Message.no_data }, :not_found)\n end\n end", "def get_order(order_id)\n\tputs \"Getting order: \" + order_id\n\tresponse = request_get(\"/api/order/\" + order_id)\n\tputs response.body\nend", "def show\n order = Order.find(params[:id])\n render json: order\n end", "def show\n @order = Order.find(params[:id])\n\n render json: @order\n end", "def index\n\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def orders\n params = { command: 'account_orders' }\n get('/json.php', params)\n end", "def index\n @orders = Order.all\n respond_to do |format|\n format.html\n format.json { render :json => @orders }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n render json: @order, status: :ok\n\n end", "def show\n @v1_order = V1::Order.find(params[:id])\n\n if @v1_order.nil?\n render json: @v1_order, message: 'Resource not found', status: 404\n else\n render json: @v1_order, message: 'OK', status: 200\n end\n end", "def index\n #@orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def customer_single_orders\n @orders = Order.where(customer_id: current_user.customer_id, category: :single)\n render json: @orders, status: 200\n\n end", "def show\n render json: @order\n end", "def show\n render json: @order\n end", "def get_all_orders() \n\tputs \"Getting all orders\"\n\tresponse = request_get(\"/api/order\")\n\tputs response.body\nend", "def show\n \n @order = Order.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n \n end \n end", "def index\n #data = HTTParty.get(\"http://localhost:8081/customers.json\")\n #p data.parsed_response[0]['email']\n if params[:customerId].present?\n @orders = Order.where(\"customerId\": params[:customerId].to_i)\n render json: @orders, status: 200\n elsif params[:id].present?\n @orders = Order.find_by id: params[:id]\n render json: @orders, status:200\n elsif params[:email].present?\n res = HTTParty.get(\"http://localhost:8081/customers/?email=#{params['email']}\")\n p res\n res = res.parsed_response\n @orders = Order.where(\"customerId\": res['id'].to_i)\n render json: @orders, status:200\n else\n @orders = Order.all\n end\n end", "def 
index\n\t\t@orders = Order.all\n\t\trespond_with @orders\n\tend", "def show\n @order1 = Order1.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order1 }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def index\n @order_line_items = @order.order_line_items\n\n render json: @order_line_items\n end", "def orders\n authenticated_post(\"orders\").body\n end", "def index\n @orders = Order.order(\"id DESC\").page(params[:page])\n\n if params[:user_id]\n @orders = @orders.where(:user_id => params[:user_id])\n end\n\n if Rails.configuration.orders_status.select{|k, v| v[:real]}.keys.include? 
params[:status]\n @orders = @orders.where(:status => params[:status])\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n respond_to do |format|\n format.html\n format.json { render :json => @order }\n end\n end", "def show\n respond_to do |format|\n format.html\n format.json { render :json => @order }\n end\n end", "def orders\n authenticated_post(\"auth/r/orders\").body\n end", "def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json=>@order }\n end\n end", "def show\n @order = Order.find(params[:id])\n @title = \"Order #{@order.id}\"\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def recent_orders()\n\t\twith_auth nil do |options|\n\t\t\tself.class.get(\"/api/v1/orders\", options)\n \tend\n\tend", "def show\n #@order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end", "def show\n if @order\n respond_to do |format|\n format.html { @order }\n format.json { render json: @order.to_json(include: [:status, :package, :discount]) }\n end\n else\n redirect_to orders_path, notice: \"Order ID not found for that client.\"\n end\n end", "def index\n @orders = @group.orders\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end", "def index\n @power_orders = PowerOrder.all\n render json: @power_orders\n end", "def index\n @admin_orders = Order.page(params[:page]).per(10)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @admin_orders }\n end\n end", "def order(id, orderid = '')\n get(\"/accounts/#{id}/orders\")['orders']\n end", "def retrieve_order(order_id:)\n new_api_call_builder\n .request(new_request_builder(HttpMethodEnum::GET,\n '/v2/orders/{order_id}',\n 'default')\n .template_param(new_parameter(order_id, key: 'order_id')\n .should_encode(true))\n .header_param(new_parameter('application/json', key: 'accept'))\n .auth(Single.new('global')))\n .response(new_response_handler\n .deserializer(APIHelper.method(:json_deserialize))\n .is_api_response(true)\n .convertor(ApiResponse.method(:create)))\n .execute\n end", "def index # see model > order.rb\n # if current_user\n @orders = current_user.orders\n render \"index.json.jb\"\n # else\n # render json: [], status: :unauthorized\n # end\n end", "def index\n @orders = Order.paginate page: params[:page], order: 'created_at desc', \n per_page: 10\n \n respond_to do |format|\n format.html\n format.json { render json: @orders } \n end\n end", "def index\n @line_items = @order.line_items\n\n render json: @line_items\n end", "def index\n @orders = order.all\n end", "def searchByOrder\n \torderId = params['id']\n\t order = Order.where(id: orderId)\n\t render json: order, status: 200\n\tend", "def get_order(order_id)\n res = request('get', \"https://#{orders_path}/#{order_id}\")\n parse_response res\n end", "def show\n @order = Order.find(request[:order_id])\n @order_item = @order.order_items.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order_item }\n end\n end", "def index\n @orders = Order.all\n render json: { status: 'SUCCESS', message: 'Loaded posts', data: @orders }\n end", "def fetch\n @order = 
Order.where(owner_id: current_user.branch.nodes.pluck(:id)).order('orders.quantity').last\n\n render json: {id: @order.id, quantity: @order.quantity,\n date: @order.created_at.strftime('%d/%m/%Y'),\n time: @order.created_at.strftime('%p %I:%M'),\n destination: \"#{t('fridge')} - #{@order.owner_id}\"\n }, status: :ok\n end", "def get(order_id)\n get_request(t_url(:order, order_id))\n end", "def get_order\n @order = Order.find(params[:id])\n end", "def get_order\n @order = Order.find(params[:id])\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end", "def index\n @orders = Order.all\n end" ]
[ "0.75533825", "0.74895185", "0.74764353", "0.7378989", "0.73719287", "0.7341498", "0.73393774", "0.73174775", "0.73031616", "0.72925746", "0.72925746", "0.72925746", "0.72925746", "0.72924364", "0.72804433", "0.72674763", "0.7250841", "0.72291356", "0.7214567", "0.7183521", "0.71268445", "0.71268445", "0.70963174", "0.7089442", "0.7045707", "0.7017688", "0.7011928", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.7006555", "0.6980087", "0.69742304", "0.69627887", "0.6952898", "0.6920965", "0.6920965", "0.69104093", "0.6908525", "0.69049233", "0.68889505", "0.68818635", "0.6871454", "0.68532974", "0.68322176", "0.6830885", "0.6826834", "0.6821004", "0.681981", "0.68094957", "0.6792112", "0.6788042", "0.6784708", "0.6749544", "0.67385375", "0.6726228", "0.67160434", "0.6705562", "0.67025334", "0.67025334", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114", "0.66982114" ]
document_score: 0.0
document_rank: -1
POST /orders POST /orders.json
def create @order = Order.new(order_params) respond_to do |format| if @order.save persist_order_address format.html { redirect_to [:admin, @order], notice: 'Order was successfully created.' } format.json { render action: 'show', status: :created, location: @order } else format.html { render action: 'new' } format.json { render json: @order.errors, status: :unprocessable_entity } end end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def orders\n authenticated_post(\"orders\").body\n end", "def orders\n authenticated_post(\"auth/r/orders\").body\n end", "def create\n order = Order.create(order_params)\n render json: order\nend", "def submit_order()\n\tputs \"Submitting order\"\n\tdata = create_order()\n\tresponse = request_post(\"/api/order\", data)\n\tputs response.body\nend", "def create\n @order = Order.new(order_params)\n if @order.save\n render json: { status: 'SUCCESS', data: @order }\n else\n render json: { status: 'ERROR', data: @order.errors }\n end\n end", "def create_order(order_params)\n res = request('post', \"https://#{orders_path}\", order_params)\n parse_response res\n end", "def create\n @order = @orders.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: \"Order was successfully created.\" }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, :notice=>\"Order was successfully created.\" }\n format.json { render :json=>@order, :status=>:created, :location=>@order }\n else\n format.html { render :action=>\"new\" }\n format.json { render :json=>@order.errors, :status=>:unprocessable_entry }\n end\n end\n end", "def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, :notice => 'Order was successfully created.' }\n format.json { render :json => @order, :status => :created, :location => @order }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @order.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create_order(options)\n request :account, :post, 'order', options\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: t('app.orders.create.success') }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n # @order = Order.new() \n total = 0\n \n @order = Order.new()\n for product in params[:_json]\n \n if (product[:quantity].nil? || product[:quantity].to_f < 1 || !isint(product[:quantity]))\n # Handle case when order invalid quantity\n render json: \"\", status: :bad_request\n return\n end\n\n @product = Product.find_by_name_and_size_id(product[:product], product[:size]) \n if @product.nil?\n # Handle case when order invalid products\n render json: \"\", status: :not_found\n return\n end \n total = total + @product.price * product[:quantity].to_f \n @order.orders_products << OrdersProduct.new(:product => @product, :hot => product[:hot], :quantity => product[:quantity]) \n end \n\n @order.total = total\n\n if @order.save\n render json: @order, status: :created, location: @order\n else\n render json: @order.errors, status: :unprocessable_entity\n end\n end", "def validate_order() \n\tputs \"Validating order\" \n\tdata = create_order()\n\tresponse = request_post(\"/api/order/validate\", data)\n\tputs response.body\nend", "def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' 
}\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def orders\n params = { command: 'account_orders' }\n get('/json.php', params)\n end", "def cow_order\n @order = Order.new\n @order.lines.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @order }\n end\n end", "def create\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: { order_id: @order.id }, status: :ok }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' 
}\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n order = Order.create(order_params)\n if order.save\n render json: order\n else\n render new\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' 
}\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def add_orders params\n @orders_hash = JSON.parse(params)\n\n @orders_hash['orders'].each do |order|\n add_order(order[\"origin\"],order[\"destination\"],order[\"size\"])\n end\n true\n end", "def create\n @power_order = PowerOrder.new(power_order_params)\n @power_order.save\n render json: @power_order\n end", "def create\n @order = current_owner.orders.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to orders_path, notice: 'Order was created successfully' }\n format.json { render :show, status: :created, location: orders_path }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(params[:order])\n @order_sales = @order.sales\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, flash: { sucess: 'Order was successfully created.' } }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def phone_order(params)\n path = @version + '/Phone/Order/'\n method = 'POST'\n return request(path, method, params)\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' 
}\n format.json { render action: 'show', status: :created, location: @order }\n else\n format.html { render action: 'new' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\n @v1_order = V1::Order.new(v1_order_params)\n\n if @v1_order.save\n render json: @v1_order, status: :OK, location: @v1_order\n else\n render json: @v1_order.errors, status: :unprocessable_entity\n end\n end", "def create_test_order(options)\n request :account, :post, 'order/test', options\n end", "def create\n @order = Order.new(params[:order])\n @order.user_id = current_user.user_id\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to order_url(@order), notice: I18n.t('orders.successfully_created') }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = current_user.orders.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(params[:order])\n @order.submitted_by_id = current_user.id\n respond_to do |format|\n if @order.save\n format.html { redirect_to orders_url, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order, @errors = Order.create_from_cart(@cart, order_params)\n\n if @order\n render json: @order, status: :created, location: @order\n else\n render json: @errors, status: :unprocessable_entity\n end\n end", "def create\n @admin_order = Order.new(params[:admin_order])\n\n respond_to do |format|\n if @admin_order.save\n format.html { redirect_to @admin_order, notice: 'Order was successfully created.' 
}\n format.json { render json: @admin_order, status: :created, location: @admin_order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @admin_order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_order(order)\n # response = post(\n # 'peatio/market/orders',\n # {\n # market: order.market.downcase,\n # side: order.side.to_s,\n # volume: order.amount,\n # price: order.price\n # }\n # )\n\n return if order.amount < 1e-8\n od = build_order(order)\n return if od.nil?\n\n# Arke::Log.debug \"Skip order creation #{od.to_json}\\n#{order.inspect}\"\n Ordering.new(od).submit\n @open_orders.add_order(order, od.id) if od.id\n Arke::Log.debug \"Order created #{od.to_json}\"\n\n # @open_orders.add_order(order, response.env.body['id']) if response.env.status == 201 && response.env.body['id']\n\n # response\n end", "def create\n @order = Order.new( order_params )\n @order.system_id = Order.set_system_id\n respond_to do |format|\n if @order.save\n set_order_values(@order,params)\n route = 'https://private-3643a-orderlordapi.apiary-mock.com/api/v1/jobs'\n respose = HTTParty.post(route, body: @body, :headers => @headers)\n parsed_response = respose.parsed_response.symbolize_keys!\n if \tparsed_response[:success] == \"true\"\n @order.tracker_hash = parsed_response[:tracker_hash]\n @order.save!\n end\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n respond_to do |format|\n if order.save\n format.html { redirect_to order, notice: 'Order was successfully created.' }\n format.json { render json: order, status: ':created', location: order }\n else\n format.html { render action: 'new' }\n format.json { render json: order.errors, status: ':unprocessable_entity' }\n end\n end\n end", "def create\n # 前端傳回的資料,呼叫 order_params 過濾前端傳回來的資料,用 new 創出一個 Order 的物件 (此時還是 ruby 物件喔)\n @order = Order.new(order_params)\n\n respond_to do |format|\n # save 是指把該筆 物件裡的資料存入資料庫內\n if @order.save\n # 若儲存成功,就導回該筆資料的 show 頁面\n format.html { redirect_to order_path(@order), notice: 'Order was successfully created.' 
}\n format.json { render :show, status: :created, location: @order }\n else\n # 若儲存失敗,就導回新增的頁面重新填寫資料\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n items = params[:items]\n filtered_items = []\n items.each do |item|\n item[:quantity].times do |order|\n filtered_items << {\"apiKey\" => item[:item_api], \"customizationChoices\" => [], \"comments\" => item[:instruction]}\n end\n end\n token = ENV[\"REACT_APP_EAT_STREET_TOKEN\"]\n uri = URI.parse(\"https://api.eatstreet.com/publicapi/v1/send-order\")\n request = Net::HTTP::Post.new(uri)\n request.content_type = \"application/json\"\n request[\"X-Access-Token\"] = token\n request.body = JSON.dump({\n \"restaurantApiKey\" => processing_params[:restaurant_api_key],\n \"items\" => filtered_items,\n \"method\" => \"delivery\",\n \"payment\" => \"cash\",\n \"test\" => false,\n \"comments\" => processing_params[:comments],\n \"card\" => {\n \"apiKey\" => nil\n },\n \"address\" => {\n \"apiKey\" => nil,\n \"streetAddress\" => processing_params[:address],\n \"latitude\" => processing_params[:latitude],\n \"longitude\" => processing_params[:longitude]\n },\n \"recipient\" => {\n \"apiKey\" => nil,\n \"firstName\" => processing_params[:username],\n \"lastName\" => processing_params[:username],\n \"phone\" => processing_params[:phone],\n 'email' => processing_params[:email]\n }\n })\n\n req_options = {\n use_ssl: uri.scheme == \"https\",\n }\n\n response = Net::HTTP.start(uri.hostname, uri.port, req_options) do |http|\n http.request(request)\n end\n\n message = ''\n if response.code == 200\n message = {error: false, details: 'You Order Has Been Processed'}\n items.each do |item|\n order = Order.find(item[:id])\n order.ordered = true\n end\n\n else\n message = response.body\n end\n\n render json: message\n end", "def write_order(order, request_body = nil)\n path = \"/checkout/orders\"\n path += \"/#{order.id}\" if order.id\n\n request_body ||= order.to_json\n response = https_connection.post do |req|\n req.url path\n\n req.headers['Authorization'] = \"Klarna #{sign_payload(request_body)}\"\n req.headers['Accept'] = 'application/vnd.klarna.checkout.aggregated-order-v2+json',\n req.headers['Content-Type'] = 'application/vnd.klarna.checkout.aggregated-order-v2+json'\n req.headers['Accept-Encoding'] = ''\n\n req.body = request_body\n end\n handle_status_code(response.status, response.body)\n response\n end", "def create\n @order = Order.new(order_params)\n respond_to do |format|\n if @order.save\n @order.order_create\n format.html { redirect_to @order, notice: 'Замовлення успішно створено.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to orders_url, notice: 'Dati commessa caricati.' 
}\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_order(order)\n build_persisted_order(\n post('/market/orders', order.symbolize_keys.merge(side: SIDES_MAP.fetch(order.fetch(:side))))\n )\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to home_path, notice: 'Order was successfully created.' }\n format.json { render action: 'show', status: :created, location: @order }\n else\n format.html { render action: 'new' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n res = HTTParty.get(\"http://localhost:8081/customers/?email=#{order_params['email'].to_s}\")\n codeCustomer = res.code\n dataCustomer = res.parsed_response\n p res\n res = HTTParty.get(\"http://localhost:8082/items/#{order_params['itemid'].to_s}.json\")\n #res = HTTParty.get(\"http://localhost:8082/items/#{order_params['itemid'].to_s}.json\")\n codeItem = res.code\n dataItem = res.parsed_response\n p dataItem\n if codeCustomer != 404 && codeItem != 404\n newParams = order_params\n newParams[\"award\"] = dataCustomer[\"award\"] \n newParams[\"price\"] = dataItem[\"price\"]\n newParams[\"total\"] = dataItem[\"price\"] - dataCustomer[\"award\"]\n p newParams\n \n #HTTParty.put(\"http://localhost:8081/customers/order?award=#{newParams['award']}&total=#{newParams[\"total\"]}&customerId=#{newParams['customerid']}\")\n end\n if codeCustomer == 404 || codeItem == 404\n if codeCustomer == 404 and codeItem == 404\n render json: {error: \"Customer and Item do not exist\"}, status: 400\n return\n end\n if codeCustomer == 404 and codeItem != 404\n render json: {error: \"Customer does not exist\"}, status: 400\n return\n end\n if codeCustomer != 404 and codeItem == 404\n render json: {error: \"Item does not exist\"}, status: 400\n return\n end\n else\n @order = Order.new\n @order.customerid = dataCustomer[\"id\"]\n @order.email = dataCustomer[\"email\"]\n @order.itemid = order_params[:itemid]\n @order.description = dataItem[\"description\"]\n @order.award = newParams[\"award\"]\n @order.total = newParams[\"total\"]\n @order.price = newParams[\"price\"]\n \n orderResult = HTTParty.put('http://localhost:8081/customers/order', \n :body => @order.to_json,\n :headers => {'Content-Type' => 'application/json', 'ACCEPT' => 'application/json'}\n )\n\n res = HTTParty.put(\"http://localhost:8082/items/#{order_params[:itemid]}?stockQty=#{dataItem['stockQty']-1}&description=#{dataItem['description']}&price=#{dataItem['price']}&id=#{order_params[:id]}\")\n p res\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n \n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end\n end", "def create\n @order = Order.new(params[:order])\n end", "def create\n @order = Order.new(order_params)\n @order.user_id = current_user.id\n @order.uuid = SecureRandom.hex(8)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' 
}\n format.json { render 'show', status: :created, location: @order }\n else\n format.html { render 'new' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n # Create the order \n @order = Order.new\n @email = params[:email]\n \n # Invoke the customer service to retrieve the customer id using the customers\n customerCode, customer = Customer_Service.getCustomerByEmail(@email)\n \n # Check to make sure the customer can be found\n if customerCode != 200\n render json: { error: \"Customer could not be found. \", status: 400 }\n return\n end\n \n # Invoke the item service to retrieve the item information\n orderCode, item = Item_Service.getItemById(params[:itemId])\n # Check to see if the item can be found\n if orderCode != 200\n render json: { error: \"Item could not be found\", status: 400 }\n return\n end\n # Check to see if the item is in stock\n if item[:stockQty] <= 0\n render json: { error: \"Item is out of stock\", status: 400 }\n return\n end\n \n \n # Construct the object\n @order.itemId = params[:itemId]\n @order.description = item[:description]\n @order.customerId = customer[:id]\n @order.price = item[:price]\n @order.award = customer[:award]\n @order.total = @order.price - @order.award\n \n # Check to see if the order can be saved\n if @order.save\n # Save the order to the customer and save it to the item\n tempCode = Customer_Service.postOrder(@order)\n tempCode = Item_Service.postOrder(@order)\n render json: @order, status: 201\n else\n render json: @order.errors, status: 400\n end\n \n end", "def new\n @path = '/orders'\n @method = 'post'\n @order = Order.new\n @bundles = Bundle.all\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @order }\n end\n end", "def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: \"Don't think you're going to be there when we deliver? Leave cash in an envelope outside your door. Otherwise, we'll see you in person!\" }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\t\t@order = Order.new(order_params)\n\t\[email protected]_id = current_user.id\n\t\[email protected]_date = Time.now\n\t\[email protected] = 1\n\n\t\trespond_to do |format|\n\t\t\tif @order.save\n\t\t\t\tformat.html { redirect_to @order, notice: 'Order was successfully created.' }\n\t\t\t\tformat.json { render :show, status: :created, location: @order }\n\t\t\telse\n\t\t\t\tformat.html { render :new }\n\t\t\t\tformat.json { render json: @order.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend", "def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to root_path, notice: 'Order berhasil, tunggu untuk konfirmasi selanjutnya' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' 
}\n format.json { render :show, status: :created, location: @order }\n\n @basket = ActiveSupport::JSON.decode(cookies[\"basket\"])\n p @basket\n @basket.each do |order_position|\n @order_detail = OrderDetail.create!(order: @order, product_id: order_position[0], qty: order_position[1])\n end\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\t\t@admin_order = Admin::Order.new(order_params)\n\n\t\trespond_to do |format|\n\t\t\tif @admin_order.save\n\t\t\t\tif params[:order_update].present?\n\t\t\t\t\tformat.html { redirect_to edit_admin_order_path(@admin_order), notice: 'Pedido criado com sucesso.'}\n\t\t\t\telse\n\t\t\t\t\tformat.html { redirect_to @admin_order, notice: 'Pedido criado com sucesso.' }\n\t\t\t\t\tformat.json { render json: @admin_order, status: :created, location: @admin_order }\n\t\t\t\tend\n\t\t\telse\n\t\t\t\tformat.html { render action: \"new\" }\n\t\t\t\tformat.json { render json: @admin_order.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend", "def new_order(params)\n camelcase_params!(params)\n call_api('NewOrder', params)\n end", "def create\n @order = Order.new JSON.parse request.body.read\n\n respond_to do |format|\n rescue_connection_failure do\n if @order.save\n format.html { redirect_to @order, notice: 'ORder was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end\n end", "def post(resource, params)\n case resource\n when \"pedidos\", \"place_order\", \"new_order\" then url = \"/pedidos\"\n when \"envios\", \"shipping\" then url = \"/envios\"\n else url = \"/#{resource}\"\n end\n\n post_request(url, params)\n end", "def create\n @order = Order.new :user_id => params[:user_id], :order_list => params[:order_list], :table_number => params[:table_number], :paid => params[:paid], :fb_user => params[:fb_user], :business_name => params[:business_name]\n @order.order_list = []\n @order.paid = false\n\n success_msg = {\n \"messages\": [\n {\"text\": \"Your order was created.\"},\n {\"text\": \"Thank you.\"}\n ]\n }\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :json => success_msg }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity, response: request.body.read }\n end\n end\n end", "def create\n @order = current_user.orders.build(order_params)\n\n respond_to do |format|\n if @order.save_with_items(current_user)\n format.json { render json: @order, status: :created }\n format.html { redirect_to @order }\n else\n format.html do\n set_feed_back\n set_addresses_add_express_fee\n\n set_wx_cards\n\n flash.now[:error] = @order.errors.full_messages.join(', ')\n\n render :confirmation, status: :unprocessable_entity\n end\n\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n @order.status = \"Pending\"\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' 
}\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def index\n @orders = Order.all\n render json: @orders\n end", "def order_post(order, api_key, opts = {})\n data, _status_code, _headers = order_post_with_http_info(order, api_key, opts)\n return data\n end", "def index\n @orders = Order.all\n render json: @orders, status: 200\n end", "def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Provider was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n \tredirect_to orders_url\n# @order = Order.new(params[:order])\n\n# respond_to do |format|\n# if @order.save\n# format.html { redirect_to @order, notice: 'Order was successfully created.' }\n# format.json { render json: @order, status: :created, location: @order }\n# else\n# format.html { render action: \"new\" }\n# format.json { render json: @order.errors, status: :unprocessable_entity }\n# end\n# end\n end", "def create\n @title = t 'view.orders.new_title'\n @order = current_customer.orders.build(params[:order])\n session[:documents_to_order].try(:clear)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: t('view.orders.correctly_created') }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: 'new' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n @order.user = current_user\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' 
}\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def getorders(args={})\n {\n :method=> \"GetOrders\"\n }.to_json\n end", "def create\n @order = Order.new(tl_params)\n\n if @order.save\n if params[:product]\n \[email protected]_order(params[:product])\n end\n render json: @order, status: :created\n else\n render json: @order.errors, status: :unprocessable_entity\n end\n end", "def create\n fail = false\n itemId = order_params[:itemId]\n customerId = order_params[:customerId]\n \n #logger.debug \"getting item\"\n self.class.base_uri \"http://localhost:8082\"\n uri = \"/items/%d\" % [itemId]\n response = self.class.get uri\n \n #Check if item of this id was fetched correctly\n if response.code == 200\n item = response.body\n else\n fail = true\n end\n #logger.debug item\n\n #logger.debug \"getting customer\"\n self.class.base_uri \"http://localhost:8081\"\n uri = \"/customers?id=%d\" % [customerId]\n response = self.class.get uri\n \n\n #Check if customer of this id was fetched correctly\n if response.code == 200\n customer = response.body\n else\n fail = true\n end\n #logger.debug customer\n \n #If item and customer are found, create the order, otherwise, give a 404 not found error\n if !fail\n #logger.debug \"success\"\n item = JSON.parse(item)\n description = item[\"description\"]\n \n price = item[\"price\"].to_f #to float, as it's parsed as a string for some reason\n logger.debug price\n customer = JSON.parse(customer)\n \n award = customer[\"award\"]\n total = price - award\n \n @order = Order.new({itemId: itemId, customerId: customerId, description: description, price: price, award: award, total: total})\n if @order.save\n render json: @order, status: 201\n \n # #raise ActiveRecord::Rollback, \"Rolling back changes\"\n else\n render json: @order.errors.messages, status: 400\n end\n else\n head 404\n end\n\n end", "def create\n @order = Order.new(order_params)\n \n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, :print => 'true' }\n format.json { render action: 'show', status: :created, location: @order }\n else\n format.html { render action: 'new' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\n @orders = Order.all\n @order = Order.create(order_params)\n\n=begin\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' 
}\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n=end\n end", "def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n redirect_to :action => :index\n else\n render :action => :new\n end\n end\n end", "def perform\n Magento2::Api.configure('dz4xnhhgfsfuyj00g6bkel0jq6mwdak2', 'hhjnlf59qh2m7an9sdpfcu0o9nox78y6', 'ie5iafduhqs1dydynidsjki582oti17w', 'mva5hldj17elic6muxmf53fq7zmm7xl5', \"https://mall2door.net\")\n orders = Magento2::Api.get(\"/rest/en/V1/orders\", {searchCriteria: 'all' })\n all_orders = orders[:items]\n all_orders.each do |order|\n unless order[:status].present?\n order_id = order[:increment_id]\n id = order[:entity_id]\n status = order[:state]\n params = {\n entity_id: id,\n increment_id: order_id,\n status: status,\n }\n if status\n Magento2::Api.put(\"/rest/en/V1/orders/create\", {entity: params})\n end\n end\n end\n end", "def make_api_call_for_order_creation(url, api_params, access_token)\n RestClient.post(url, api_params, Authorization: \"Bearer #{access_token}\", content_type: :json)\n end", "def create\n @ordered = Ordered.new(ordered_params)\n\n respond_to do |format|\n if @ordered.save\n format.html { redirect_to @ordered, notice: 'Ordered was successfully created.' }\n format.json { render :show, status: :created, location: @ordered }\n else\n format.html { render :new }\n format.json { render json: @ordered.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @order = Order.new(order_params)\n @order.user_id = session[:user_id]\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @sales_order = SalesOrder.new(sales_order_params)\n @sales_order.date = Time.now\n sodetails = @sales_order.sales_order_details\n sodetails.each do |sod|\n sod.pending_rg = sod.quantity\n sod.pending_cg = sod.quantity\n sod.pending_inv = sod.quantity\n end\n \n respond_to do |format|\n if @sales_order.save\n format.html {\n flash[:notice] = 'La Orden de Venta se creó satisfactoriamente.'\n redirect_to sales_orders_path\n }\n format.json { render :show, status: :created, location: @sales_order }\n else\n format.html { \n flash[:error] = @sales_order.errors\n redirect_to new_sales_order_path\n }\n format.json { render json: @sales_order.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\t @order = Order.new\n\t code, customer = Customer.getCustomer(params[:email])\n\t \n\t if code != 200\n\t\t render json: { error: \"Customer email not found. #{params[:email]}\" }, status: 400\n\t\t return\n\t end\n\t \n\t code, item = Item.getItemById(params[:itemId])\n\t if code != 200\n\t\t render json: { error: \"Item id not found. 
#{params[:itemId]}\" }, status: 400\n\t\t return\n\t end\n\t \n\t if item[:stockQty] <= 0\n\t\t render json: { error: \"Item is out of stock.\"}, status: 400\n\t\t return\n\t end\n\t \n\t @order.itemId = item[:id]\n\t @order.description = item[:description]\n\t @order.customerId = customer[:id]\n\t @order.price = item[:price]\n\t @order.award = customer[:award]\n\t @order.total = @order.price - @order.award\n\t \n\t if @order.save\n \t\t# put order to customer and item subsystem to do their updates\n\t\t code = Customer.putOrder(@order)\n\t\t code = Item.putOrder(@order)\n\t\t render json: @order, status: 201\n\t else\n \t\trender json: @order.errors, status: 400\n \tend\n end", "def order_params\n params.require(:order).permit(:date, :obs, :client_id)\n end", "def create\n if params[:order][:id]\n build do\n message 'Повторый заказ'\n order Order.create_order_from_order(params[:order][:id])\n view 'consumer/orders/show'\n end\n elsif Order.create_orders_from_cart(params[:cart_id], current_user)\n build do\n message 'Создание заказов'\n view 'consumer/orders/create'\n end\n else\n render json: @order.errors, status: :unprocessable_entity\n end\n end", "def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n UserMailer.order_email(@order).deliver\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def index\n @orders = Order.all\n render json: { status: 'SUCCESS', message: 'Loaded posts', data: @orders }\n end", "def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'La commande a été créée avec succès.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end" ]
[ "0.75886714", "0.7490972", "0.7488501", "0.74664384", "0.72739846", "0.7195349", "0.7063682", "0.69745994", "0.6910657", "0.68880194", "0.68747663", "0.685004", "0.6840204", "0.6838178", "0.6838178", "0.6838178", "0.6838178", "0.682244", "0.6790037", "0.67793274", "0.6779247", "0.67782325", "0.6771919", "0.6771919", "0.67691034", "0.6754747", "0.6754747", "0.6754747", "0.6754747", "0.6754747", "0.6754747", "0.6754747", "0.6754747", "0.6754747", "0.6754747", "0.6754747", "0.6750308", "0.6741648", "0.6727289", "0.67202854", "0.67192626", "0.67174095", "0.66730356", "0.66511375", "0.66424936", "0.66256195", "0.66148645", "0.65905017", "0.6581552", "0.6574477", "0.65712935", "0.65698874", "0.6569679", "0.65657544", "0.656543", "0.6562849", "0.6553006", "0.65528905", "0.65511286", "0.65461224", "0.6544374", "0.65240365", "0.65232676", "0.6502522", "0.64868236", "0.6483515", "0.64801395", "0.6467508", "0.6463724", "0.6455904", "0.64464027", "0.6438949", "0.64251065", "0.6424166", "0.6408214", "0.64042836", "0.6403627", "0.63958675", "0.639366", "0.63919747", "0.6391734", "0.6390046", "0.63786453", "0.6368862", "0.63676715", "0.63499165", "0.6326805", "0.6325994", "0.6319851", "0.63150823", "0.6309701", "0.6297522", "0.62932307", "0.6284583", "0.62811166", "0.6266786", "0.6266663", "0.62406427", "0.6232846", "0.6231927" ]
0.6485337
65
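
The rows above and below follow the column layout listed at the top of this dump: a route-comment query, a Rails controller snippet as the document, a negatives list of near-miss snippets, a parallel negative_scores list, and a trailing score/rank pair for the true document. As a minimal sketch of how such a row could be consumed, assuming the records are stored one JSON object per line and keyed by those column names (the file name orders_retrieval.jsonl and the helpers load_rows / summarize are hypothetical, not taken from this dump):

require "json"

# Assumption: one JSON object per line (JSONL); field names mirror the
# column layout shown in this dump. Adjust if the on-disk format differs.
def load_rows(path)
  File.foreach(path).map { |line| JSON.parse(line) }
end

# Pair each negative snippet with its score and report a few row-level stats.
def summarize(row)
  pairs = row.fetch("negatives", []).zip(row.fetch("negative_scores", []))
  hardest = pairs.max_by { |(_snippet, score)| score.to_f }

  {
    query: row["query"],
    document_rank: row["document_rank"],
    document_score: row["document_score"],
    hardest_negative_score: hardest && hardest.last,
    negative_count: pairs.size
  }
end

if __FILE__ == $PROGRAM_NAME
  load_rows("orders_retrieval.jsonl").each do |row| # hypothetical file name
    puts summarize(row).inspect
  end
end
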
PATCH/PUT /orders/1 PATCH/PUT /orders/1.json
def update
  respond_to do |format|
    if @order.update(order_params)
      persist_order_address
      format.html { redirect_to [:admin, @order], notice: 'Order was successfully updated.' }
      format.json { head :no_content }
    else
      format.html { render action: 'edit' }
      format.json { render json: @order.errors, status: :unprocessable_entity }
    end
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update\n @order = Order.find(params[:id])\n\n if @order.update(order_params)\n head :no_content\n else\n render json: @order.errors, status: :unprocessable_entity\n end\n end", "def update\n @order = Order.find(params[:id])\n\n if @order.update(order_params)\n head :no_content\n else\n render json: @order.errors, status: :unprocessable_entity\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, :notice=>\"Order was successfully updated.\"}\n format.json { head :ok }\n else\n format.html { render :action=>\"edit\" }\n format.json { render :json=>@order.errors, :status=>\"unprocessable_entry\" }\n end\n end\n end", "def update\n #@order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to orders_path, notice: 'Order was successfully updated.' }\n format.json { render json:@order }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to orders_url, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to order_url(@order), notice: I18n.t('orders.successfully_updated') }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, :notice => 'Order was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @order.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @v1_order = V1::Order.find(params[:id])\n\n case @v1_order.state\n when 0\n if @v1_order.update(v1_order_params)\n head :no_content\n else\n render json: @v1_order.errors, status: :unprocessable_entity\n end\n else\n render json: {message: 'Can be edited only when in draft(0) state'}, status: 400\n end\n \n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(params[:order])\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order = Order.find(params[:id])\n\n respond_to do |format|\n if @order.update_attributes(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to orders_path, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @orders_path }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if order.update(order_params)\n format.html { redirect_to order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: order.errors, status: ':unprocessable_entity' }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @order1 = Order1.find(params[:id])\n\n respond_to do |format|\n if @order1.update_attributes(params[:order1])\n format.html { redirect_to stores_path, notice: 'Order1 was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order1.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\n @orders = Order.all\n @order = Order.find(params[:id])\n\n @order.update_attributes(order_params)\n\n=begin\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n=end\n end", "def update_order(order_id:,\n body:)\n new_api_call_builder\n .request(new_request_builder(HttpMethodEnum::PUT,\n '/v2/orders/{order_id}',\n 'default')\n .template_param(new_parameter(order_id, key: 'order_id')\n .should_encode(true))\n .header_param(new_parameter('application/json', key: 'Content-Type'))\n .body_param(new_parameter(body))\n .header_param(new_parameter('application/json', key: 'accept'))\n .body_serializer(proc do |param| param.to_json unless param.nil? end)\n .auth(Single.new('global')))\n .response(new_response_handler\n .deserializer(APIHelper.method(:json_deserialize))\n .is_api_response(true)\n .convertor(ApiResponse.method(:create)))\n .execute\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity, response: request.body.read }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: t('app.orders.update.success') }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\t\trespond_to do |format|\n\t\t\tif @order.update(order_params)\n\t\t\t\tformat.html { redirect_to @order, notice: 'Order was successfully updated.' }\n\t\t\t\tformat.mobile { redirect_to @order, notice: 'Order was successfully updated.' }\n\t\t\t\tformat.json { head :no_content }\n\t\t\telse\n\t\t\t\tformat.html { render action: 'edit' }\n\t\t\t\tformat.mobile { render action: 'edit' }\n\t\t\t\tformat.json { render json: @order.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, flash: { sucess: 'Order was successfully updated.' } }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: \"Order was successfully updated.\" }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to home_path, notice: 'Order was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @order.update(order_params)\n format.html { redirect_to @order, notice: 'Order was successfully updated.' }\n format.json { render :show, status: :ok, location: @order }\n else\n format.html { render :edit }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end" ]
[ "0.6803119", "0.6803119", "0.67749745", "0.6743427", "0.67353284", "0.6726928", "0.6723453", "0.6719653", "0.67047364", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6704673", "0.6690837", "0.6688913", "0.6688913", "0.66768485", "0.6657085", "0.6655258", "0.6652174", "0.66485804", "0.66415966", "0.66415966", "0.66415966", "0.66415966", "0.66415966", "0.66415966", "0.66415966", "0.66415966", "0.66415966", "0.66415966", "0.66415966", "0.66415966", "0.66415966", "0.66415966", "0.6639128", "0.6625475", "0.662192", "0.6588414", "0.6554208", "0.65375495", "0.6504115", "0.6503397", "0.6503364", "0.6501873", "0.65018094", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753", "0.65012753" ]
0.0
-1
Use callbacks to share common setup or constraints between actions.
def set_order @order = Order.find(params[:id]) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_required_actions\n # TODO: check what fields change to asign required fields\n end", "def action_hook; end", "def run_actions; end", "def define_action_hook; end", "def actions; end", "def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_eval do\n define_method(:valid?) do |*args|\n self.class.state_machines.fire_event_attributes(self, :save, false) { super(*args) }\n end\n end\n end\n end", "def add_actions; end", "def callbacks; end", "def callbacks; end", "def setup *actions, &proc\n (@setup_procs ||= []) << [proc, actions.size > 0 ? actions : [:*]]\n end", "def define_action_helpers; end", "def post_setup\n end", "def action_methods; end", "def action_methods; end", "def action_methods; end", "def before_setup; end", "def action_run\n end", "def execute(setup)\n @action.call(setup)\n end", "def define_action_helpers?; end", "def set_actions\n actions :all\n end", "def action_done(action)\n dispatch = { :migrate => :done_migrating, :map => :done_mapping, :reduce =>\n :done_reducing, :finalize => :done_finalizing } \n self.send dispatch[action[:action]], action\n end", "def dependencies action, &block\n @actions.each do |other|\n if action[:requires].include? other[:provide]\n block.call other\n end\n end\n end", "def setup!\n return unless @setup_procs\n http_actions = actions\n @setup_procs.each do |setup_proc|\n proc, actions = setup_proc\n @setup__actions = actions.map do |action|\n\n action.is_a?(Regexp) ?\n http_actions.select { |a| a.to_s =~ action } :\n action.is_a?(String) && action =~ /\\A\\./ ?\n http_actions.map { |a| a.to_s << action if format?(a).include?(action) }.compact :\n action\n\n end.flatten\n self.class_exec &proc\n @setup__actions = nil\n end\n @setup_procs = nil\n end", "def before_actions(*logic)\n self.before_actions = logic\n end", "def setup_handler\n end", "def set_action(opts)\n opts = check_params(opts,[:actions])\n super(opts)\n end", "def setup(action)\n @targets.clear\n unless action.item.target_filters.empty?\n @targets = SES::TargetManager.make_targets(action)\n else\n item = action.item\n if item.for_opponent?\n @targets = $game_troop.alive_members\n elsif item.for_dead_friend?\n @targets = $game_party.battle_members.select { |actor| actor.dead? }\n else\n $game_party.battle_members.select { |actor| actor.alive? 
}\n end\n end\n @item_max = @targets.size\n create_contents\n refresh\n show\n activate\n end", "def action; end", "def action; end", "def action; end", "def action; end", "def action; end", "def workflow\n end", "def revisable_shared_setup(args, block)\n class << self\n attr_accessor :revisable_options\n end\n options = args.extract_options!\n self.revisable_options = Options.new(options, &block)\n \n self.send(:include, Common)\n self.send(:extend, Validations) unless self.revisable_options.no_validation_scoping?\n self.send(:include, WithoutScope::QuotedColumnConditions)\n end", "def setup\n @action = SampleActionAndroid.new(os_name: 'android',\n app_name: APP_PATH)\n end", "def before(action)\n invoke_callbacks *self.class.send(action).before\n end", "def process_action(...)\n send_action(...)\n end", "def before_dispatch(env); end", "def after_actions(*logic)\n self.after_actions = logic\n end", "def setup\n # override and do something appropriate\n end", "def setup(client)\n return unless @setup\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n actions.each do |action|\n action.execute(client)\n end\n self\n end", "def setup(_context)\n end", "def setup(resources) ; end", "def validate_actions\n errors.add(:base, :should_give_at_least_one_action) if !manage? && !forecasting? && !read? && !api?\n end", "def setup\n @resource_config = {\n :callbacks => {\n :before_create => nil,\n :after_create => nil,\n :before_update => nil,\n :after_update => nil,\n :before_destroy => nil,\n :after_destroy => nil,\n },\n :child_assoc => nil,\n :model => nil,\n :parent => nil,\n :path => nil,\n :permission => {},\n :properties => {},\n :relation => {\n :create => nil,\n :delete => nil,\n },\n :roles => nil,\n }\n end", "def determine_valid_action\n\n end", "def process_shared\n handle_taxes\n handle_shippings\n create_adjustments_from_params\n handle_status\n handle_inventory_refunds\n handle_payment_transactions\n order.updater.update\n end", "def startcompany(action)\n @done = true\n action.setup\n end", "def init_actions\n am = action_manager()\n am.add_action(Action.new(\"&Disable selection\") { @selection_mode = :none; unbind_key(32); bind_key(32, :scroll_forward); } )\n am.add_action(Action.new(\"&Edit Toggle\") { @edit_toggle = !@edit_toggle; $status_message.value = \"Edit toggle is #{@edit_toggle}\" })\n end", "def event_callbacks(event, metadata={})\n case event\n when :reset, :review\n if confirmed\n update_attributes(confirmed: false)\n end\n when :confirm\n confirm\n # trigger :order for all applicable items\n # NOTE: :order event is common to both physical and digital items\n items.each do |i|\n if i.event_permitted(:order)\n user_id = last_transition.user_id\n i.trigger!(:order, { order_id: id, user_id: user_id })\n end\n end\n when :complete_work\n request = metadata[:request]\n work_complete_notification(request)\n when :close\n close\n end\n if event != :close && !open\n reopen\n end\n end", "def setup_action\n return unless PONY::ERRNO::check_sequence(current_act)\n new_sequence = @action_sequence[@sequence_index+1...@action_sequence.size]\n @sequence_index = 0\n new_sequence = DND::SkillSequence::ACTS[@acts[1]] + new_sequence\n execute_sequence\n end", "def define_tasks\n define_weave_task\n connect_common_tasks\n end", "def setup(&block)\n define_method(:setup, &block)\n end", "def setup\n transition_to(:setup)\n end", "def setup\n transition_to(:setup)\n end", "def action\n end", "def setup( *args 
)\n\t\t\tself.class.setupBlocks.each {|sblock|\n\t\t\t\tdebugMsg \"Calling setup block method #{sblock}\"\n\t\t\t\tself.send( sblock )\n\t\t\t}\n\t\t\tsuper( *args )\n\t\tend", "def config(action, *args); end", "def setup\n @setup_proc.call(self) if @setup_proc\n end", "def before_action \n end", "def setup_callbacks\n defined_callbacks.each do |meth|\n unless respond_to?(\"call_#{meth}_callbacks\".to_sym)\n self.class.module_eval <<-EOE\n def call_#{meth}_callbacks(*args)\n plugin_store.each {|a| a.call_#{meth}_callbacks(*args) } if respond_to?(:plugin_store) && plugin_store\n self.send :#{meth}, *args if respond_to?(:#{meth})\n end\n EOE\n end\n end\n end", "def action\n end", "def matt_custom_action_begin(label); end", "def setup\n # override this if needed\n end", "def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend", "def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend", "def action(options,&callback)\n new_action = Action===options ? options : Action.new(options,&callback)\n # replace any with (shared name/alias or both default) + same arity\n @actions.delete_if do |existing_action|\n ((existing_action.names & new_action.names).size > 0 ||\n existing_action.default? && new_action.default?) &&\n existing_action.required.size == new_action.required.size &&\n existing_action.optional.size <= new_action.optional.size\n end\n @actions = (@actions + [new_action]).sort\n new_action\n end", "def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action\n end", "def after(action)\n invoke_callbacks *options_for(action).after\n end", "def pre_task\n end", "def setup(server)\n server.on('beforeMethod', method(:before_method), 10)\n end", "def add_actions\n attribute = machine.attribute\n name = self.name\n \n owner_class.class_eval do\n define_method(name) {self.class.state_machines[attribute].events[name].fire(self)}\n define_method(\"#{name}!\") {self.class.state_machines[attribute].events[name].fire!(self)}\n define_method(\"can_#{name}?\") {self.class.state_machines[attribute].events[name].can_fire?(self)}\n end\n end", "def init_actions\n @select_action = SelectAction.new\n @endpoint_mouse_action = EndpointMouseAction.new\n @move_action = MoveAction.new\n end", "def setup_signals; end", "def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend", "def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend", "def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action.respond_to?('weak!') ? action.weak! 
: action\n end", "def initialize(*args)\n super\n @action = :set\nend", "def after_set_callback; end", "def setup\n #implement in subclass;\n end", "def lookup_action; end", "def setup &block\n if block_given?\n @setup = block\n else\n @setup.call\n end\n end", "def setup_action\n return TSBS.error(@acts[0], 1, @used_sequence) if @acts.size < 2\n actions = TSBS::AnimLoop[@acts[1]]\n if actions.nil?\n show_action_error(@acts[1])\n end\n @sequence_stack.push(@acts[1])\n @used_sequence = @acts[1]\n actions.each do |acts|\n @acts = acts\n execute_sequence\n break if @break_action\n end\n @sequence_stack.pop\n @used_sequence = @sequence_stack[-1]\n end", "def release_actions; end", "def around_hooks; end", "def save_action; end", "def setup(easy)\n super\n easy.customrequest = @verb\n end", "def action_target()\n \n end", "def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end", "def setup\n return unless @setup\n\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n run_actions_and_retry(actions)\n self\n end", "def before_setup\n # do nothing by default\n end", "def my_actions(options)\n @setup = false\n get_template_part(\"custom_used\",\"action_users\",true)\n end", "def default_action; end", "def setup(&blk)\n @setup_block = blk\n end", "def callback_phase\n super\n end", "def advice\n end", "def _handle_action_missing(*args); end", "def duas1(action)\n action.call\n action.call\nend", "def shared_action(name, &block)\n @controller.shared_actions[name] = block\n end", "def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end", "def setup_initial_state\n\n state_a = State.new(\"a\", 0)\n state_b = State.new(\"b\", 0)\n state_c = State.new(\"c\", 10)\n\n move_to_b = Action.new(\"move_to_b\", 1, state_b)\n\n move_to_c = Action.new(\"move_to_c\", 1, state_c)\n\n state_a.actions = [move_to_b, move_to_c]\n\n return state_a\n \nend" ]
[ "0.6163163", "0.6045976", "0.5946146", "0.591683", "0.5890051", "0.58349305", "0.5776858", "0.5703237", "0.5703237", "0.5652805", "0.5621621", "0.54210985", "0.5411113", "0.5411113", "0.5411113", "0.5391541", "0.53794575", "0.5357573", "0.53402257", "0.53394014", "0.53321576", "0.53124547", "0.529654", "0.5296262", "0.52952296", "0.52600986", "0.52442724", "0.52385926", "0.52385926", "0.52385926", "0.52385926", "0.52385926", "0.5232394", "0.523231", "0.5227454", "0.52226824", "0.52201617", "0.5212327", "0.52079266", "0.52050185", "0.51754695", "0.51726824", "0.51710224", "0.5166172", "0.5159343", "0.51578903", "0.51522785", "0.5152022", "0.51518047", "0.51456624", "0.51398855", "0.5133759", "0.5112076", "0.5111866", "0.5111866", "0.5110294", "0.5106169", "0.509231", "0.50873137", "0.5081088", "0.508059", "0.50677156", "0.50562143", "0.5050554", "0.50474834", "0.50474834", "0.5036181", "0.5026331", "0.5022976", "0.5015441", "0.50121695", "0.5000944", "0.5000019", "0.4996878", "0.4989888", "0.4989888", "0.49864885", "0.49797225", "0.49785787", "0.4976161", "0.49683493", "0.4965126", "0.4958034", "0.49559742", "0.4954353", "0.49535993", "0.4952725", "0.49467874", "0.49423352", "0.49325448", "0.49282882", "0.49269363", "0.49269104", "0.49252945", "0.4923091", "0.49194667", "0.49174926", "0.49173003", "0.49171105", "0.4915879", "0.49155936" ]
0.0
-1
Never trust parameters from the scary internet, only allow the white list through.
def order_params params.require(:order).permit(:special_instructions, :shipping_method_id, line_items_attributes: [:id, :variant_id, :price, :quantity, :_destroy]) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def strong_params\n params.require(:user).permit(param_whitelist)\n end", "def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end", "def allow_params_authentication!; end", "def allowed_params\n ALLOWED_PARAMS\n end", "def default_param_whitelist\n [\"mode\"]\n end", "def param_whitelist\n [:role, :title]\n end", "def expected_permitted_parameter_names; end", "def safe_params\n params.except(:host, :port, :protocol).permit!\n end", "def strong_params\n params.require(:team_member).permit(param_whitelist)\n end", "def permitir_parametros\n \t\tparams.permit!\n \tend", "def strong_params\n params.require(:community).permit(param_whitelist)\n end", "def permitted_strong_parameters\n :all #or an array of parameters, example: [:name, :email]\n end", "def strong_params\n params.require(:education).permit(param_whitelist)\n end", "def restricted_params\n #params.require(self.controller_name.classify.underscore.to_sym).permit([])\n raise(\"No strong params set, override restricted_params method in your controller. E.g. params.require(:model).permit(:attribute1, :attribute2)\")\n end", "def allowed_params\n params.require(:user).permit(:username, :email, :password, :password_confirmation)\n end", "def param_whitelist\n [:rating, :review]\n end", "def param_whitelist\n whitelist = [\n :username, :name,\n :parent_id,\n :headline, :description, :video,\n :policy, :signup_mode, :category,\n :website, :facebook, :twitter, :linkedin,\n :founded_at,\n privacy: [\n :events,\n :resources\n ],\n permission: [\n :profile,\n :members,\n :children,\n :statistics,\n :posts,\n :listings,\n :resources,\n :events\n ],\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:parent_id)\n unless current_user.role_in(@community) === 'owner'\n whitelist.delete(:privacy)\n whitelist.delete(:permission)\n end\n end\n \n whitelist\n end", "def param_whitelist\n if @user.present? && current_user != @user\n return [:followed]\n end\n \n whitelist = [\n :username, :email, :password,\n :first_name, :last_name,\n :birthday, :gender,\n :headline, :biography, :ask_about, :focus,\n :website, :facebook, :linkedin, :twitter, :github,\n roles: [],\n skills: [],\n interests: [],\n privacy: { contact: [] },\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:email)\n whitelist.delete(:password)\n end\n \n whitelist\n end", "def user_params \n \tparams.require(:user).permit(:name, :email, :password, :password_confirmation)# preventing CSTR\n end", "def valid_params_request?; end", "def strong_params\n params.require(:experience).permit(param_whitelist)\n end", "def user_params\n params.permit(:name, :phoneNumber, :address, :postalCode, :local, :link, :counter, :latitude, :longitude) \n end", "def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? 
key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end", "def whitelist_url_params\n params.require(:whitelist_url).permit(:domain)\n end", "def allowed_params\n params.require(:allowed).permit(:email)\n end", "def permitted_params\n []\n end", "def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end", "def safe_params\n params.permit(:id, :name, :origin, :emails => []); #emails is an array\n end", "def query_param\n\t\tparams.permit(:first_name, :last_name, :phone)\n\tend", "def strong_params\n params.require(:success_metric).permit(param_whitelist)\n end", "def devise_filter\r\n logger.debug(\"In devise_filter =>PARAMS: #{params.inspect}\")\r\n\r\n # White list for sign_up\r\n devise_parameter_sanitizer.for(:sign_up) { |u| u.permit(user_whitelist) }\r\n\r\n # White list for account update\r\n devise_parameter_sanitizer.for(:account_update) { |u| u.permit(user_whitelist, :current_password) }\r\n\r\n # White list for Invitation creation\r\n devise_parameter_sanitizer.for(:invite) { |u| u.permit(:account_type, :email, :invitation_token)}\r\n\r\n # White list for accept invitation\r\n devise_parameter_sanitizer.for(:accept_invitation) { |u| u.permit(user_whitelist, :invitation_token)}\r\n\r\n end", "def whitelisted_user_params\n params.require(:user).\n permit( :first_name, :last_name, :email,:password,:password_confirmation,:birthday,:gender)\n end", "def user_params\n ActionController::Parameters.permit_all_parameters = true\n params.require(:user) #.permit(:name, :surname, :phone, :password, :email, :time_zone)\n end", "def safe_params\n params.require(:user).permit(:name)\n end", "def strong_params\n params.require(:metric_change).permit(param_whitelist)\n end", "def get_params\n\t\treturn ActionController::Parameters.new(self.attributes).permit(\"account_id\", \"title\", \"category\", \"introduction\", \"tags\", \"segment_type\", \"visible\", \"status\", \"main_image\")\n\tend", "def grant_params\n @whitelisted = params.require(:grant).permit(:name, :description, :agency_id, :acronym)\n end", "def check_params; true; end", "def param_whitelist\n whitelist = [\n :description,\n :progress,\n :kpi_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:kpi_id)\n end\n \n whitelist\n end", "def quote_params\n params.permit!\n end", "def valid_params?; end", "def paramunold_params\n params.require(:paramunold).permit!\n end", "def user_params\n\t\tparams.permit(:nickname, :avatar, :description, :password, :gender, :birthday, :email, :phone, :qq_id, :wechat_id)\n\tend", "def filtered_parameters; end", "def user_params\n params.permit(\n \t:id,\n \t:email, \n \t:first_name, \n \t:last_name, \n \t:password, \n \t:confirm_token, \n \t:phone_number,\n \t:facebook_link,\n \t:car_model,\n \t:license_plate)\n end", "def filtering_params\n params.permit(:email, :name)\n end", "def check_params\n true\n end", "def wx_public_params\n params.require(:wx_public).permit(:nickname, :manager, :alias)\n end", "def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end", "def allowed_params\n 
params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end", "def listing_params\n\t\tparams.permit(:address, :transit_info, :rules, :other_info, :lat, :lng)\n\tend", "def social_account_params\n\t\t\tparams.require(:social_account).permit!\n\t\tend", "def safe_params\n resurce_name = self.class.resource_name\n params_method_name = \"#{resurce_name}_params\".to_sym\n if params[resurce_name]\n if respond_to?(params_method_name) || private_methods.include?(params_method_name)\n send(params_method_name)\n else\n raise ActiveModel::ForbiddenAttributesError, \"Please, define the '#{params_method_name}' method in #{self.class.name}\"\n end\n end\n end", "def url_params\n params.require(:url).permit(:short_url, :original_url, :clicks, :ip_addresses)\n end", "def user_params\n params.require(:user).permit(:uri, :username, :password, :realname, :email, :publicvisible)\n end", "def model_params\n\t\tparams.require(:manager).permit(\n\t :user_name,\n :password,\n :email,\n \t\t\t)\n\tend", "def article_params_whitelist\n params.require(:article).permit(:title, :description, category_ids: [])\n end", "def college_whitelist_params\n params.require(:college_whitelist).permit(:status)\n end", "def filtering_params\n params.permit(:email)\n end", "def active_code_params\n params[:active_code].permit\n end", "def valid_params(params)\n params.permit(:user_id, :photo_id, :originX, :originY, :width, :height)\n end", "def ip_address_params\n\t\t\tparams.require(:ip_address).permit!\n end", "def reserved_params\n params.require(:reserved).permit(:name, :email, :pax, :address, :KTP, :title)\n end", "def pull_request_params\n whitelist = [\n :url,\n :id,\n :html_url,\n :diff_url,\n :patch_url,\n :issue_url,\n :number,\n :state,\n :locked,\n :title\n ]\n params.require(:pull_request).permit(whitelist)\n end", "def post_params\n if current_user.admin? 
\n params.permit(:title, :body, :city, :country, :gps_location, :privacy, :visible, :latitude, :longitude, images: [], files: [])\n else \n params.permit(:title, :body, :city, :country, :gps_location, :privacy,:latitude, :longitude, images: [], files: [])\n end \n end", "def filter_parameters; end", "def filter_parameters; end", "def list_params\n params.permit(:name)\n end", "def vineyard_params\n params.permit(:vineyard_name, :email, :website_url, :phone, :address, :city, :region, :postcode, :country, :specialty, :description, :pet_friendly, :holiday, :tours, :events, :family_friendly, :cover_image, :image_one, :image_two, :image_three, :image_four, :user_id, :base64)\n end", "def available_activity_params\n # params.require(:available_activity).permit(:type,:geometry,:properties)\n whitelisted = ActionController::Parameters.new({\n type: params.require(:available_activity)[:type],\n geometry: params.require(:available_activity)[:geometry].try(:permit!).to_h,\n properties: params.require(:available_activity)[:properties].try(:permit!).to_h\n }).try(:permit!)\n end", "def user_params\n params.permit(:name, :username, :email, :password, :img_url, :bg_url, :coinbank)\n end", "def user_params_pub\n\t \tparams[:user].permit(:hruid)\n\t end", "def user_params\n params.permit(:id, :email, :password, :nickname, :status, :avatar, :flat_picture, :flatsharing_id, :member,\n :user, :color, :solde)\n end", "def validate_search_inputs\n @whitelisted = params.fetch(:user, nil)\n if @whitelisted.blank?\n render_error(400, \"#{I18n.t('general_error.params_missing_key')}\": [I18n.t('general_error.params_missing_value', model: \"review\")])\n return\n else\n @whitelisted = @whitelisted.permit(:name, :uen, :description)\n end\n end", "def param_whitelist\n [\n :title,\n :description,\n :organization,\n :team_id,\n :started_at,\n :finished_at,\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n end", "def url_whitelist; end", "def admin_social_network_params\n params.require(:social_network).permit!\n end", "def filter_params\n params.require(:filters).permit(:letters)\n end", "def valid_params(params)\n params.permit(:login, :first_name, :last_name, \n :password, :password_confirmation)\n end", "def origin_params\n params.permit(:country, :state, :city, :postal_code, :address, :description)\n end", "def sensitive_params=(params)\n @sensitive_params = params\n end", "def permit_request_params\n params.permit(:address)\n end", "def user_params\n # Ensure a user can't give themselves admin priveleges\n params.delete(:admin) if current_user.admin?\n params.require(:user).permit(:name, :email, :admin, :image)\n end", "def secure_params\n params.require(:location).permit(:name)\n end", "def strong_params\n params.require( :setting ).\n permit( :global_scan_limit, :per_user_scan_limit,\n :target_whitelist_patterns, :target_blacklist_patterns )\n end", "def question_params\n params.require(:survey_question).permit(question_whitelist)\n end", "def case_insensitive_params\n params.require(:case_insensitive).permit(:name)\n end", "def maintenance_request_params\n params[:maintenance_request].permit! 
#allow all parameters for now\n end", "def empire_master_no_match_params\n params.require(:empire_master_no_match).permit(:uid, :last_name, :list, :search_date, :double, :source)\n end", "def unwanted_params\n params.require(:unwanted).permit(:title, :description, :image)\n end", "def url_params\n params[:url].permit(:full)\n end", "def backend_user_params\n params.permit!\n end", "def filter_params\n\t\treturn params[:candidate].permit(:name_for_filter)\n\tend", "def speed_measurement_params\n\n #fuckit, to lazy to deal with permit crap right now\n ActionController::Parameters.permit_all_parameters = true\n\n params[:speed_measurement]\n end", "def user_params\n params.permit(:name, :age, :username, :display_photo, :password)\n end", "def get_params\r\n #params.require(:article).permit(:title, :permalink, :content, :source_site, :introtext, :type_id, :order_by, :searchable, :created_by, :edited_by, :published_by, :published_on, :user_id)\r\n params.require(:article).permit!\r\n\r\n end", "def pub_params\n params.require(:pub).permit(:name, :description, :phone, :email, :hidden, :city_id, :address)\n end", "def droptraining_params\n params.permit(:training_id,:user_id, :utf8, :authenticity_token, :commit)\n end", "def pass_params\n params[:pass].permit(:name, :price, :description, :colour, :events)\n end", "def person_params\n # params whitelist does *not* include admin, sub, remember_token\n # TBD: share this whitelist with the list used by configuration_permitted_parameters\n # TBD: should current_password be on this list? -- for now, leaving off, since it seems to work without\n # NOTE: do not include 'admin' in this list!\n params.require(:person).permit(\n :name, \n :email, \n :description,\n :password, \n :password_confirmation\n )\n end", "def parameter_params\n params.require(:parameter).permit(:name, :description, :param_code, :param_value, :active_from, :active_to)\n end" ]
[ "0.6981606", "0.6784227", "0.6746523", "0.67439264", "0.67361516", "0.6593381", "0.6506166", "0.64994407", "0.6483518", "0.64797056", "0.64578557", "0.6441216", "0.63811713", "0.63773805", "0.6366333", "0.63217646", "0.6301816", "0.63009787", "0.6294436", "0.62940663", "0.6292164", "0.62917984", "0.62836355", "0.6242686", "0.6241917", "0.62210834", "0.6214862", "0.62125784", "0.619428", "0.617912", "0.617705", "0.61735916", "0.6163706", "0.61532795", "0.6152666", "0.6148062", "0.6123372", "0.61180484", "0.61088324", "0.6106139", "0.60925204", "0.608326", "0.60711503", "0.606551", "0.60216546", "0.6018924", "0.6015004", "0.60106766", "0.6008301", "0.6008301", "0.60028726", "0.60020626", "0.5999236", "0.59931505", "0.5993037", "0.59917194", "0.5982164", "0.5968051", "0.5960277", "0.5960268", "0.5960012", "0.59594494", "0.5954652", "0.5954304", "0.59440255", "0.59404963", "0.59404963", "0.59401006", "0.593522", "0.5932182", "0.5925528", "0.5924541", "0.5918796", "0.59123147", "0.5910144", "0.5909186", "0.5907257", "0.5899382", "0.5897783", "0.58972496", "0.58958495", "0.58948576", "0.5892734", "0.5888056", "0.58843875", "0.58818483", "0.5873746", "0.58700997", "0.5870056", "0.5869255", "0.58668107", "0.58662325", "0.5865003", "0.5862908", "0.5862406", "0.58614665", "0.5859661", "0.585562", "0.5855185", "0.58523446", "0.58504915" ]
0.0
-1
def facet_filter override this to provide additional constraints nil end
def es_facet_class if is_distance? GeoDistanceFacet else RangeFacet end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_facet_field(*) super end", "def render_filter_element(facet, values, localized_params)\n # Overrride BL's render_filter_element\n # When creating remove filter links exclude the date range added parameters, if present\n # Otherwise the filter gets removed but the parameters stay in the URL\n if facet == 'sdateRange'\n excluded_params = [:year_from, :year_to]\n new_localized_params = localized_params.clone\n new_localized_params.except!(*excluded_params)\n\n super(facet, values, new_localized_params)\n else\n super(facet, values, localized_params)\n end\n end", "def add_facetting_to_solr(solr_params)\n if %w[collection_context online_contents].include? blacklight_params[:view]\n return solr_params\n end\n\n super(solr_params)\n end", "def facet\n @pagination = get_facet_pagination\n end", "def facet_partial_name(display_facet = nil)\n if advanced_query&.filters&.keys&.include?(display_facet.name)\n 'blacklight_advanced_search/facet_limit'\n else\n super\n end\n end", "def facet(facet_field)\n params['facet_field'] = facet_field\n params['facet_filter'] = true\n self\n end", "def facets_of_filter_type(filter_type)\n return unless filter_class = available_filter_of_type(filter_type)\n proper_field = has_applied_filter_of_type?(filter_type) ? applied_filter_of_type(filter_type).field : filter_class.field\n self.facets.options.delete(:page)\n self.facets.options.delete(:per_page)\n self.facets[proper_field]\n end", "def facets\n self.facet_list.any? ? SearchFacet.where(:identifier.in => self.facet_list) : SearchFacet.visible\n end", "def render_search_to_page_title_filter(facet, values)\n return \"\" unless facet && values\n super(facet, values)\n end", "def include_suppressed_facet(solr_parameters)\n solr_parameters.append_facet_fields(IndexesWorkflow.suppressed_field)\n solr_parameters[:'facet.missing'] = true\n\n # we only want the facet counts not the actual data\n solr_parameters[:rows] = 0\n end", "def facet\n @pagination = get_facet_pagination(params[:id])\n end", "def process_facets\n public_plans_params.fetch(:facet, {})\n end", "def filter\n super\n end", "def facet\n @facet = blacklight_config.facet_fields[params[:id]]\n raise ActionController::RoutingError, 'Not Found' unless @facet\n\n @response = search_service.facet_field_response(@facet.key)\n @display_facet = @response.aggregations[@facet.field]\n\n @presenter = @facet.presenter.new(@facet, @display_facet, view_context)\n @pagination = @presenter.paginator\n respond_to do |format|\n format.html do\n # Draw the partial for the \"more\" facet modal window:\n return render layout: false if request.xhr?\n # Otherwise draw the facet selector for users who have javascript disabled.\n end\n format.json\n end\n end", "def facet_field_in_params? 
field\n pivot = facet_configuration_for_field(field).pivot\n if pivot\n pivot_facet_field_in_params?(pivot)\n else \n params[:f] and params[:f][field]\n end\n end", "def facet_search_options\n # maxValuesPerFacet: 20,\n personal_search_options.merge(\n facets: '_tags', page: 0, hitsPerPage: 1, tagFilters: ''\n )\n end", "def facet\n # raise\n @facet = blacklight_config.facet_fields[params[:id]]\n return render json: nil, status: :bad_request unless @facet\n\n # Allow callers to pass in extra params, that won't be sanitized-out by\n # the processing that 'params' undergoes\n extra_params = params[:extra_params] || {}\n\n @response = get_facet_field_response(@facet.key, params, extra_params)\n @display_facet = @response.aggregations[@facet.key]\n\n @pagination = facet_paginator(@facet, @display_facet)\n\n # 2/23/2017 - turned off after two weeks of data collection (NEXT-908)\n # # 2/7/2017 - get some info on see-more sizes, hopefully to be\n # # turned off pretty soon. Hardcode test to current limit, 500)\n # limit = (@display_facet.items.size == 501) ? ' - HIT LIMIT' : ''\n # Rails.logger.warn \"FACET-SEE-MORE name: #{@display_facet.name} count: #{@display_facet.items.size}#{limit}\"\n\n respond_to do |format|\n # Draw the facet selector for users who have javascript disabled:\n format.html\n format.json { render json: render_facet_list_as_json }\n\n # Draw the partial for the \"more\" facet modal window:\n format.js { render layout: false }\n end\n end", "def facet_field_in_params?(field)\n pivot = facet_configuration_for_field(field).pivot\n if pivot\n pivot_facet_field_in_params?(pivot)\n else\n params[:f] && params[:f][field]\n end\n end", "def facet\n # raise\n @facet = blacklight_config.facet_fields[params[:id]]\n return render json: nil, status: :bad_request unless @facet\n\n # Allow callers to pass in extra params, that won't be sanitized-out by\n # the processing that 'params' undergoes\n extra_params = params[:extra_params] || {}\n\n @response = get_facet_field_response(@facet.key, params, extra_params)\n @display_facet = @response.aggregations[@facet.key]\n\n @pagination = facet_paginator(@facet, @display_facet)\n\n # 2/23/2017 - turned off after two weeks of data collection (NEXT-908)\n # # 2/7/2017 - get some info on see-more sizes, hopefully to be\n # # turned off pretty soon. Hardcode test to current limit, 500)\n # limit = (@display_facet.items.size == 501) ? 
' - HIT LIMIT' : ''\n # Rails.logger.warn \"FACET-SEE-MORE name: #{@display_facet.name} count: #{@display_facet.items.size}#{limit}\"\n\n respond_to do |format|\n # Draw the facet selector for users who have javascript disabled:\n format.html { render layout: false }\n format.json { render json: render_facet_list_as_json }\n\n # Draw the partial for the \"more\" facet modal window:\n format.js { render layout: false }\n end\n end", "def add_facet_params_and_redirect(field, value)\n new_params = super\n\n # Delete :qt, if needed - added to resolve NPE errors\n new_params.delete(:qt)\n\n new_params\n end", "def facet_params\n\t\tparams.require(:facet).permit(:name)\n\tend", "def no_facets_or_highlight(solr_params)\n solr_params['facet'] = false\n solr_params.delete('facet.fields')\n solr_params.delete('facet.query')\n solr_params.delete('facet.pivot')\n solr_params.delete('hl.fl')\n end", "def facets\n @facets ||= raw[:facets]\n end", "def query_has_facetfilters?(localized_params = params)\n (generate_next_url.scan(\"facetfilter[]=\").length > 0) or (generate_next_url.scan(\"limiter[]=\").length > 0)\n end", "def should_render_facet?(display_facet)\n return false if display_facet.name == 'human_readable_type_sim' && display_facet.items.reject { |item| item.value == 'Collection'}.empty?\n super\n end", "def apply_filter\n end", "def facets\n @facets ||= search.facets\n end", "def solr_facet_params(facet_field, extra_controller_params={})\n input = params.deep_merge(extra_controller_params)\n solr_params = solr_search_params(extra_controller_params)\n \n # add additional facet.field parameter to retrieve any a-z facets\n # e.g. solr_params[:\"facet.field\"] = [author_facet, \"author_first_letter\"]\n if Blacklight.config[:facet][:a_to_z].has_key? facet_field\n solr_params[:\"facet.field\"] = [facet_field, Blacklight.config[:facet][:a_to_z][facet_field]] \n else\n solr_params[:\"facet.field\"] = facet_field\n end\n \n solr_params[:\"f.#{facet_field}.facet.limit\"] = \n if solr_params[\"facet.limit\"] \n solr_params[\"facet.limit\"] + 1\n elsif respond_to?(:facet_list_limit)\n facet_list_limit.to_s.to_i + 1\n else\n 20 + 1\n end\n # use facet.prefix to filter out outliers not corresponding a particular alphabet facet\n solr_params['facet.prefix'] = extra_controller_params[:\"catalog_facet.prefix\"].upcase if extra_controller_params.has_key?(\"catalog_facet.prefix\") \n solr_params['facet.offset'] = input[ Blacklight::Solr::FacetPaginator.request_keys[:offset] ].to_i\n solr_params['facet.sort'] = input[ Blacklight::Solr::FacetPaginator.request_keys[:sort] ] \n solr_params[:rows] = 0\n \n return solr_params\n end", "def solr_facet_params(facet_field, extra_controller_params={})\n input = params.deep_merge(extra_controller_params)\n {\n :phrase_filters => input[:f],\n :q => input[:q],\n :facets => {:fields => facet_field},\n 'facet.limit' => 6,\n 'facet.offset' => input[:offset].to_i,\n }\n end", "def render_facet_value(facet_solr_field, item, options ={})\n return if facet_solr_field == 'human_readable_type_sim' && item.value == 'Collection'\n super\n end", "def get_facet_pagination(facet_field, extra_controller_params={})\n solr_params = solr_facet_params(facet_field, extra_controller_params)\n response = Blacklight.solr.find(solr_params)\n\n limit = \n if respond_to?(:facet_list_limit)\n facet_list_limit.to_s.to_i\n elsif solr_params[:\"f.#{facet_field}.facet.limit\"]\n solr_params[:\"f.#{facet_field}.facet.limit\"] - 1\n else\n nil\n end\n \n intended_facets = response.facets.select {|facet| 
facet.name.include?(facet_field)}.first.items\n intended_paginator = Blacklight::Solr::FacetPaginator.new(intended_facets, \n :prefix => solr_params['facet.prefix'],\n :offset => solr_params['facet.offset'],\n :limit => limit,\n :sort => response[\"responseHeader\"][\"params\"][\"f.#{facet_field}.facet.sort\"] || response[\"responseHeader\"][\"params\"][\"facet.sort\"]\n )\n\n if Blacklight.config[:facet][:a_to_z].has_key? facet_field\n extra_controller_params.delete(Blacklight.config[:facet][:a_to_z][facet_field])\n prefix = extra_controller_params[\"catalog_facet.prefix\"]\n extra_controller_params.delete(\"catalog_facet.prefix\") # in order to retrieve the a-z facet listing\n extra_controller_params[\"catalog_facet.offset\"]=0\n extra_controller_params[\"catalog_facet.sort\"]= \"index\"\n solr_params = solr_facet_params(facet_field, extra_controller_params)\n response = Blacklight.solr.find(solr_params)\n a_to_z_facets = response.facets.select {|facet| facet.name.include?(Blacklight.config[:facet][:a_to_z][facet_field])}.first.items\n a_to_z_paginator = Blacklight::Solr::FacetPaginator.new(a_to_z_facets,\n :prefix => prefix,\n :offset => solr_params['facet.offset'],\n :limit => limit,\n :sort => response[\"responseHeader\"][\"params\"][\"f.#{facet_field}.facet.sort\"] || response[\"responseHeader\"][\"params\"][\"facet.sort\"]\n )\n return { facet_field => intended_paginator, Blacklight.config[:facet][:a_to_z][facet_field] => a_to_z_paginator }\n else\n return intended_paginator\n end\n \n end", "def filter_complex\n @filtergraph ||= FilterGraph\n end", "def non_applied_facets_of_filter_type(filter_type)\n return {} unless has_facets_of_filter_type?(filter_type)\n values = applied_filter_values_of_type(filter_type).map {|v| v.to_s}\n facets_of_filter_type(filter_type).reject {|k,v| values.include?(k.to_s)}\n end", "def true_facet?\n @type != 'pseudo_facet'\n end", "def filters; end", "def filters; end", "def facet_field_names\n Blacklight.config[:facet][:field_names]\n end", "def filter_parameters; end", "def filter_parameters; end", "def filter(options={})\n super\n end", "def update_filter_values!(external_facet = nil)\n external_facet ||= {} # to prevent errors later when checking external_facet attributes\n if is_numeric?\n values = get_unique_filter_values\n # only process external numeric facet if unit is compatible\n if external_facet[:is_numeric] && external_facet[:unit] == unit\n Rails.logger.info \"Merging #{external_facet} into '#{name}' facet filters\"\n # cast values to floats to get around nil comparison issue\n values[:MIN] = external_facet[:min] if values[:MIN].to_f > external_facet[:min].to_f\n values[:MAX] = external_facet[:max] if values[:MAX].to_f < external_facet[:max].to_f\n end\n return false if values.empty? # found no results, meaning an error occurred\n\n update(min: values[:MIN], max: values[:MAX])\n else\n values = get_unique_filter_values(public_only: false)\n merged_values = values.dup\n if external_facet[:filters]\n Rails.logger.info \"Merging #{external_facet[:filters]} into '#{name}' facet filters\"\n external_facet[:filters].each do |filter|\n merged_values << { id: filter, name: filter } unless filters_include?(filter)\n end\n end\n return false if values.empty? # found no results, meaning an error occurred\n\n values.sort_by! { |f| f[:name] }\n merged_values.sort_by! 
{ |f| f[:name] }\n public_values = get_unique_filter_values(public_only: true)\n update(filters: values, public_filters: public_values, filters_with_external: merged_values)\n end\n end", "def remove_advanced_facet_param(field, value, my_params = nil)\n my_params ||= params\n result = Blacklight::SearchStateExt.new(my_params, blacklight_config).to_h\n if result&.fetch(:f_inclusive, nil)&.fetch(field, nil)&.include?(value)\n result[:f_inclusive] = result[:f_inclusive].dup\n result[:f_inclusive][field] = result[:f_inclusive][field].dup\n result[:f_inclusive][field].delete(value)\n result[:f_inclusive].delete(field) if result[:f_inclusive][field].empty?\n result.delete(:f_inclusive) if result[:f_inclusive].empty?\n end\n result.except(:id, :counter, :page, :commit)\n end", "def facet_queries\n @facet_queries ||= facet_counts['facet_queries'] || {}\n end", "def facet_queries\n @facet_queries ||= facet_counts['facet_queries'] || {}\n end", "def facet_list_limit\n (params[:limit]) ? params[:limit] : 20\n end", "def facet_field_names_for_advanced_search\n @facet_field_names_for_advanced_search ||=\n blacklight_config.facet_fields.reject { |_key, field_def|\n field_def.include_in_advanced_search.is_a?(FalseClass)\n }.values.map(&:field)\n end", "def render_constraints_filters(localized_params = params)\n return \"\" unless localized_params[:f]\n content = \"\"\n localized_params[:f].each_pair do |facet,values| \n values.each do |val|\n content << render_constraint_element( facet_field_labels[decode_breadcrumb_key_for_name(facet)],\n val,\n :breadcrumb => catalog_index_path + create_breadcrumb_url(facet, val, localized_params),\n :remove => catalog_index_path + remove_facet_params(facet, val, localized_params),\n :classes => [\"filter\"] \n ) + \"\\n\"\n end\n end \n return content \n end", "def remove_facet_params(field, item, source_params=params)\n if item.respond_to? :field\n field = item.field\n end\n\n value = facet_value_for_facet_item(item)\n\n p = reset_search_params(source_params)\n # need to dup the facet values too,\n # if the values aren't dup'd, then the values\n # from the session will get remove in the show view...\n p[:f] = (p[:f] || {}).dup\n p[:f][field] = (p[:f][field] || []).dup\n p[:f][field] = p[:f][field] - [value]\n p[:f].delete(field) if p[:f][field].size == 0\n p.delete(:f) if p[:f].empty?\n p\n end", "def filters\n end", "def facet_display_order\n priority_facets = (params[:facets]&.keys || []) + (params[:ranges]&.keys || [])\n return priority_facets unless @search_models.include? Item\n\n priority_facets + [Item.solr_exporter_class.solr_name_for(:all_contributors, role: :facet),\n Item.solr_exporter_class.solr_name_for(:all_subjects, role: :facet)]\n end", "def facet_queries\n @facet_queries ||= facets['facet_queries'] || {}\n end", "def filters=(_arg0); end", "def filters=(_arg0); end", "def facet_by_field_name(name)\n @facets_by_field_name ||= {}\n @facets_by_field_name[name] ||= (\n facets.detect{|facet|facet.name.to_s == name.to_s}\n )\n end", "def clear_facets\n add_actions 'ClearFacetFilters()'\n end", "def facet_json\n @facet_json ||= (self['facets'] || {}).select { |k, v| k != 'count' }\n end", "def should_collapse_facet? 
facet_field\n !facet_field_in_params?(facet_field.field) && facet_field.collapse\n end", "def render_facet_limit(display_facet, options = {})\n return if not should_render_facet?(display_facet)\n options = options.dup\n options[:partial] ||= facet_partial_name(display_facet)\n options[:layout] ||= \"facet_layout\" unless options.has_key?(:layout)\n options[:locals] ||= {}\n options[:locals][:solr_field] ||= display_facet.name \n options[:locals][:facet_field] ||= facet_configuration_for_field(display_facet.name)\n options[:locals][:display_facet] ||= display_facet \n\n render(options)\n end", "def facet_description\n %{ in facet \"#{@field}\"}\n end", "def to_facet\n TermsFacet.new(\n default_params.merge(\n :label => prefix_label('display_result'),\n :size => (@num_result_rows || self.class::DEFAULT_NUM_RESULTS) + checked_rows.length,\n :exclude => exclude,\n :facet_filter => facet_filter\n )\n )\n end", "def facets\n @solr_data[:facets]\n end", "def facet_in_params?(field, value)\n params[:f] and params[:f][field] and params[:f][field].include?(value)\n end", "def facets\n @solr_data[:facets]\n end", "def facet_pivot\n @facet_pivot ||= facet_counts['facet_pivot'] || {}\n end", "def add_facet_params(field, item)\n\t\tfilter(field).add(item).params\n\tend", "def facet\n @browse_config = BlacklightFacetBrowse::ConfigInfo.new(blacklight_config, params[:id])\n\n if ! @browse_config.browse_configured?\n # first do no harm, do nothing if the facet ain't configured\n # for browse. \n super\n else\n # Mostly copied and modified from current BL 4.4, although will work\n # with older BL, in some cases adding features. Use\n # our custom get_browse_facet_pagination\n\n # If no other sort is specified, and we have no prefix query,\n # insist on defaulting to 'index', anything else is confusing. \n if params[\"catalog_facet.sort\"].blank? && params[ @browse_config.query_param_name ].present?\n params[\"catalog_facet.sort\"] = \"index\"\n end\n\n\n @pagination = get_browse_facet_pagination(params[:id], params)\n\n respond_to do |format| \n format.html do \n # we're going to use a custom view, possibly user specified,\n # but the default is \"browsable_facet\"\n render @browse_config.browsable_facet_template\n end\n\n # Draw the partial for the \"more\" facet modal window,\n # without layout. \n format.js { render @browse_config.browsable_facet_template, :layout => false }\n\n # Json format copied from BL 4.4, there was no json response in\n # BL 3.5, we need one, sure let's use that one to try and be compat -- \n # we intentionally don't use a hook method that lets someone redefine\n # this, because if they redefine it our js won't understand it!\n format.json { render json: {response: {facets: @pagination }}}\n end\n end\n end", "def filter\n end", "def facet_field?(name)\n facet_fields.key?(name)\n end", "def search_facet_fields\n self.content_columns.select {|c| [:boolean,:decimal,:float,:integer,:string,:text].include?(c.type) }.map {|c| c.name }\n end", "def should_render_facet?(display_facet)\n # display when show is nil or true\n facet_config = facet_configuration_for_field(display_facet.name)\n display = should_render_field?(facet_config, display_facet)\n display_facet.items.reject! { |item| item.value.blank? }\n display && display_facet.items.present? 
# && !display_facet.items.empty?\n end", "def ts_apply_filters\n # TODO: Make filters for Thinking Sphinx\n end", "def add_facet_fields_to_solr_request!(*fields)\n if fields.empty?\n self.add_facet_fields_to_solr_request = true\n else\n facet_fields.slice(*fields).each_value { |v| v.include_in_request = true }\n end\n end", "def filtered_parameters; end", "def get_facet_pagination(facet_field, extra_controller_params={})\n Blacklight::Solr::Facets.paginate solr_facet_params(facet_field, extra_controller_params)\n end", "def Filter=(arg0)", "def facet_field_aggregations\n list_as_hash(facet_fields).each_with_object({}) do |(facet_field_name, values), hash|\n items = values.map do |value, hits|\n i = FacetItem.new(value: value, hits: hits)\n\n # solr facet.missing serialization\n if value.nil?\n i.label = I18n.t(:\"blacklight.search.fields.facet.missing.#{facet_field_name}\", default: [:\"blacklight.search.facets.missing\"])\n i.fq = \"-#{facet_field_name}:[* TO *]\"\n end\n\n i\n end\n\n options = facet_field_aggregation_options(facet_field_name)\n hash[facet_field_name] = FacetField.new(facet_field_name,\n items,\n options)\n\n # alias all the possible blacklight config names..\n blacklight_config.facet_fields.select { |k,v| v.field == facet_field_name }.each do |key,_|\n hash[key] = hash[facet_field_name]\n end if blacklight_config and !blacklight_config.facet_fields[facet_field_name]\n end\n end", "def facet_field(field)\n facet_fields[field.to_s] || []\n end", "def add_facetting_to_solr(solr_parameters, user_params)\n # While not used by BL core behavior, legacy behavior seemed to be\n # to accept incoming params as \"facet.field\" or \"facets\", and add them\n # on to any existing facet.field sent to Solr. Legacy behavior seemed\n # to be accepting these incoming params as arrays (in Rails URL with []\n # on end), or single values. At least one of these is used by\n # Stanford for \"faux hieararchial facets\". \n if user_params.has_key?(\"facet.field\") || user_params.has_key?(\"facets\")\n solr_parameters[:\"facet.field\"].concat( [user_params[\"facet.field\"], user_params[\"facets\"]].flatten.compact ).uniq!\n end \n\n blacklight_config.facet_fields.select { |field_name,facet|\n facet.include_in_request || (facet.include_in_request.nil? && blacklight_config.add_facet_fields_to_solr_request)\n }.each do |field_name, facet|\n solr_parameters[:facet] ||= true\n\n case \n when facet.pivot\n solr_parameters.append_facet_pivot with_ex_local_param(facet.ex, facet.pivot.join(\",\"))\n when facet.query\n solr_parameters.append_facet_query facet.query.map { |k, x| with_ex_local_param(facet.ex, x[:fq]) } \n else\n solr_parameters.append_facet_fields with_ex_local_param(facet.ex, facet.field)\n end\n\n if facet.sort\n solr_parameters[:\"f.#{facet.field}.facet.sort\"] = facet.sort\n end\n\n if facet.solr_params\n facet.solr_params.each do |k, v|\n solr_parameters[:\"f.#{facet.field}.#{k}\"] = v\n end\n end\n\n # Support facet paging and 'more'\n # links, by sending a facet.limit one more than what we\n # want to page at, according to configured facet limits.\n solr_parameters[:\"f.#{facet.field}.facet.limit\"] = (facet_limit_for(field_name) + 1) if facet_limit_for(field_name)\n end\n end", "def remove_facet_params(field, item, source_params = params)\n field = item.field if item.respond_to? 
:field\n\n value = facet_value_for_facet_item(item)\n\n p = source_params.dup\n # need to dup the facet values too,\n # if the values aren't dup'd, then the values\n # from the session will get remove in the show view...\n p[:f] = (p[:f] || {}).dup\n p[:f][field] = (p[:f][field] || []).dup\n p.delete :page\n p.delete :id\n p.delete :counter\n p.delete :commit\n p[:f][field] = p[:f][field] - [value]\n p[:f].delete(field) if p[:f][field].size.zero?\n p\n end", "def filtered_entries; end", "def search_adding_filter(condition,value)\n conditions = params.dup[:conditions] || {}\n\n if PLURAL_FILTERS.include?(condition)\n conditions[condition] ||= []\n conditions[condition] << value\n else\n conditions[condition] = value\n end\n params.except(:quiet, :all, :facet).recursive_merge(:page => nil, :action => :show, :conditions => conditions)\n end", "def named_filter; end", "def filter(sparql)\n raise \"Must be overridden\"\n end", "def filter_by_visibility(solr_parameters)\n # add a new solr facet query ('fq') parameter that limits results to those with a 'public_b' field of 1\n solr_parameters[:fq] ||= []\n fq = viewable_metadata_visibilities.map { |visibility| \"(visibility_ssi:\\\"#{visibility}\\\")\" }.join(\" OR \")\n solr_parameters[:fq] << \"(#{fq})\"\n end", "def range_facet_and_filter_for(field,params={},options={})\n\t\t\tstart = (options[:start] || 20)\t#.to_i\n\t\t\tstop = (options[:stop] || 50)\t#.to_i\n\t\t\tstep = (options[:step] || 10)\t#.to_i\n\t\t\tlog = (options[:log] || false)\t#.to_i\n\t\t\trange_filter_for(field,params)\n#\t\t\tif params[field]\n##\t\"expect\"=>[\"1e-5..1e0\"]\n#\t\t\t\tany_of do\n#\t\t\t\t\tparams[field].each do |pp|\n##\t\t\t\t\t\tif pp =~ /^Under (\\d+)$/\n#\t\t\t\t\t\tif pp =~ /^Under (.+)$/\n#\t\t\t\t\t\t\twith( field.to_sym ).less_than $1 #\tactually less than or equal to\n##\t\t\t\t\t\telsif pp =~ /^Over (\\d+)$/\n#\t\t\t\t\t\telsif pp =~ /^Over (.+)$/\n#\t\t\t\t\t\t\twith( field.to_sym ).greater_than $1 #\tactually greater than or equal to\n##\t\t\t\t\t\telsif pp =~ /^\\d+\\.\\.\\d+$/\n#\t\t\t\t\t\telsif pp =~ /^.+\\.\\..+$/\n#\t\t\t\t\t\t\twith( field.to_sym, eval(pp) )\t#\tNOTE could add parantheses then use Range.new( $1,$2 )???\n#\t\t\t\t\t\telsif pp =~ /^\\d+$/\n#\t\t\t\t\t\t\twith( field.to_sym, pp )\t#\tprimarily for testing? 
No range, just value\n#\t\t\t\t\t\tend\n#\t\t\t\t\tend\n#\t\t\t\tend\n#\t\t\tend\n\t\t\tfacet field.to_sym do\n\t\t\t\tif log\n\t\t\t\t\trow \"Under 1e#{start}\" do\n\t\t\t\t\t\twith( field.to_sym ).less_than \"1e#{start}\".to_f\n\t\t\t\t\tend\n\t\t\t\t\t(start..(stop-step)).step(step).each do |range|\n\t\t\t\t\t\trow \"1e#{range}..1e#{range+step}\" do\n\t\t\t\t\t\t\twith( field.to_sym, Range.new(\"1e#{range}\".to_f,\"1e#{range+step}\".to_f) )\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\t\trow \"Over 1e#{stop}\" do\n\t\t\t\t\t\twith( field.to_sym ).greater_than \"1e#{stop}\".to_f\n\t\t\t\t\tend\n\t\t\t\telse\n\t\t\t\t\t#\trow \"text label for facet in view\", block for facet.query\n\t\t\t\t\trow \"Under #{start}\" do\n\t\t\t\t\t\t#\tIs less_than just less_than or does it also include equal_to?\n\t\t\t\t\t\t#\tResults appear to include equal_to which makes it actually incorrect and misleading.\n\t\t\t\t\t\twith( field.to_sym ).less_than start\t\t#\tfacet query to pre-show count if selected (NOT A FILTER)\n\t\t\t\t\tend\n\t\t\t\t\t#\tthis works when like 1-100 step 10\n\t\t\t\t\t(start..(stop-step)).step(step).each do |range|\n\t\t\t\t\t\trow \"#{range}..#{range+step}\" do\n\t\t\t\t\t\t\twith( field.to_sym, Range.new(range,range+step) )\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\t\trow \"Over #{stop}\" do\n\t\t\t\t\t\t#\tIs greater_than just greater_than or does it also include equal_to?\n\t\t\t\t\t\t#\tResults appear to include equal_to which makes it actually incorrect and misleading.\n\t\t\t\t\t\twith( field.to_sym ).greater_than stop\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend", "def set_filters\n filter_param_keys = [\n 'brand', 'color',\n 'size', 'department', 'keywords'\n ]\n @filters = []\n filter_param_keys.each do |key|\n if !params[key].blank?\n params[key].split(',').each do |val|\n @filters << {:key => key, :val => val}\n end\n end\n end\n \n \n if params[:price]\n params[:price].split(',').each_slice(2).to_a.each do |range|\n @filters << {:key => 'price', :val => range.join(',')}\n end\n end\n\n if @products\n @brands = @products.facet('brand_facet').rows.sort_by{ |brand| brand.value.capitalize}\n @departments = @products.facet('department_facet').rows\n end\n \n @colors = ['green', 'blue', 'purple', 'red', 'pink', 'beige', 'brown', 'yellow', 'orange', 'black', 'white', 'gray', 'teal', 'glowing', 'gold', 'silver']\n \n if [email protected]? && @taxon.has_size?\n sizes = (Spree::Product.sizes.sort_by{|size| size.position}.map(&:presentation) & @products.facet(\"size_facet\").rows.map(&:value))\n end\n end", "def filter; end", "def filter; end", "def filter; end", "def filter_index\n filter\n end", "def remove_facet_params(field, item, source_params = params)\n if item.respond_to? :field\n field = item.field\n end\n\n value = facet_value_for_facet_item(item)\n\n p = source_params.dup\n # need to dup the facet values too,\n # if the values aren't dup'd, then the values\n # from the session will get remove in the show view...\n p[:f] = (p[:f] || {}).dup\n p[:f][field] = (p[:f][field] || []).dup\n p.delete :page\n p.delete :id\n p.delete :counter\n p.delete :commit\n p[:f][field] = p[:f][field] - [value]\n p[:f].delete(field) if p[:f][field].size == 0\n p\n end", "def remove_facet_params(field, item, source_params=params)\n if item.respond_to? 
:field\n field = item.field\n end\n\n value = facet_value_for_facet_item(item)\n\n p = source_params.dup\n # need to dup the facet values too,\n # if the values aren't dup'd, then the values\n # from the session will get remove in the show view...\n p[:f] = (p[:f] || {}).dup\n p[:f][field] = (p[:f][field] || []).dup\n p.delete :page\n p.delete :id\n p.delete :counter\n p.delete :commit\n p[:f][field] = p[:f][field] - [value]\n p[:f].delete(field) if p[:f][field].size == 0\n p\n end", "def filter_parameters=(_arg0); end", "def filter_parameters=(_arg0); end", "def add_facetting_to_solr(solr_parameters)\n # NOTE: `facet=false` is a Solr concept; although this param is ignored in stock BL as a\n # BL param, it's useful to support this at the BL level; esp. because I think `facet=false`\n # in Solr does not disable \"JSON Facet API\" faceting!\n return if blacklight_params[:facet] == false # default true, so distinguish from falsey `nil`\n facet_fields_to_include_in_request.each do |field_name, facet|\n next if blacklight_params[:action] == 'facet' && blacklight_params[:id] != field_name\n next unless evaluate_if_unless_configuration(facet, blacklight_params)\n solr_parameters[:facet] ||= true\n\n if facet.json_facet\n json_facet = (solr_parameters[:'json.facet'] ||= [])\n json_facet << facet.json_facet.to_json\n next\n end\n\n if facet.pivot\n solr_parameters.append_facet_pivot with_ex_local_param(facet.ex, facet.pivot.join(\",\"))\n elsif facet.query\n solr_parameters.append_facet_query facet.query.map { |k, x| with_ex_local_param(facet.ex, x[:fq]) }\n else\n solr_parameters.append_facet_fields with_ex_local_param(facet.ex, facet.field)\n end\n\n if facet.sort\n solr_parameters[:\"f.#{facet.field}.facet.sort\"] = facet.sort\n end\n\n if facet.solr_params\n facet.solr_params.each do |k, v|\n solr_parameters[:\"f.#{facet.field}.#{k}\"] = v\n end\n end\n\n limit = facet_limit_with_pagination(field_name)\n solr_parameters[:\"f.#{facet.field}.facet.limit\"] = limit if limit\n end\n end", "def filterable?; @filterable; end", "def general_facet_names\n facet_field_names - admin_facet_names\n end", "def applicable_category_filters\n return {} if current_category_level > 3\n facet_name = ProductSearcher::CATEGORY_LEVEL_FACET_MAP[current_category_level + 1]\n facet = self.facets[facet_name.to_sym]\n facet_id_and_count_to_model_and_count(facet, Category).sort{|a,b| b[1] <=> a[1]}[0..19]\n end", "def query_constraints\n # the `+` with @facet_constraint_component is copied from original implementation, and\n # I think is about \"advanced search\" feature?\n\n helpers.render(@query_constraint_component.new(\n search_state: @search_state\n )) + helpers.render(@facet_constraint_component.with_collection(clause_presenters.to_a, **@facet_constraint_component_options))\n end", "def strict_filters=(_arg0); end", "def to_facet_options\n to_search_options.slice(:select, :with, :conditions, :geo)\n end" ]
[ "0.69509584", "0.68063945", "0.66341025", "0.66246533", "0.6587306", "0.6566637", "0.65467894", "0.64402914", "0.64110863", "0.6399205", "0.6307552", "0.6294035", "0.6281053", "0.6212003", "0.62007004", "0.61739266", "0.6139975", "0.6135269", "0.61293995", "0.610518", "0.609782", "0.6084528", "0.608153", "0.6067888", "0.6041093", "0.6040778", "0.60370713", "0.6031653", "0.602978", "0.60257024", "0.60201144", "0.60161537", "0.5999351", "0.5975641", "0.5972989", "0.5972989", "0.5966717", "0.594804", "0.594804", "0.5943919", "0.59044325", "0.589792", "0.58973676", "0.58973676", "0.58862203", "0.58828884", "0.5877475", "0.5863859", "0.58552027", "0.58495814", "0.5841564", "0.58375937", "0.58375937", "0.58247566", "0.58153296", "0.580085", "0.57908374", "0.57807183", "0.5780655", "0.5767371", "0.5757441", "0.5755519", "0.57552034", "0.5745615", "0.5722337", "0.5720878", "0.5708703", "0.570332", "0.57015085", "0.5695338", "0.56917423", "0.56856215", "0.5678011", "0.56728023", "0.5659486", "0.5651937", "0.56446415", "0.5637877", "0.56346214", "0.5631484", "0.56155753", "0.5615343", "0.5612176", "0.5604399", "0.55980325", "0.55925965", "0.5590894", "0.5590894", "0.5590894", "0.5587782", "0.55873686", "0.5579558", "0.5579109", "0.5579109", "0.5574962", "0.55637646", "0.5559597", "0.5559451", "0.55549175", "0.5546759", "0.5546496" ]
0.0
-1
NOTE: we use this pair of methods to transform between es and clientside units
def initialize_rows @transform_lookup = {} if is_time? initialize_time_rows elsif is_distance? initialize_distance_rows else initialize_numeric_rows end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def units\n @units = SQF.units @this\n @units\n end", "def unit_conversion\n if user.metric_system?\n self.distance = Goal.kms_to_miles(distance) if distance.present?\n self.vertical_gain = Goal.meters_to_feet(vertical_gain) if vertical_gain.present?\n end\n end", "def value_from_units value, from_units = nil\n multiplier = if from_units.nil?\n if units.eql?('in')\n 1.0 / 72.0 # PDF units per inch\n else\n 25.4 * 1.0 / 72.0\n end\n elsif self.units.eql?(from_units)\n 1.0\n elsif self.units.eql?('in') && from_units.eql?('mm')\n (1.0 / 25.4)\n else\n 25.4\n end\n value.to_f * multiplier\n end", "def units _args\n \"units _args;\" \n end", "def as unit\n check_unit! unit\n dist = self.dup\n dist.number = convert_to_meters * meters_map[unit]\n dist.unit = unit\n dist \n end", "def convert_to_measured\n converter = 1\n case self.cost_unit\n when \"tsp\"\n converter = 0.16667 # convert to us_fl_oz \n self.cost_unit = \"us_fl_oz\"\n when \"tbsp\"\n converter = 0.5 # convert to us_fl_oz \n self.cost_unit = \"us_fl_oz\"\n when \"cup\"\n converter = 8 # convert to us_fl_oz \n self.cost_unit = \"us_fl_oz\"\n end\n self.cost_size *= converter\n end", "def systemOfUnits \n \"systemOfUnits\" \n end", "def to_units( units, thousands=0 )\n\t\treturn Units[ units ] + to_thousands( thousands )\n\tend", "def units(units=nil)\n cur_page.units(units)\n end", "def units\n return @units\n end", "def unit \n\t\t\tunitq = self.dup\n\t\t\tmagnitude = self.abs\n\t\t\tunitq[0] /= magnitude\n\t\t\tunitq[1] /= magnitude\n\t\t\tunitq[2] /= magnitude\n\t\t\tunitq[3] /= magnitude\n\t\t\treturn unitq\n\t\tend", "def unit_helper(number, from_unit_string, to_unit_string)\n OpenStudio.convert(OpenStudio::Quantity.new(number, OpenStudio.createUnit(from_unit_string).get), OpenStudio.createUnit(to_unit_string).get).get.value\n end", "def unit_mappings\n {\n ApiUnitSystem.US => { :duration => \"milliseconds\", :distance => \"miles\", :elevation => \"feet\", :height => \"inches\", :weight => \"pounds\", :measurements => \"inches\", :liquids => \"fl oz\", :blood_glucose => \"mg/dL\" },\n ApiUnitSystem.UK => { :duration => \"milliseconds\", :distance => \"kilometers\", :elevation => \"meters\", :height => \"centimeters\", :weight => \"stone\", :measurements => \"centimeters\", :liquids => \"mL\", :blood_glucose => \"mmol/l\" },\n ApiUnitSystem.METRIC => { :duration => \"milliseconds\", :distance => \"kilometers\", :elevation => \"meters\", :height => \"centimeters\", :weight => \"kilograms\", :measurements => \"centimeters\", :liquids => \"mL\", :blood_glucose => \"mmol/l\" }\n }\n end", "def convertUnitValue(value,unit)\n if (unit.present?)\n unit.strip!\n else\n return value\n end\n unitRegEx=/^([afpnu\\xC2\\xB5\\316\\274mcdhkMGTPE]?)[ ]*(.+)/\n matchSet=unitRegEx.match(unit)\n if (matchSet.nil?)\n return value\n else\n prefix=matchSet[1]\n baseUnit=matchSet[2]\n case prefix\n when \"a\"\n mult=1.0E-18\n when \"f\"\n mult=1.0E-15\n when \"p\"\n mult=1.0E-12\n when \"n\"\n mult=1.0E-9\n when \"u\",\"\\316\\274\",\"\\xC2\\xB5\"\n mult=1.0E-6\n when \"m\"\n mult=1.0E-3\n when \"c\"\n mult=1.0E-2\n when \"d\"\n mult=1.0E-1\n when \"\"\n mult=1.0\n when \"h\"\n mult=1.0E2\n when \"k\"\n mult=1.0E3\n when \"M\"\n mult=1.0E6\n when \"G\"\n mult=1.0E9\n when \"T\"\n mult=1.0E12\n when \"P\"\n mult=1.E15\n when \"E\"\n mult=1.0E18\n else\n mult=1.0\n end\n return value*mult\n end\n end", "def render_units scale=1.0, transform: true\n if unit_quantity && unit_type\n units = (Unitwise(unit_quantity, unit_type) * (transform ? 
scale : 1))\n\n # :symbol normalizes to (e.g.) \"10 °C\"\n # UnitWise does not handle #to_s nicely when there is no symbol.\n # TODO Add a custom lookup for tablespoons, teaspoons, pints, quarts, etc.\n strategy = units.unit.to_s(:symbol) == '1' ? :names : :symbol\n units.to_s(strategy)\n end\n end", "def units_hash\n {\n \"_hrs\" => \"hours\",\n \"_hours\"=>\"hours\",\n \"_min\"=>\"minutes\",\n \"_minutes\"=>\"minutes\",\n \"_ppm\"=>\"ppm\",\n \"_ppb\"=>\"ppb\",\n \"_mgm3\"=>\"mg/m3\",\n \"_f\"=>\"&deg;f\",\n \"_c\"=>\"&deg;c\",\n \"_rh\"=>\"% rh\",\n \"_utf\"=>\"\"\n }\n end", "def base_unit\n units[0]\n end", "def unit_helper(number, from_unit_string, to_unit_string)\n converted_number = OpenStudio.convert(OpenStudio::Quantity.new(number, OpenStudio.createUnit(from_unit_string).get), OpenStudio.createUnit(to_unit_string).get).get.value\n end", "def unit_helper(number, from_unit_string, to_unit_string)\n converted_number = OpenStudio.convert(OpenStudio::Quantity.new(number, OpenStudio.createUnit(from_unit_string).get), OpenStudio.createUnit(to_unit_string).get).get.value\n end", "def no_Units\n val = self.no_inch\n val = val.no_foot\n val = val.no_cm\n val = val.no_mm\n val = val.gsub(/[~]/,\"\")\n return val\n end", "def units=(value)\n @units = value\n end", "def eval_unit expr\n return 1.0 if expr.nil? or expr.strip.empty?\n # base SI units\n m = _K = _J = s = 1.0\n # derived units\n _W = _J/s\n # orders\n cm = m / 100\n mm = m / 1000\n # constants\n pi = Math::PI\n return Kernel.eval expr.gsub('%','0.01').gsub(/([A-Z]+)/, \"_\\\\1\").gsub('^','**')\n end", "def units\n self.ListUnits.first.map { |u| map_unit(u) }\n end", "def converted_value(other_unit)\n if other_unit.special?\n other_unit.magnitude scalar\n else\n scalar / other_unit.scalar\n end\n end", "def unit_converter(value, input_unit, output_unit)\n\t\treturn 0 if value == 0\n\t\treturn value if input_unit.downcase == output_unit.downcase\n\n\t\tif input_unit.downcase == 'gb'\n\t\t\tif output_unit.downcase == 'mb'\n\t\t\t\treturn value * 1024\n\t\t\tend\n\t\telse\n\t\t\tif input_unit.downcase == 'mb'\n\t\t\t\tif output_unit.downcase == 'gb'\n\t\t\t\t\treturn value / 1024\n\t\t\t\tend\n\t\t\telse\n\t\t\t\tif input_unit.downcase == 'tb'\n\t\t\t\t\tif output_unit.downcase == 'gb'\n\t\t\t\t\t\treturn value * 1024\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend", "def convert_to_emu(dim_str)\n value, unit = dim_str.match(/(^\\.?\\d+\\.?\\d*)(\\w+)/).to_a[1..-1]\n value = value.to_f\n\n if unit == \"cm\"\n value = value * 360000\n elsif unit == \"in\"\n value = value * 914400\n else\n throw ArgumentError, \"Unsupported unit '#{unit}', only 'cm' and 'in' are permitted.\"\n end\n\n value.round()\n end", "def emission_units\n data['emission_units']\n end", "def unit_s\n self.unit.to_s if self.unit\n end", "def unit\n parts[3]\n end", "def units\n attribute('yweather:units')\n end", "def work_in unit, from = \"g\"\n (work.to_s + from).to_unit.convert_to(unit).scalar.to_f\n end", "def convert_temp(value, fromUnits)\t\n\t\tif fromUnits == \"F\"\n\t\t\tconverted_value = (value - 32) / 1.8\n\t\telsif fromUnits == \"C\"\n\t\t\tconverted_value = (1.8 * value) + 32\n\t\tend\t\t\n\t\treturn converted_value.round\n\tend", "def usd(cents); end", "def get_unit(key)\n round_to(super, @nearest)\n end", "def parse_units\n # extract the unit suffix\n if self =~ /(\\d[\\d_]*(?:\\.\\d+)?)\\s*([a-zA-Z]+\\b|%(?= \\s|$))/\n units = $2.downcase\n num = $1 #.to_f\n num = num[\".\"] ? 
num.to_f : num.to_i\n\n case units\n when \"%\"\n # 0.01\n num / 100.0\n when \"k\"\n # 10**3\n num.thousand\n when \"m\", \"mm\"\n # 10**6\n num.million\n when \"b\", \"bn\"\n # 10**9\n num.billion\n when \"gib\", \"gb\", \"g\"\n num * 2**30\n when \"mib\", \"mb\"\n num * 2**20\n when \"kib\", \"kb\"\n num * 2**10\n when \"t\", \"tb\"\n # 10**12\n num.trillion\n when \"q\"\n # 10**15\n num.quadrillion\n when \"Q\"\n # 10**18\n num.quintillion\n when \"min\"\n # 1.minute\n num.minutes\n when \"hours\", \"h\", \"hr\", \"hrs\"\n # 1.hour\n num.hours\n when \"d\", \"days\", \"dy\"\n num.days\n else\n raise \"Invalid units: #{units.inspect}, in: #{self.inspect}\"\n end\n else\n raise \"Couldn't find any units to parse! (expecting: '<a number><some letters>')\"\n end\n end", "def calculate_price_in_micro_units\n 0\n end", "def unit\n return @unit\n end", "def unit_t\n I18n.t \"#{self.unit_s}\", :count => 1\n end", "def calculate(units)\n @price_per_unit * (units - units.to_i / 2)\n end", "def as_euro; end", "def Unitize(*args)\n Unitize::Measurement.new(*args)\nend", "def all_units\n services.map{|el| el[:units]} \n end", "def set_Units(value)\n set_input(\"Units\", value)\n end", "def set_Units(value)\n set_input(\"Units\", value)\n end", "def units_per_package\n unit_factor = self.service.displayed_pricing_map.unit_factor\n units_per_package = unit_factor || 1\n\n return units_per_package\n end", "def si_unit\n return Unit.steridian if describe == 'solid angle'\n return Unit.radian if describe == 'plane angle'\n\n val = si_base_units\n return nil unless val\n return val[0] if val.length == 1\n val = val.inject(Unit.unity) do |compound,unit|\n compound * unit\n end\n val = val.or_equivalent unless val.acts_as_equivalent_unit\n end", "def switchableUnits \n \"switchableUnits\" \n end", "def unit\n return @units[@index]\n end", "def unit\n return @units[@index]\n end", "def adjust_units stats, unit # :nodoc:\n if stats.first > 0.05 then\n stats << unit\n return stats\n end\n\n unit.replace \"m#{unit}\"\n\n stats = stats.map { |stat| stat * 1000 }\n\n stats << unit\n end", "def value_with_units(value, units, usage_factor)\n raise \"Error: value must convert to an integer.\" unless value.to_i\n raise \"Error: units must be k, m, g\" unless units =~ /[KMG]/i\n factor = usage_factor.to_f\n raise \"Error: usage_factor is 1.0 Value used: #{usage_factor}\" if factor > 1.0 || factor <= 0.0\n (value * factor).to_i.to_s + units\nend", "def transform_units(t)\n @powers[t.from] = t.to if powers.has_key?(t.from)\n end", "def UNIT(*values)\n first, second = values\n Unit.convert_to(first, second)\n end", "def get_distance_unit\n\t\t@kilometers\n\tend", "def convert_inches_to_cm length\n in_to_cm = 2.54\n length * in_to_cm\nend", "def convert_inches_to_cm length\n in_to_cm = 2.54\n length * in_to_cm\nend", "def convert_inches_to_cm length\n in_to_cm = 2.54\n length * in_to_cm\nend", "def price_per_unit\n\t\t\"$ #{(self.sale_amount/self.units_sold).round(2)}\"\n\tend", "def handle_u\n @unit = @tokens[@index].get_tag(Unit).type\n @index += 1\n @precision = :unit\n end", "def calculate(units)\n @price_per_unit * units\n end", "def set_value_total_presentation_unit_type_to_use\n result = product.presentation_unit_type_quantity_to_use(self.quantity,:side_dimension_x => self.side_dimension_x,:side_dimension_y => self.side_dimension_y)\n# quantity_by_presentation_unit_type = product.finished_product.quantity_by_presentation_unit_type(self.side_dimension_x, self.side_dimension_y)\n# div = self.quantity / 
quantity_by_presentation_unit_type\n# mod = self.quantity % quantity_by_presentation_unit_type\n# result = div + (mod.zero? ? 0 : 1)\n self.total_presentation_unit_type_to_use = result\n end", "def set_value_total_presentation_unit_type_to_use\n result = product.presentation_unit_type_quantity_to_use(self.quantity,:side_dimension_x => self.side_dimension_x,:side_dimension_y => self.side_dimension_y)\n# quantity_by_presentation_unit_type = product.finished_product.quantity_by_presentation_unit_type(self.side_dimension_x, self.side_dimension_y)\n# div = self.quantity / quantity_by_presentation_unit_type\n# mod = self.quantity % quantity_by_presentation_unit_type\n# result = div + (mod.zero? ? 0 : 1)\n self.total_presentation_unit_type_to_use = result\n end", "def administrative_units\n return @administrative_units\n end", "def convert_bytes_to_unit(data:, unit:)\n case unit\n when 'kb' then @usage = data.to_i / 1024\n when 'mb' then @usage = data.to_i / 1024 / 1024\n when 'gb' then @usage = data.to_i / 1024 / 1024 / 1024\n end\n end", "def total_units\n return @total_units\n end", "def get_units(dimension)\n nil\n end", "def units\n\t\tret = []\n\t\[email protected]('sgt-structure:' + @id + ':units').each do |uid|\n\t\t\tret.push(getUnit(@db, uid))\n\t\tend\n\t\tret\n\tend", "def meters\n if self.u == 'meters' # meter in meter\n self.distance\n# 1 mile = 1609.344 meters\n elsif self.u == 'miles' # meter in mile\n self.distance * 1609.34\n# 1 kilometer = 1000 meters \n elsif self.u == 'kilometers' # meter in kilometer\n self.distance * 1000\n# 1 yard = 0.9144 meters\n elsif self.u == 'yards' # meter in yard\n self.distance * 0.9144\n end\n end", "def value\n if eng_unit_type\n \"#{data} #{eng_unit_type.tag}\"\n else\n data.to_s\n end\n end", "def scale_degrees\n \n end", "def user_units\n @user.units\n end", "def in_celsius\n\t\tif(@unit.to_s == \"c\")\n\t\t\t@degree\n\t\telse\n\t\t\t(@degree - 32) * (5.0/9.0)\n\t\tend\n\tend", "def unit=(value)\n @unit = value\n end", "def convert(units)\n\t\tif units < 10\n\t\t\t\"0#{units}\" \n\t\telse\n\t\t\t\"#{units}\"\n\t\tend\n\tend", "def units(metric=nil)\n (metric || (metric.nil? && self.metric?)) ? 
METRIC_UNITS : IMPERIAL_UNITS\n end", "def size\n ['o', 'k', 'G'].inject(super.to_f) do |s, unit|\n # recusively divide by 1024 until...\n if s.is_a?(Float) && s >= 1024\n s = s / 1024\n # we format it here with the unit\n elsif !s.is_a?(String)\n s = \"#{s.to_s.gsub(/(\\.\\d{3})\\d+$/, \"\\\\1\")} #{unit}\"\n end\n s\n end\n end", "def monetize\n (object.unit_price * 0.01).to_s\n end", "def as_us_dollar; end", "def apply_unit(v, col_scale)\n\t\ta, b = /(.*\\d)\\s*_?\\s*([mnul]{,3})\\s*$/.match(v)[1..2]\n\t\t#raise \"invalid size unit\" unless b\n\t\ts = Dim_Hash[b] || col_scale\n\t\treturn Float(a) * s # may raise exception if a is not a valid float\n\tend", "def available_units\n # After manual set, duplicate map for form select lists\n all_units = ['gram', 'kg', 'lb', 'oz', 'liter', 'gal', 'qt', 'pt', 'us_fl_oz', 'tsp', 'tbsp', 'cup', 'each'].map { |unit| [unit, unit] }.sort\n return all_units\n\tend", "def node_helper_to_units(path: 'status', type:, value: 'used', output_msg:, unit: 'gb', perf_label: 'Usage')\n http_connect(path: \"api2/json/nodes/#{@options[:node]}/#{path}\")\n data = JSON.parse(@response.body)['data'][type][value]\n convert_bytes_to_unit(data: data, unit: unit)\n build_output(msg: \"#{output_msg}: #{@usage}#{unit.upcase}\")\n build_perfdata(perfdata: \"#{perf_label}=#{@usage}#{unit.upcase}\")\n check_thresholds(data: @usage)\n end", "def si_unit\n return self if self.dimensions.is_dimensionless?\n @dimensions.si_unit\n end", "def ref_to_unit(value)\n result = nil\n if self.uom_type == 'big'\n result = value/self.ratio\n elsif self.uom_type == 'small'\n result = value * self.ratio\n elsif self.uom_type == 'ref'\n result = value\n end\n result.to_f\n end", "def test_autonomy\n assert m = Autonomy.load(200000)\n assert_same Unit[:L, :U, :M], m.unit\n assert_in_delta 108, m, 0.1\n m = m.localize(Locale::US)\n assert_same Unit[:L, :U, :M], m.unit\n end", "def convert_to(unit)\n unit = Unit.get(unit) if unit.is_a?(Symbol)\n self.normalize + unit.denormalize\n end", "def units=(args)\n\t@units = (!args or args.is_a?(Units)) ? 
args : Units.new(args)\n end", "def transform_us_to_mxn(dollars, current_value)\n dollars * current_value\nend", "def available_units\n return 0 if status == NO_SPACE\n\n 999\n end", "def setSystemOfUnits _args\n \"setSystemOfUnits _args;\" \n end", "def unit_weight\n (unit_area * product_thickness * 2.5).round(2)\n end", "def uomd\n @framework_rate = @framework_rates[@service_ref].to_f\n\n # benchmark rate set here\n @benchmark_rate = @benchmark_rates[@service_ref].to_f\n\n @uomd =\n if @supplier_name && @rate_card.data['Discounts'][@supplier_name][@service_ref]\n (1 - @rate_card.data['Discounts'][@supplier_name][@service_ref]['Disc %'].to_f) * @uom_vol * @rate_card.data['Prices'][@supplier_name][@service_ref][@building_data['fm-building-type']].to_f\n else\n @uom_vol * @framework_rate\n end\n rescue StandardError => e\n raise e\n end", "def convert_to_fl_oz(amount, unit)\n\t\tcase unit\n\t\t\twhen 'gallon' then amount * 128\n\t\t\twhen 'quart' then amount * 32\n\t\t\twhen 'pint' then amount * 16\n\t\t\twhen 'cup' then amount * 8\n\t\t\twhen 'fl_oz' then amount\n\t\t\twhen 'Tbsp' then amount * 0.5\n\t\t\twhen 'tsp' then amount * 0.1666666666667\n\t\tend\n\tend", "def currency_unit\n @currency_unit\n end", "def is_units?(); @type == GRT_UNITS; end", "def test_change_system_yd\n range = Class.new(Metric)\n range.dimension = Dimension::L\n m0 = range.new(1, @yard_us)\n assert m1 = m0.change_system(:SI)\n assert_same @meter, m1.unit\n end", "def unit\n self.dup.unit!\n end", "def convert_tocelsius(fahrenheit)\n # return (fahrenheit - 32) * (5/9)\n return fahrenheit*5/9 - 32*5/9 \nend", "def set_millimeters\n\t @units = 'millimeters'\n\t current_layer.set_millimeters\n\tend", "def unit_formats(type)\n #find statements\n #if sales[0]< 1000 and tot assets[0] < 1000\n #$xxx,xxx.x\n #else\n #$xxx,xxx\n #end\n end", "def euro(cents); end", "def convert_in_to_cm(measurement_in, in_to_cm_conversion)\n measurement_in * in_to_cm_conversion\nend" ]
[ "0.6690909", "0.6566071", "0.6484851", "0.6444477", "0.64376646", "0.6410337", "0.63341236", "0.62523085", "0.6204305", "0.6203699", "0.6202544", "0.6128126", "0.60987455", "0.6066264", "0.60201", "0.59954417", "0.5967133", "0.5935795", "0.5935795", "0.58879924", "0.5880713", "0.5863023", "0.5854941", "0.58483124", "0.5829534", "0.5828406", "0.5799958", "0.5765336", "0.57362807", "0.5724053", "0.5714895", "0.570848", "0.5695155", "0.5692987", "0.56681097", "0.56617016", "0.5658024", "0.5647021", "0.56423986", "0.5622631", "0.5622548", "0.5612701", "0.5598321", "0.5598321", "0.5591566", "0.5591408", "0.55909204", "0.55482066", "0.55482066", "0.55374175", "0.5534761", "0.5530268", "0.5514712", "0.54958034", "0.5491071", "0.5491071", "0.5491071", "0.54902023", "0.54857445", "0.54796547", "0.5477324", "0.5477324", "0.5462926", "0.5462138", "0.54594994", "0.54580253", "0.54399276", "0.5437535", "0.54286623", "0.54275334", "0.54196435", "0.54060155", "0.5405307", "0.5384529", "0.5381229", "0.53746885", "0.5374524", "0.53742296", "0.537332", "0.5363437", "0.5358579", "0.5355561", "0.5351431", "0.533757", "0.53278047", "0.53133166", "0.5310703", "0.5310595", "0.530487", "0.52990294", "0.5293766", "0.5284964", "0.52845865", "0.5279204", "0.5272612", "0.5270174", "0.52686673", "0.5258043", "0.52570146", "0.52501625", "0.524791" ]
0.0
-1
need to move to application controlller and except the auth.json
def grats quants = [ 46043, 17140, 11105, 11956, 20928, ] # [ 80431, 31000, 18133, 20000, 28778 ] symbols = ['csco','crm','msft','intc','amat'] costs = [ 1076070.00, 1108836.00, 406866.00, 283692.00, 233090.00 ] # [ 2104879.27, 2168140.00, 796038.70, 554400.00, 458721.32] @cash = 235782 # 155000 @latest = [ ] prices = Options.stock_price(symbols) @total = 0 prices.each do |s| symbol = s['Symbol'] index = symbols.index(symbol) value = quants[index] * s['LastTrade'].to_d profit = (quants[index] * s['LastTrade'].to_d) - costs[index] change = (quants[index] * s['Change'].to_d) @total += profit @latest.push [ symbol.upcase, quants[index], change, profit, s['LastTrade'] ] end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def auth_info\n render :action => 'auth_info.json', :layout => false\n end", "def auth_store; end", "def auth\n end", "def auth\n end", "def auth\n {}\n end", "def authorize_application\n render json: { ok: false, message: 'unauthorized' }, status: 401 unless request.headers['App-Key'] == 'R141p7THbd5vRdH20xb5SaOsN6bJ5J5S'\n end", "def auth_process\n\t\tif @auth_file.authorization.nil?\n \t\t\tmake_auth\n\t\telse\n\t\t\[email protected] = @auth_file.authorization\n\t\tend\n\tend", "def use_oauth\n\t\t\t\n\t\tend", "def auth\n cfg_get(:auth)\n end", "def http_auth_hash; end", "def credentials; end", "def credentials; end", "def credentials; end", "def credentials; end", "def credentials; end", "def auth_param; end", "def auth\n cfg_get(:auth)\n end", "def http_auth?; end", "def authenticate_json_request\n #return true unless Rails.env.production?\n\n # TODO Turn this back after making it correctly check for API requests\n if false && APIKeysActive == true && Rails.env.production?\n # Is it safe to suppose that ALL JSON requests will be API requests?? -SR\n #we'll check the mime types once 1.0 is deprecated, and 2.0 servers both html and json - RJ\n\n #case request.format\n #when Mime::JSON\n #/^Token token=\"(.+?)\"$/ - This should be integrated in the near future because we want\n # to check for the Token token portion of the header value.\n regex = /^.*\\\"([\\w]+)\\\"$/.match(request.authorization)\n regex ||= Array.new #guarantees the array accessor works on the next line\n key = regex[1]\n render :json => OldApi.error(403, \"Invalid Api Key\"), :status => 403 and return unless ApiKey.exists?(key: key)\n #end\n end\n end", "def auth_settings\n {\n }\n end", "def auth_settings\n {\n }\n end", "def auth_settings\n {\n }\n end", "def capable_login_auth?; end", "def params_auth_hash; end", "def oauth_authentication; end", "def web\n _auth(false)\n end", "def auth_methods; end", "def require_no_authentication\n require_no_authentication_or_app_jwt\n end", "def authorization; end", "def auth_scheme; end", "def auth\n\n @user = current_user\n render json: @user\n \n end", "def update_application_controller\n inject_into_file 'app/controllers/application_controller.rb', after: \"protect_from_forgery with: :exception\\n\" do <<-'RUBY'\n before_action :authenticate_or_token\n\n protected\n def configure_permitted_parameters\n devise_parameter_sanitizer.for(:sign_up) { |u| u.permit(:locale, :name, :username, :email, :password, :password_confirmation, :role, :remember_me) }\n devise_parameter_sanitizer.for(:sign_in) { |u| u.permit(:login, :username, :email, :password, :remember_me) }\n devise_parameter_sanitizer.for(:account_update) { |u| u.permit(:username, :email, :password, :password_confirmation, :current_password, :role) }\n I18n.locale = @current_user.locale || I18n.default_locale unless @current_user.nil?\n end\n\n # Validate user session if is not API call\n def authenticate_or_token\n authenticate_user! if params[:controller].index('api').nil? 
&& request.fullpath != root_path\n @current_user = current_user if @current_user.nil?\n end\n\n RUBY\n end\n end", "def require_login\n render json: {message: 'Pleae login'}, status: :unauthorized unless logged_in?\n end", "def initialize(app_id, app_secret, api_key, email, password)\n\n merge!(\n {\n applicationCredentials: {\n applicationId: app_id,\n applicationSecret: app_secret\n },\n userCredentials: {\n apiKey: api_key,\n email: email,\n password: password \n }\n }\n )\n end", "def skip_authorization; end", "def api_auth\n # output the APIkey from the header\n # puts request.headers[\"X-APIkey\"];\n creator = Creator.find_by(creator: request.headers[:creator])\n if creator && creator.authenticate(request.headers[:password])\n render json: { auth_token: encodeJWT(creator), id: creator.id}\n else\n render json: { error: 'Invalid username or password' }, status: :unauthorized\n end\n end", "def client_secret; end", "def basic_auth_definitions\n {}\n end", "def authenticate\n render json: { error: 'Access Denied' }, status: 401 unless current_user\n end", "def authenticate\n# byebug\n return true if public_action?\n if request.format.json?\n authenticate_token || render_json_unauthorized\n else\n authenticate_user!\n end\n end", "def http_auth_login\n # FIXME: Implement\n end", "def authenticate_app\n payload = {\n # The time that this JWT was issued, _i.e._ now.\n iat: Time.now.to_i,\n\n # JWT expiration time (10 minute maximum)\n exp: Time.now.to_i + (10 * 60),\n\n # Your GitHub App's identifier number\n iss: APP_IDENTIFIER\n }\n logger.debug \"JWT payload: #{payload}\"\n\n # Cryptographically sign the JWT.\n jwt = JWT.encode(payload, PRIVATE_KEY, 'RS256')\n\n # Create the Octokit client, using the JWT as the auth token.\n @app_client ||= Octokit::Client.new(bearer_token: jwt)\n end", "def auth(value); end", "def mock_defective_auth_hash\n nil\n end", "def save\r\n SystemConfig.set :auth, to_h, true\r\n end", "def app_credentials\n {user: @config['mysql_app_user'], pass: @config['mysql_app_password']}\n end", "def authenticate_current_user\n render json: {}, status: :unauthorized if get_current_user.nil?\n end", "def client_secrets\n Google::APIClient::ClientSecrets.load(\n 'client_secrets.json').to_authorization\nend", "def user_authentication\n end", "def authenticate_shim!\n if ENV['RACK_ENV'] == 'development'\n true\n else\n authenticate!\n end\n end", "def active_for_authentication?; end", "def active_for_authentication?; end", "def authenticate_user!\n render :json => { error: \"please log in\" }, :status => 401 unless current_user.id\n end", "def api_auth\n creator = Creator.find_by(username: request.headers[:username])\n if creator && creator.authenticate(request.headers[:password])\n render json: { auth_token: encodeJWT(creator) }\n else\n render json: { error: 'Invalid username or password' }, status: :unauthorized\n end\n end", "def third_party_apps\n # Displays the user's 3rd party applications profile page\n authorize ::User\n\n @identifier_schemes = IdentifierScheme.for_users.order(:name)\n @tokens = current_user.access_tokens.select { |token| token.revoked_at.nil? 
}\n end", "def api_auth\n api_response(403, \"Invalid Authorization header\") unless api_user\n end", "def authenticate_client\r\n client_id = request.headers['client-id']\r\n client_secret = request.headers['client-secret']\r\n\r\n unless client_id == Rails.application.secrets.api_key && client_secret == Rails.application.secrets.api_secret\r\n render json: ErrorResponse.new(\r\n code: 401, message: 'Unauthorized Request'\r\n ), adapter: :json, status: :unauthorized\r\n nil\r\n end\r\n\r\n end", "def authenticate_manual \n api_key = request.headers['X-Api-Key']\n @app = App.where(api_key: api_key).first if api_key\n\n unless @app\n head status: :unauthorized\n return false\n end\n end", "def initialize\n super\n @section = \"auth\"\n end", "def require_login\n end", "def api_auth\n\t\tauth ::UApi\n\tend", "def capable_plain_auth?; end", "def auth_controller?\n false\n end", "def auth\n if configuration.api_key\n { :key => configuration.api_key, :sign => 'true' }\n elsif configuration.access_token\n { :access_token => configuration.access_token }\n end\n end", "def allowed_auth_methods; end", "def authorization_mode; end", "def auth_required\n unless Facts.config.user\n Facts.ui.puts \"Authorization required for this task, use `facts config`\"\n exit(0)\n end\n end", "def valid_for_http_auth?; end", "def auth!\n # first call b2_authorize_account to get an account_auth_token\n # this has to stick around because it has various important data\n b2_authorize_account\n\n unless body_wrap.allowed.capabilities.include? 'writeFiles'\n raise \"app_key #{app_key} does not have write access to account #{account_id}\"\n end\n end", "def capable_auth_types; end", "def after_custom_authentication; end", "def require_auth\n head :unauthorized unless current_user\n end", "def authenticate_api!\n find_case\n return true if @case&.public? || current_user\n\n render json: { reason: 'Unauthorized!' },\n status: :unauthorized\n end", "def app\n render json: {app_load: true}\n #@location_path = \"/#{params[:path]}\"\n end", "def middlewares; end", "def require_api_token\n end", "def api_authentication_required\n unauthorized unless current_user?\n end", "def include_session_auth_concern\n log :include_session_auth_concern, \"\"\n content = <<-EOF\n protect_from_forgery with: :exception, unless: -> { request.format.json? 
}\n include SessionAuthentication\n EOF\n\n replace_in_file 'app/controllers/application_controller.rb',\n 'protect_from_forgery with: :exception', content\n\n # insert_into_file(\"app/controllers/application_controller.rb\",\n # content,\n # after: \"protect_from_forgery with: :exception\\n\\n\")\n end", "def require_master_key; end", "def require_master_key; end", "def attr_user_app\n set_keys(USER_APP_KEY,USER_MASTER_KEY)\n end", "def host_authorization; end", "def host_authorization; end", "def authenticate_request\n render :json => { :error => :unauthorized }, :status => :unauthorized unless current_user\n end", "def copy_auth_files\n log :copy_auth_files, \"\"\n\n copy_file \"controllers/api_authentication.rb\",\n 'app/controllers/concerns/api_authentication.rb'\n\n copy_file \"controllers/api_versioning.rb\",\n 'app/controllers/concerns/api_versioning.rb'\n\n copy_file \"test/api_helper.rb\",\n 'test/support/api_helper.rb'\n\n copy_file \"test/authorization_helper.rb\",\n 'test/support/authorization_helper.rb'\n\n\n inject_into_file 'test/test_helper.rb',\n \"\\nclass ActionController::TestCase\\n include ::ApiHelper\\nend\",\n after: \"end\\n\"\n\n\n # copy_file \"config/api.yml\", \"config/api.yml\"\n end", "def login\n begin\n # - Creating a JSON file with the proper fields and setting the environment variable ONEVIEW_AUTH_FILE to its path\n credentials = if ENV['ONEVIEW_AUTH_FILE']\n JSON.parse(File.read(File.absolute_path(ENV['ONEVIEW_AUTH_FILE'])), symbolize_names: true)\n # - Declaring each field as an environment variable\n elsif ENV['ONEVIEW_URL']\n environment_credentials\n # - Placing a JSON file in the directory you are running the manifests from\n else\n JSON.parse(File.read(File.expand_path(Dir.pwd + '/login.json', __FILE__)), symbolize_names: true)\n end\n rescue\n raise('The Oneview credentials could not be set. Please check the documentation for more information.')\n end\n credentials_parse(credentials)\nend", "def authentication_profile\n super\n end", "def allow_params_authentication!; end", "def access_control\n \n end", "def token_secret; config[:token_secret]; end", "def auth\n \trequest.env['omniauth.auth']\n \tend", "def authenticate_user!\n return if current_user\n render json: json_message(errors: 'Acceso denegado. Por favor ingresa.'), status: 401\n end", "def require_logged_in\n (render json: [\"You need to be logged in for this.\"], status: 401) unless logged_in?\n end", "def authenticate!\n if current_identity\n true\n else\n respond_to do |format|\n format.html do\n store_location\n redirect_to main_app.new_session_path\n end\n format.json do\n render status: 403, nothing: true\n end\n end\n end\n end", "def authenticate_request!\n payload, header = JsonWebToken.verify(http_token)\n header if false # Commeent this line\n @requested_user = {\n email: payload['https://sassbox.com/email'],\n first_name: payload['https://sassbox.com/first_name'],\n last_name: payload['https://sassbox.com/last_name']\n }\n rescue JWT::VerificationError, JWT::DecodeError\n render json: { errors: ['Not Authenticated'] }, status: :unauthorized\n end", "def auth?\n true\n end", "def auth_user_file\n \t\treturn @htaccess_hash['AuthUserFile']\n \tend", "def valid_for_authentication?; end", "def valid_for_authentication?; end", "def client_secret=(client_secret); end", "def client_secret=(client_secret); end" ]
[ "0.6422438", "0.6273565", "0.62371933", "0.62371933", "0.61892486", "0.6171844", "0.5922347", "0.5914494", "0.5897142", "0.58174866", "0.5811394", "0.5811394", "0.5811394", "0.5811394", "0.5811394", "0.57409936", "0.5740043", "0.5712492", "0.5687165", "0.56609106", "0.56609106", "0.56609106", "0.56578946", "0.56321955", "0.56157506", "0.56121117", "0.55835634", "0.5563902", "0.5533067", "0.55225235", "0.55208653", "0.5508466", "0.55038553", "0.5497577", "0.5494988", "0.5491562", "0.54882026", "0.5484789", "0.54809827", "0.547828", "0.5439757", "0.54379445", "0.54359704", "0.5428794", "0.5425334", "0.54203594", "0.5420228", "0.5410491", "0.5404239", "0.54017", "0.5399276", "0.5399276", "0.53992456", "0.5397156", "0.5392973", "0.53861743", "0.5384365", "0.5381892", "0.5377009", "0.53758633", "0.53702825", "0.53569794", "0.53519297", "0.5347096", "0.5341685", "0.5340319", "0.53402966", "0.5340038", "0.53260475", "0.532398", "0.53223395", "0.5316828", "0.531401", "0.53137773", "0.5312486", "0.53107774", "0.531071", "0.53082687", "0.5305615", "0.5305615", "0.530524", "0.5304006", "0.5304006", "0.5302453", "0.5279491", "0.52790046", "0.52786803", "0.5274892", "0.5274452", "0.5272337", "0.5270961", "0.52689964", "0.5261142", "0.5260527", "0.5258711", "0.52511394", "0.52459496", "0.524291", "0.524291", "0.5241834", "0.5241834" ]
0.0
-1
Helper for windows clang compiling
def setCXXEnv # Could store and restore these... @Clang.setupEnv end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_for_clang()\n catch_error(\"You passed the --clang option and clang is not in your path. \\nPlease try again or do not use --clang.\") do\n exec_cmd(%[command -v clang])\n end\n return true\n end", "def compile(compilable)\r\n compiler = File.expand_path @compiler_exe, @compiler_path\r\n result = []\r\n \r\n compilable.compiler_target_files.each do |target_file|\r\n # Construct paths.\r\n include_paths = \"-I#{compilable.compiler_include_paths.join ';'}\" unless\r\n compilable.compiler_include_paths.empty?\r\n \r\n module_paths = \"-M#{compilable.compiler_module_paths.join ';'}\" unless\r\n compilable.compiler_module_paths.empty?\r\n \r\n library_paths = \"-L#{compilable.compiler_library_paths.join ';'}\" unless\r\n compilable.compiler_library_paths.empty?\r\n \r\n # Run the NetLinx compiler.\r\n # Note: NLRC.exe v2.1 freaks out if empty arguments (\"\") are in the command.\r\n cmd = ''\r\n cmd += 'wine ' if @use_wine or compiler.include? '/.wine/'\r\n cmd += \"\\\"#{compiler}\\\" \\\"#{target_file}\\\"\"\r\n cmd += \" \\\"#{include_paths}\\\"\" if include_paths\r\n cmd += \" \\\"#{module_paths}\\\"\" if module_paths\r\n cmd += \" \\\"#{library_paths}\\\"\" if library_paths\r\n \r\n io = IO.popen cmd\r\n stream = io.read\r\n io.close\r\n \r\n # Build the result.\r\n result << NetLinx::CompilerResult.new(\r\n compiler_target_files: [target_file],\r\n compiler_include_paths: compilable.compiler_include_paths,\r\n compiler_module_paths: compilable.compiler_module_paths,\r\n compiler_library_paths: compilable.compiler_library_paths,\r\n stream: stream\r\n )\r\n end\r\n \r\n result\r\n end", "def checkTools()\n\n\tif (PATH_CLANG_FORMAT.empty?)\n\t\tabort(\"Unable to find clang-format!\");\n\tend\n\n\tif (PATH_UNCRUSTIFY.empty?)\n\t\tabort(\"Unable to find uncrustify!\");\n\tend\n\nend", "def bash_on_windows?; end", "def using_gcc?\n # Match gcc, /usr/local/bin/gcc-4.2, etc. (Clang is \"xcrun cc\")\n File.basename(RbConfig::MAKEFILE_CONFIG[\"CC\"]).match(/\\Agcc\\b/)\nend", "def target_win32?\n return true if ENV['OS'] == 'Windows_NT'\n build.is_a?(MRuby::CrossBuild) && build.host_target.to_s =~ /mingw/\nend", "def dlltool(dllname, deffile, libfile)\n # define if we are using GCC or not\n if Rake::ExtensionCompiler.mingw_gcc_executable then\n dir = File.dirname(Rake::ExtensionCompiler.mingw_gcc_executable)\n tool = case RUBY_PLATFORM\n when /mingw/\n File.join(dir, 'dlltool.exe')\n when /linux|darwin/\n File.join(dir, \"#{Rake::ExtensionCompiler.mingw_host}-dlltool\")\n end\n return \"#{tool} --dllname #{dllname} --def #{deffile} --output-lib #{libfile}\"\n else\n if RUBY_PLATFORM =~ /mswin/ then\n tool = 'lib.exe'\n else\n fail \"Unsupported platform for cross-compilation (please, contribute some patches).\"\n end\n return \"#{tool} /DEF:#{deffile} /OUT:#{libfile}\"\n end\nend", "def compileCmd(srcfile)\n\t\treturn case BuildEnv::entityTypeSafe(srcfile)[0]\n\t\t\twhen :c then \"#{@CC} #{@CFLAGS.join(' ')}\"\n\t\t\twhen :cxx then \"#{@CXX} #{@CXXFLAGS.join(' ')}\"\n\t\t\twhen :f then \"#{@FCC} #{@FFLAGS.join(' ')}\" \n\t\t\telse raise \"shouldn't happen\"\n\t\tend\n\tend", "def compiler cmdfile,config\n cc=config['compiler']\n raise GaudiConfigurationError,\"Missing 'compiler' setting\" unless cc\n return command_line(cc,cmdfile,config.fetch('compiler_commandfile_prefix',\"\"))\n end", "def install\n clang_version = '10.0.0'\n\n include.install Dir['include/c++'] unless File.directory? \"#{include}/c++\"\n \"#{include}/c++\".install Dir['include/c++/v1'] unless File.directory? 
\"#{include}/c++/v1\"\n lib.install Dir['lib/clang'] unless File.directory? \"#{lib}/clang\"\n \"#{lib}/clang\".install Dir['lib/clang/#{clang_version}'] unless File.directory? \"#{lib}/clang/#{clang_version}\"\n lib.install Dir['lib/oclint']\n bin.install Dir['bin/*']\n\n end", "def compile\n read_yml if File.exists?(@project)\n\n default_options = {}\n other_options = {}\n\n @compile_options.each do |k,v| \n if /default/.match(k)\n default_options[k] = v\n else\n other_options[k] = v\n end\n end\n\n command = \"#{@flex_sdk_bin}#{@compiler}\"\n other_options.each { |k,v| command += \" -#{k}=\\\"#{[v].flatten.join ','}\\\"\" }\n default_options.each { |k,v| command += \" -#{k}=\\\"#{[v].flatten.join ','}\\\"\" }\n command += \" #{@document_class}.as\"\n\n# puts command\n# TextMate.exit_show_html\n\n @command = command\n#TextMate.exit_show_html\n if process_output command\n# load_debugger\n display_web_player #unless ARGV[0] = \"--display\"\n# display_player\n end\nend", "def compile filetask,system_config,platform\n cmd_file=command_file(filetask.name,system_config,platform)\n if File.exists?(cmd_file)\n mkdir_p(File.dirname(filetask.name),:verbose=>false)\n config=system_config.platform_config(platform)\n if is_assembly?(filetask.prerequisites.first)\n cmdline = assembler(cmd_file,config)\n else\n cmdline = compiler(cmd_file,config)\n end\n sh(cmdline.join(' '))\n else\n raise GaudiError, \"Missing command file for #{filetask.name}\"\n end\n end", "def compile obj, src\n sh \"gcc #{$C_FLAGS.join ' '} -c #{src} -o #{obj}\"\nend", "def test_should_compile_file_with_linker_error\n \n dir = \"./test_case/linker_error.rb\"\n filename = dir\n \n result = check_and_compile_file( dir , filename )\n \n assert_equal(true, result)\n \n end", "def compile_c\n puts \"Compilando archivos fuentes:\"\n objs = @objs + [@main]\n objs.each do |obj|\n command = \"#{@cc} #{@debug} -c -o #{obj} #{obj[0..-2] + \"c\"} #{@cflags}\"\n puts \"\\t\"+ command\n exit (0) if not((system(command)))\n end\n command = \"#{@cc} #{@debug} -o #{@main[0..-3]} #{objs.join(\" \")}\" +\n \" #{@cflags}\"\n puts \"\\t\"+ command\n puts \"No compilo de forma correcta\" if not(system(command))\nend", "def compile\n cmd = self.command\n#puts ' + ' + cmd\n log.debug \"Invoking the compiler\"\n rc = Platform.execute cmd\n log.debug \"Compilation complete; rc=#{rc.to_s}\"\n rc\n end", "def lessc(file_info)\n compile_file(file_info[1], \"app\")\nend", "def compile_to_c\n \"\"\n end", "def compileCXX(params)\n\t\tsrc = requireParam(params, :src)\n\t\ttarget = requireParam(params, :target)\n\t\totherOptions = params[:opts] || []\n\t\t\n\t\tprintAndCall(\"#{compileCmd(src)} #{otherOptions.join(' ')} #{@INCDIRS.map {|dirpath| \"-I#{dirpath}\"}.join(' ')} -o #{target} -c #{src}\")\n\tend", "def compile\n exe_file = File.join(@temp_dir, 'exe')\n result = TTY::Command.new(printer: :quiet).run!('gcc', '-o', exe_file, @src_file)\n [exe_file, result]\n end", "def with_codethink_compiler_flags(platform, env = {}, opts = {})\n env = with_standard_compiler_flags(env = env, opts = opts)\n\n compiler_flags =\n {\n \"LDFLAGS\" => \"-Wl,-rpath,#{install_dir}/lib -L#{install_dir}/lib\",\n \"CFLAGS\" => \"-I#{install_dir}/include -O2\",\n }\n\n return env.merge(compiler_flags).\n merge(\"CXXFLAGS\" => compiler_flags[\"CFLAGS\"]).\n merge(\"CPPFLAGS\" => compiler_flags[\"CFLAGS\"])\nend", "def include_win?\n RUBY_PLATFORM.include?('mswin') or RUBY_PLATFORM.include?('mingw')\nend", "def build_source(source, target, echo, custom)\n verbose(VERBOSE)\n flags = 
\"\"\n cc = \"\"\n own_include = \"\"\n\n if LANGUAGE == \"c\" then\n flags = CFLAGS\n cc = CC\n else\n flags = CXXFLAGS\n cc = CXX\n end\n\n path = File.dirname(target)\n if not File.exists? path then\n cmd \"mkdir -p #{path}\"\n end\n\n if echo\n if ENV['silent'] != \"true\" and DRYRUN != true then\n msg \"src #{C_NORMAL}#{source}\"\n end\n end\n\n if defined? ADD_OWN_INCLUDE and ADD_OWN_INCLUDE == true then\n own_include = \"-Iinclude\"\n end\n\n cc_command = \"#{COMPILER} -c #{flags.join(\" \")} #{custom} #{DEFINE.map {|d| \"-D\" + d}.join(\" \")} #{own_include} #{INCLUDE.map {|i| \"-I\" + corto_replace(i)}.join(\" \")} #{source} -o #{target}\"\n begin\n cmd cc_command\n rescue\n STDERR.puts \"\\033[1;31mcorto:\\033[0;49mcorto: command failed: #{cc_command}\"\n abort\n end\nend", "def windows_init\n # FIXME: hardcoded to VS10 on C: drive, should pull the information from vcvars.bat\n ENV['PATH'] = 'C:\\Program Files\\Microsoft Visual Studio 10.0\\Common7\\IDE\\;C:\\Program Files\\Microsoft Visual Studio 10.0\\VC\\BIN;C:\\Program Files\\Microsoft Visual Studio 10.0\\Common7\\Tools' + ENV['PATH']\n ENV['INCLUDE'] = 'INCLUDE=C:\\Program Files\\Microsoft Visual Studio 10.0\\VC\\INCLUDE;C:\\Program Files\\Microsoft SDKs\\Windows\\v7.0A\\include;'\n ENV['LIB'] = 'C:\\Program Files\\Microsoft Visual Studio 10.0\\VC\\LIB;C:\\Program Files\\Microsoft SDKs\\Windows\\v7.0A\\lib;'\n ENV['LIBPATH'] = 'C:\\WINDOWS\\Microsoft.NET\\Framework\\v4.0.30319;C:\\WINDOWS\\Microsoft.NET\\Framework\\v3.5;C:\\Program Files\\Microsoft Visual Studio 10.0\\VC\\LIB;'\n ENV['VCINSTALLDIR'] = \"C:\\Program Files\\\\Microsoft Visual Studio 10.0\\\\VC\\\\\"\n ENV['VS100COMNTOOLS'] = \"C:\\\\Program Files\\\\Microsoft Visual Studio 10.0\\\\Common7\\\\Tools\\\\\"\n ENV['VSINSTALLDIR'] = \"C:\\\\Program Files\\\\Microsoft Visual Studio 10.0\\\\\"\n ENV['WindowsSdkDir'] = \"C:\\\\Program Files\\\\Microsoft SDKs\\\\Windows\\\\v7.0A\\\\\"\n end", "def make_command_line(ary)\n case RUBY_PLATFORM\n when /mswin32|bccwin32/\n make_command_line_windows(ary)\n else\n make_command_line_unix(ary)\n end\n end", "def compile_project\n debug_msg \" compiling #{@project_name}\"\n system(HHC_PATH, @project_name)\n end", "def mkcompiler source=''\n VishCompiler.new source\nend", "def compile_bin(cpu = host_cpu)\n platform = target_platform(cpu)\n native_dir = File.expand_path('../aws-crt-ffi', File.dirname(__FILE__))\n tmp_dir = File.expand_path(\"../tmp/#{platform.cpu}\", File.dirname(__FILE__))\n tmp_build_dir = File.expand_path('build', tmp_dir)\n\n # We need cmake to \"install\" aws-crt-ffi so that the binaries end up in a\n # predictable location. 
But cmake still adds subdirectories we don't want,\n # so we'll \"install\" under tmp, and manually copy to bin/ after that.\n tmp_install_dir = File.expand_path('install', tmp_dir)\n\n build_type = 'RelWithDebInfo'\n\n config_cmd = [\n CMAKE,\n \"-H#{native_dir}\",\n \"-B#{tmp_build_dir}\",\n \"-DCMAKE_INSTALL_PREFIX=#{tmp_install_dir}\",\n \"-DCMAKE_BUILD_TYPE=#{build_type}\",\n '-DBUILD_TESTING=OFF',\n ]\n\n # macOS can cross-compile for arm64 or x86_64.\n # This lets us prepare both types of gems from either type of machine.\n if platform.os == 'darwin'\n config_cmd.append(\"-DCMAKE_OSX_ARCHITECTURES=#{platform.cpu}\")\n end\n\n build_cmd = [\n CMAKE,\n '--build', tmp_build_dir,\n '--target', 'install',\n '--config', build_type,\n ]\n\n # Build using all processors\n if cmake_has_parallel_flag?\n build_cmd.append('--parallel')\n build_cmd.append(Etc.nprocessors.to_s)\n end\n\n run_cmd(config_cmd)\n run_cmd(build_cmd)\n\n # Move file to bin/, instead of where cmake installed it under tmp/\n bin_dir = crt_bin_dir(platform)\n FileUtils.mkdir_p(bin_dir)\n bin_name = crt_bin_name(platform)\n search_dirs = [\n 'bin', # windows\n 'lib64', # some 64bit unix variants\n 'lib', # some unix variants\n ]\n tmp_path = find_file(bin_name, search_dirs, tmp_install_dir)\n FileUtils.cp(tmp_path, bin_dir, verbose: true)\nend", "def isWindows()\r\n # See: http://stackoverflow.com/questions/4871309\r\n require 'rbconfig'\r\n return (RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/)\r\nend", "def initialize(name)\n init(name)\n yield self if block_given?\n define unless name.nil?\n puts \"Windows users require sed and perl for rake:compile - please install onto PATH\" if /mswin|mingw/ =~ RUBY_PLATFORM\n end", "def windows_nix?\n (/cygwin|mingw|bccwin/ =~ ruby_platform) != nil\n end", "def basic_compiler\n @basic_compiler ||= indirection.terminus(config['basic_compiler'])\n end", "def compiler?(verify = false, writeable_directory = false)\n if (session.methods.include? :cache) && session.cache.exists?(\"compiler?\")\n return session.cache.read(\"compiler?\")\n end\n writeable_directory = '/var/tmp/' unless writeable_directory == true\n compilers = ['gcc', 'cc', 'tcc', 'pcc']\n compilers.each do |tool|\n next unless installed?(tool)\n if verify == true\n\n tmp_file = writeable_directory.to_s + ::Rex::Text.rand_text_alpha(12)\n tmp_file_c = tmp_file + '.c'\n match_string = ::Rex::Text.rand_text_alpha(32)\n test_c = \"#include <stdio.h>\\nint main(void){printf(\\\"#{match_string} %c\\\",0x0a);return 0;}\"\n test_c.each_line do |line|\n cmd_exec(\"echo '#{line}'>>#{tmp_file_c}\")\n end\n compile = \"#{tool} #{tmp_file_c} -o #{tmp_file}\"\n cmd_exec(compile)\n out = cmd_exec(tmp_file)\n rm tmp_file\n rm tmp_file_c\n if out.match(match_string)\n session.cache.add(\"compiler?\", tool) if session.methods.include? :cache\n return tool\n end\n else\n session.cache.add(\"compiler?\", tool) if session.methods.include? :cache\n return tool\n end\n end\n session.cache.add(\"compiler?\", false) if session.methods.include? 
:cache\n false\n end", "def target\n @target ||= \"i386-mingw32\"\n end", "def windows?\n ::RUBY_PLATFORM =~ /mingw|mswin/\n end", "def compile\n return if changed_ruby_files.empty?\n\n errors = changed_ruby_files.each_with_object([]) do |file, es|\n output = `ruby -cw \"#{file}\" 2>&1`\n next if output == \"Syntax OK\\n\"\n\n es << output\n end\n heading('Ruby Warnings', errors.join) unless errors.empty?\n end", "def test_should_be_syntax_error\n \n dir = \"./test_case/syntax_error.rb\"\n filename = dir\n \n result = check_and_compile_file( dir , filename )\n \n assert_equal(false, result)\n \n end", "def runCompiler(_override_type = nil)\n runOpen3StuckPrevention('make', '-j', $compileThreads.to_s) == 0\n end", "def javac(*args)\n Buildr.application.deprecated 'Use Java::Commands.javac instead.'\n Commands.javac(*args)\n end", "def compile(path)\n\tif isExecutable(path) and not $link\n\t\t$keep = true\n\t\treturn path\n\tend\n\t\n\tbinaryPath = File.join($testDir, File.basename($link ? $source[0] : path, File.extname(path)))\n\n\tcompilerOutput = Tempfile.new(\"compiler\")\n\n# FIXME: If $link is true, it will only check the extension of the first argument.\n\tif File.extname($link ? $source[0] : path) == \".c\"\n\t\tsystem \"gcc -O2 -o \\\"#{binaryPath}\\\" #{path} &> #{compilerOutput.path}\"\n\telsif File.extname($link ? $source[0] : path) == \".cpp\"\n\t\tsystem \"g++ -O2 -o \\\"#{binaryPath}\\\" #{path} &> #{compilerOutput.path}\"\n\telse\n\t\t$stderr.puts \"This program only works with C or C++ source code.\"\n\t\texit 1\n\tend\n\n\tcompilerMessages = compilerOutput.read\n\n\tunless compilerMessages.empty?\n\t\tunless File.exists?(binaryPath)\n\t\t\t$stderr.puts red(\"Couldn't compile #{path}.\")\n\t\tend\n\t\t$stderr.puts yellow(\"Compiler output for #{path}:\")\n\t\t$stderr.puts compilerMessages\n\tend\n\n\tcompilerOutput.close\n\tcompilerOutput.unlink\n\n\tunless File.exists?(binaryPath)\n\t\tcleanup\n\t\texit 1\n\tend\n\n\treturn binaryPath\nend", "def windows?\n\tRbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/\nend", "def command\n log.debug self.pretty_inspect\n\n throw 'Invalid linker' unless @ld.is_a?(Linker)\n throw 'One or more source files are required' unless @sources.length > 0\n# cflags = default_flags\n# cflags.concat @flags\n# end\n# throw cflags\n\n# topdir = h[:topdir] || ''\n# ld = @ld\n# ldadd = h[:ldadd]\n# ld.flags = h[:ldflags]\n# ld.output = Platform.pathspec(h[:output])\n# ld.rpath = h[:rpath] if h[:rpath].length > 0\n\n# inputs = h[:sources]\n# inputs = [ inputs ] if inputs.is_a? 
String\n# inputs = inputs.map { |x| Platform.pathspec(topdir + x) }\n# throw 'One or more sources are required' unless inputs.count\n\n#TODO:if @combine\n# return [ @path, cflags, '-combine', ldflags, inputs, ldadd ].flatten.join(' ')\n#\n \n cmd = [ @path, '-DHAVE_CONFIG_H', '-I.', @platform_cflags, flags, '-c', @sources ].flatten.join(' ')\n\n cmd += Platform.dev_null if @quiet\n\n log.debug \"Compiler command: #{cmd}\"\n\n cmd\n end", "def compile_ruby\n if $DEBUG\n puts \"user_home: #{user_home}\"\n puts \"user_local: #{user_local}\"\n puts \"make_command: #{make_command}\"\n puts \"version: #{version}\"\n puts \"ruby_cc_version: #{ruby_cc_version}\"\n puts \"ruby_source: #{ruby_source}\"\n puts \"srcdir: #{srcdir}\"\n puts \"blddir: #{blddir}\"\n puts \"libdir: #{libdir}\"\n end\n\n mingw32\n environment\n download_source\n extract_source\n makefile_in_bak # create Makefile.in.bak\n makefile_in # create Makefile.in\n configure # create Makefile\n make # creates ruby.exe\n make_install\n update_config\n end", "def compile_to_ruby\n \"\"\n end", "def compile(script, filename=nil, linenum=nil)\n filename ||= 'none'\n linenum ||= 1\n native_compile(script, filename, linenum)\n end", "def do_compile_command(name, file)\n output, result = compile_file(name)\n output = clean_debug(output)\n if (output != \"\" || result != 0)\n puts \"----------------------------------------------------------------------\"\n puts \"BUG: %s failed to compile\" % name\n puts \"----------------------------------------------------------------------\"\n puts output\n puts \"----------------------------------------------------------------------\"\n exit 1\n end\nend", "def is_mingw?\n @path =~ /mingw/ # Kludge\n end", "def tidy_command\n is_windows = (RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/)\n bin = is_windows ? 'tidy.exe' : 'tidy'\n cmd = \"#{bin} #{@tidy_flags.join(' ')}\"\n cmd\n end", "def make_compile_rule(src)\n sha = Digest::SHA1.hexdigest(src)\n fn = File.basename(src, '.c')\n dest = \"build/#{sha}_#{fn}.o\"\n SOURCE_LOOKUP[src] = dest\n\n CompileTask.define_task(dest => [src]) do |t|\n sh \"#{TOOLCHAIN}gcc #{t.cflags.join(' ')} -c #{t.prerequisites.first} -o #{t.name}\"\n end\nend", "def make_deps t\n sh \"gcc -MM -MF #{t.name} #{$C_FLAGS.join ' '} -c #{t.source}\"\nend", "def cflags(cc_path)\n res = []\n if cc_path =~ /clang/\n res.push '-gcc-toolchain', @ndk_path + '/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86'\n res.push '-isystem', @ndk_path + '/toolchains/llvm-3.1/prebuilt/linux-x86/lib/clang/3.1/include'\n res.push %w{ -ffunction-sections -funwind-tables -fstack-protector -D__ARM_ARCH_5__ -D__ARM_ARCH_5T__ -D__ARM_ARCH_5E__ -D__ARM_ARCH_5TE__ -target armv7-none-linux-androideabi -march=armv7-a -mfloat-abi=softfp -mfpu=vfpv3-d16 -mthumb -Os -fomit-frame-pointer -fno-strict-aliasing -I. 
-DANDROID -fblocks -D_GNU_SOURCE -D__BLOCKS__ -Wa,--noexecstack -O0 -g }\n res.push \"-I#{@ndk_path}/platforms/android-14/arch-arm/usr/include\"\n end\n res.flatten\n end", "def build_projects_for_dir(path)\n puts\n puts \"Building Projects for Directory: #{path}\"\n \n Dir.foreach path do |filename|\n\n if $mac && filename.match(/.*\\.xcodeproj/)\n puts \" Building #{filename}\" \n result = `cd #{path}; xcodebuild -project #{filename} 2>&1`\n if result.match(/\\*\\* BUILD SUCCEEDED \\*\\*/)\n puts \" (success)\"\n else\n puts \" (FAIL) ************************************\"\n #puts result\n end\n\n elsif $win && filename.match(/.*\\.vcxproj/) && !filename.match(/.*\\.vcxproj\\..*/)\n toolset = \"\"\n toolset = \"/p:PlatformToolset=v110\" if $vs2012\n\n puts \" Building #{filename} for Win32\"\n result = `msbuild.exe /target:rebuild /p:Platform=Win32 #{toolset} #{path}/#{filename} 2>&1`\n if result.match(/(0 error|up\\-to\\-date|Build succeeded\\.)/)\n puts \" (success)\"\n else\n puts \" (FAIL) ************************************\"\n #puts result\n end\n\n puts \" Building #{filename} for x64\"\n result = `msbuild.exe /target:rebuild /p:Platform=x64 #{toolset} #{path}/#{filename} 2>&1`\n if result.match(/(0 error|up\\-to\\-date|Build succeeded\\.)/)\n puts \" (success)\"\n else\n puts \" (FAIL) ************************************\"\n #puts result\n end\n\n elsif File.directory?(\"#{path}/#{filename}\") && filename != \".\" && filename != \"..\" && filename != \"build\" && filename != \"sysbuild\" && !filename.match(/.*\\.xcodeproj/)\n build_projects_for_dir(\"#{path}/#{filename}\")\n end\n end\n \n puts\nend", "def compile\n Dir.chdir(build_path) do\n do_compile\n end\n end", "def compile_extension(extension, platform)\n compiler_options = compiler_options()\n compiler_class = compiler_class(extension)\n\n compiler_options[:platform] = platform\n\n compiler = compiler_class.new(extension, compiler_options)\n\n compiler.compile\n end", "def compile_scheme_scripts\n rm_f 'config/TRUST-PRECOMPILED'\n # We need to give a real path here, because \".\" will cause problems for\n # the engine. And it needs to be a Windows path, not a Cygwin path!\n # We need to be in command line mode, not runtime mode, so we will write\n # out our file count for our progress bar.\n run './engine/win32/Halyard', '-c', '(exit-script)', absolute_path(pwd)\n run 'touch', 'config/TRUST-PRECOMPILED'\nend", "def make(args)\n # Maybe we should write an \"uncrustify\" fastlane action?...\n Dir.chdir '..' do\n sh 'make ' + args\n end\nend", "def make(args)\n # Maybe we should write an \"uncrustify\" fastlane action?...\n Dir.chdir '..' 
do\n sh 'make ' + args\n end\nend", "def compile(script, filename=nil, linenum=nil, global=nil)\n filename ||= 'none'\n linenum ||= 1\n native_compile(script, filename, linenum, global)\n end", "def windows?\n ruby_platform?(:windows)\nend", "def locate_sysnative_cmd(cmd)\n if ::File.exist?(\"#{ENV['WINDIR']}\\\\sysnative\\\\#{cmd}\")\n \"#{ENV['WINDIR']}\\\\sysnative\\\\#{cmd}\"\n elsif ::File.exist?(\"#{ENV['WINDIR']}\\\\system32\\\\#{cmd}\")\n \"#{ENV['WINDIR']}\\\\system32\\\\#{cmd}\"\n else\n cmd\n end\nend", "def ruby_arch\n case Common.target_platform\n when /darwin/\n 'x86_64-darwin10'\n when 'linux-x86_64'\n 'x86_64-linux'\n when 'linux-x86'\n 'i686-linux'\n when /windows/\n 'x64-mingw64'\n end\nend", "def windows_ruby?\n !!(RUBY_PLATFORM =~ /mswin|mingw|windows/)\n end", "def try_to_compile(code, command_options = nil)\n begin\n boolean = false\n stderr_orig = $stderr.dup\n stdout_orig = $stdout.dup\n\n Dir.chdir(Dir.tmpdir) do\n File.write(cpp_source_file, code)\n\n if command_options\n command = \"#{cpp_command} #{command_options} \"\n else\n command = \"#{cpp_command} \"\n end\n\n command += \"#{cpp_out_file} \"\n command += cpp_source_file\n\n $stderr.reopen(IO::NULL)\n $stdout.reopen(IO::NULL)\n boolean = system(command)\n end\n ensure\n FileUtils.rm_f(cpp_source_file)\n FileUtils.rm_f(cpp_out_file)\n $stdout.reopen(stdout_orig)\n $stderr.reopen(stderr_orig)\n end\n\n boolean\n end", "def test_compile_bundle_artifacts_path_with_spaces\n skip(\"only necessary to test on Windows\") unless Gem.win_platform?\n old_tempdir = @tempdir\n old_output_dir = @output_dir\n\n old_tmp = ENV[\"TMP\"]\n old_temp = ENV[\"TEMP\"]\n old_tmpdir = ENV[\"TMPDIR\"]\n\n # We want to make sure Dir.tmpdir returns the path containing \"DIRWIT~1\"\n # so that we're testing whether the compiler expands the path properly. To\n # do this, \"dir with spaces\" must not be the last path component.\n #\n # This is because Dir.tmpdir calls File.expand_path on ENV[TMPDIR] (or\n # ENV[TEMP], etc.). When \"DIRWIT~1\" is the last component,\n # File.expand_path will expand this to \"dir with spaces\". 
When it's not\n # the last component, it will leave \"DIRWIT~1\" as-is.\n @tempdir = File.join(@tempdir, \"dir with spaces\", \"tmp\")\n FileUtils.mkdir_p(@tempdir)\n @tempdir = File.join(old_tempdir, \"DIRWIT~1\", \"tmp\")\n\n @output_dir = File.join(@tempdir, \"output\")\n FileUtils.mkdir_p(@output_dir)\n\n [\"TMP\", \"TEMP\", \"TMPDIR\"].each { |varname| ENV[varname] = @tempdir }\n\n util_reset_arch\n\n artifact = \"foo.#{RbConfig::CONFIG[\"DLEXT\"]}\"\n\n gem_file = util_bake_gem(\"foo\") { |s|\n util_fake_extension s, \"foo\", util_custom_configure(artifact)\n }\n\n compiler = Gem::Compiler.new(gem_file, :output => @output_dir)\n output_gem = nil\n\n use_ui @ui do\n output_gem = compiler.compile\n end\n\n assert_path_exists File.join(@output_dir, output_gem)\n spec = util_read_spec File.join(@output_dir, output_gem)\n\n assert_includes spec.files, \"lib/#{artifact}\"\n ensure\n if Gem.win_platform?\n FileUtils.rm_rf @tempdir\n\n ENV[\"TMP\"] = old_tmp\n ENV[\"TEMP\"] = old_temp\n ENV[\"TMPDIR\"] = old_tmpdir\n\n @tempdir = old_tempdir\n @output_dir = old_output_dir\n end\n end", "def compile\n end", "def search(compilers)\n res = nil\n if ENV['CC']\n res = ENV['CC']\n else\n compilers.each do |command|\n if (command =~ /^\\// and File.exists?(command)) or Platform.which(command)\n res = command\n break\n end\n end\n end\n\n # FIXME: kludge for Windows, breaks mingw\n if Platform.is_windows?\n res = 'cl.exe'\n end\n\n throw 'No suitable compiler found' if res.nil? || res == ''\n\n if Platform.is_windows? && res.match(/cl.exe/i)\n help = ' /? <NUL'\n else\n help = ' --help'\n end\n \n # Verify the command can be executed\n cmd = res + help + Platform.dev_null\n unless Platform.execute(cmd)\n puts \"not found\"\n print \" -- tried: \" + cmd\n raise\n end\n\n puts res\n res\n end", "def get_filecode()\n \"__EMIT_#{ARGV[0].gsub(/[^\\w]/, \"_\").upcase}__\"\nend", "def create_compile_ext_tasks(source_root, dest_root, invoking_task)\n compiled_ext = \"#{source_root}/#{SPECIAL_BUNDLE_NAMES[File.basename(source_root)] || File.basename(source_root)}.bundle\"\n create_copy_file_tasks(FileList[compiled_ext], source_root, dest_root, invoking_task)\n file compiled_ext => FileList[\"#{source_root}/*.c\"] do\n cd source_root do\n `ruby extconf.rb; make >/dev/null 2>&1`\n end\n end\nend", "def compile!\n puts \"Compiling website..\"\n puts %x[rm -rf output]\n puts %x[nanoc compile]\nend", "def compile!\n puts \"Compiling website..\"\n puts %x[rm -rf output]\n puts %x[nanoc compile]\nend", "def compile!\n puts \"Compiling website..\"\n puts %x[rm -rf output]\n puts %x[nanoc compile]\nend", "def folly_flags()\n return NewArchitectureHelper.folly_compiler_flags\nend", "def get_compile_line(input, output)\n\n\tlibs_path = LIBRARY_PATH.map{ |lib| \"-library-path+='#{File.join(current_path, lib)}'\"}.join(\" \")\n\tsources_path = SOURCE_PATH.map{ |lib| \"-sp+='#{File.join(current_path, lib)}'\"}.join(\" \")\n\n\tline = \"mxmlc #{File.join(current_path(), input)} -o=#{File.join(current_path(), output)} -debug=#{DEBUG} #{libs_path} #{sources_path} #{EXTRA}\"\nend", "def platform\n \"win\"\n end", "def compileSourceOfTarget(target)\n\t\tincstr = @includesString\t\n\t\tsrc_file = targetToSource(target)\n\t\ttarget_ext = File.extname target\n\t\tsource_ext = File.extname src_file\n\t\tartifacts = [target]\n\t\tdebug \"COMP: Target: \\'#{target}\\'\"\n\t\t#debug \"COMP: Source: \\'#{src_file}\\'\"\n\t\t#expected artifacts\n\t\tif (target_ext == '.cmx') \n\t\t\tartifacts.push(changeExt(target, 
'.o'))\n\t\tend\n\t\tif (source_ext == '.ml')\n\t\t\tunless(File.exist?(changeExt(target, '.mli')))\n\t\t\t\tartifacts.push(changeExt(target, '.cmi'))\n\t\t\tend\n\t\tend\n\n\t\tcase @compiler\n\t\t\twhen @@byteCodeCompiler \n\t\t\t\tcmd = \"ocamlfind ocamlc -g -c \"\n\t\t\t\tcmd += \"-package #{@libsStr} \" unless(@libsStr == \"\") \n\t\t\t\tcmd += \"#{incstr} #{src_file}\"\n\t\t\twhen @@machineCodeCompiler\n\t\t\t\tcmd = \"ocamlfind ocamlopt -c \"\n\t\t\t\tcmd += \"-p \" if (@profilingEnabled)\n\t\t\t\tcmd += \"-package #{@libsStr} \" unless(@libsStr == \"\") \n\t\t\t\tcmd += \"#{incstr} #{src_file}\"\n\t\t\telse \n\t\t\t\tputs \"ERROR couldn't compile to file: \" + target\n\t\tend\n\t\tdebug cmd\n\t\t`#{cmd}`\n\t\tif($? == 0) #the compiler had no error\n\t\t\tartifacts.each do |artfile|\n\t\t\t\t#debug \"compiler ran\"\n\t\t\t\tbuilddest = toBuildDirFile(artfile)\n\t\t\t\t#debug \"\\t builddirtarget: \" + builddest\n\t\t\t\t#debug \"\\t target: \" + artfile \n\t\t\t\t#if(not File.exists?(artfile)) then exit end\n\t\t\t\tunless (artfile == builddest)\n\t\t\t\t\tFile.rename(artfile, builddest)\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend", "def identify_compiler_version(executable, version_accuracy: :major)\n raise 'unknown version accuracy' if version_accuracy != :major\n\n _exit_status, output = runOpen3CaptureOutput executable, '--version'\n\n match = output.match(/\\(GCC\\)\\s+(\\d+)\\.(\\d+)\\.(\\d+)/i)\n\n if match\n major = match.captures[0]\n return \"gcc_#{major}\"\n end\n\n match = output.match(/clang\\s+version\\s+(\\d+)\\.(\\d+)\\.(\\d+)/i)\n\n if match\n major = match.captures[0]\n return \"clang_#{major}\"\n end\n\n raise 'could not detect compiler type or version'\nend", "def windows?\n RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/\nend", "def build filetask,system_config,platform\n cmd_file=command_file(filetask.name,system_config,platform)\n if File.exists?(cmd_file)\n config=system_config.platform_config(platform)\n if cmd_file.end_with?('.library')\n cmdline = librarian(cmd_file,config)\n else\n cmdline = linker(cmd_file,config)\n end\n sh(cmdline.join(' '))\n else\n raise GaudiError, \"Missing command file for #{filetask.name}\"\n end\n end", "def compiled_cython_file?\n return false unless ['.c', '.cpp'].include? extname\n return false unless lines.count > 1\n return lines[0].include?(\"Generated by Cython\")\n end", "def rule\n [ '$(CC)', '-DHAVE_CONFIG_H', '-I.', flags, '$(CFLAGS)', '-c', @sources ].flatten.join(' ')\n end", "def symlink_host_gcc\n version = DevelopmentTools.non_apple_gcc_version \"/usr/bin/gcc\"\n return if version.null?\n suffix = (version < 5) ? version.to_s[/^\\d+\\.\\d+/] : version.to_s[/^\\d+/]\n return if File.executable?(\"/usr/bin/gcc-#{suffix}\") || File.executable?(HOMEBREW_PREFIX/\"bin/gcc-#{suffix}\")\n FileUtils.mkdir_p HOMEBREW_PREFIX/\"bin\"\n [\"gcc\", \"g++\", \"gfortran\"].each do |tool|\n source = \"/usr/bin/#{tool}\"\n dest = HOMEBREW_PREFIX/\"bin/#{tool}-#{suffix}\"\n next if !File.executable?(source) || File.executable?(dest)\n FileUtils.ln_sf source, dest\n end\n end", "def symlink_host_gcc\n version = DevelopmentTools.non_apple_gcc_version \"/usr/bin/gcc\"\n return if version.null?\n suffix = (version < 5) ? 
version.to_s[/^\\d+\\.\\d+/] : version.to_s[/^\\d+/]\n return if File.executable?(\"/usr/bin/gcc-#{suffix}\") || File.executable?(HOMEBREW_PREFIX/\"bin/gcc-#{suffix}\")\n FileUtils.mkdir_p HOMEBREW_PREFIX/\"bin\"\n [\"gcc\", \"g++\", \"gfortran\"].each do |tool|\n source = \"/usr/bin/#{tool}\"\n dest = HOMEBREW_PREFIX/\"bin/#{tool}-#{suffix}\"\n next if !File.executable?(source) || File.executable?(dest)\n FileUtils.ln_sf source, dest\n end\n end", "def compile(tokens, options = T.unsafe(nil)); end", "def _inter source\n c = compile source\n cifrom c\nend", "def with_standard_compiler_flags(env = {}, opts = {})\n env ||= {}\n opts ||= {}\n compiler_flags =\n case Ohai[\"platform\"]\n when \"aix\"\n {\n \"CC\" => \"xlc_r -q64\",\n \"CXX\" => \"xlC_r -q64\",\n \"CFLAGS\" => \"-q64 -I#{install_dir}/embedded/include -D_LARGE_FILES -O\",\n \"LDFLAGS\" => \"-q64 -L#{install_dir}/embedded/lib -Wl,-blibpath:#{install_dir}/embedded/lib:/usr/lib:/lib\",\n \"LD\" => \"ld -b64\",\n \"OBJECT_MODE\" => \"64\",\n \"ARFLAGS\" => \"-X64 cru\",\n }\n when \"solaris2\"\n {\n \"CC\" => \"gcc -m64 -static-libgcc\",\n \"LDFLAGS\" => \"-Wl,-rpath,#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib -static-libgcc\",\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include -O2\",\n }\n when \"freebsd\"\n {\n \"CC\" => \"clang\",\n \"CXX\" => \"clang++\",\n \"LDFLAGS\" => \"-L#{install_dir}/embedded/lib -Wl,-rpath,#{install_dir}/embedded/lib\",\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include -O3 -D_FORTIFY_SOURCE=2 -fstack-protector\",\n }\n when \"windows\"\n arch_flag = windows_arch_i386? ? \"-m32\" : \"-m64\"\n opt_flag = windows_arch_i386? ? \"-march=i686\" : \"-march=x86-64\"\n {\n \"LDFLAGS\" => \"-L#{install_dir}/embedded/lib #{arch_flag} -fno-lto\",\n # We do not wish to enable SSE even though we target i686 because\n # of a stack alignment issue with some libraries. We have not\n # exactly ascertained the cause but some compiled library/binary\n # violates gcc's assumption that the stack is going to be 16-byte\n # aligned which is just fine as long as one is pushing 32-bit\n # values from general purpose registers but stuff hits the fan as\n # soon as gcc emits aligned SSE xmm register spills which generate\n # GPEs and terminate the application very rudely with very little\n # to debug with.\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include #{arch_flag} -O3 #{opt_flag}\",\n }\n else\n {\n \"LDFLAGS\" => \"-Wl,-rpath,#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib\",\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include -O3 -D_FORTIFY_SOURCE=2 -fstack-protector\",\n }\n end\n\n # merge LD_RUN_PATH into the environment. most unix distros will fall\n # back to this if there is no LDFLAGS passed to the linker that sets\n # the rpath. the LDFLAGS -R or -Wl,-rpath will override this, but in\n # some cases software may drop our LDFLAGS or think it knows better\n # and edit them, and we *really* want the rpath setting and do know\n # better. in that case LD_RUN_PATH will probably survive whatever\n # edits the configure script does\n extra_linker_flags = {\n \"LD_RUN_PATH\" => \"#{install_dir}/embedded/lib\",\n }\n\n if solaris2?\n ld_options = \"-R#{install_dir}/embedded/lib\"\n\n if platform_version.satisfies?(\"<= 5.10\")\n # in order to provide compatibility for earlier versions of libc on solaris 10,\n # we need to specify a mapfile that restricts the version of system libraries\n # used. 
See http://docs.oracle.com/cd/E23824_01/html/819-0690/chapter5-1.html\n # for more information\n # use the mapfile if it exists, otherwise ignore it\n mapfile_path = File.expand_path(Config.solaris_linker_mapfile, Config.project_root)\n ld_options << \" -M #{mapfile_path}\" if File.exist?(mapfile_path)\n end\n\n # solaris linker can also use LD_OPTIONS, so we throw the kitchen sink against\n # the linker, to find every way to make it use our rpath. This is also required\n # to use the aforementioned mapfile.\n extra_linker_flags[\"LD_OPTIONS\"] = ld_options\n end\n\n env.merge(compiler_flags)\n .merge(extra_linker_flags).\n # always want to favor pkg-config from embedded location to not hose\n # configure scripts which try to be too clever and ignore our explicit\n # CFLAGS and LDFLAGS in favor of pkg-config info\n merge({ \"PKG_CONFIG_PATH\" => \"#{install_dir}/embedded/lib/pkgconfig\" }).\n # Set default values for CXXFLAGS and CPPFLAGS.\n merge(\"CXXFLAGS\" => compiler_flags[\"CFLAGS\"])\n .merge(\"CPPFLAGS\" => compiler_flags[\"CFLAGS\"])\n .merge(\"OMNIBUS_INSTALL_DIR\" => install_dir)\n end", "def build_this bt;\r\n nmake = NMake.new\r\n cmd = '\"' + nmake.path + '\" -f makefile.vc'\r\n Platform.execute cmd, wd: (builddir bt)\r\n end", "def initialize_compiler\n self.compile_tasks = [:multi, :test, :check_manifest]\n end", "def compile\r\n case language\r\n when :boolexp\r\n true\r\n when :fart\r\n @prog = Farts::Parser.new.parse(src)\r\n log.info \"Compile of FART program - #{name}\"\r\n true\r\n else\r\n false\r\n end\r\n rescue Exception\r\n log.error $!\r\n @prog = nil\r\n false\r\n end", "def test_assembles_one_array_argument_into_cmd\n Crd::Flex::Command.new 'mxmlc' do |s|\n s.source_path << 'src'\n s.source_path << 'lib/src'\n cmd = s.to_cmd.split( /\\s+/ )\n assert_equal( 'mxmlc', cmd.shift )\n assert( cmd.include?( '-source-path+=src,lib/src' ), 'Could not find argument in to_cmd' )\n end\n end", "def define_compiler_tasks\n require \"rake/extensiontask\"\n\n @extensions.each do |name|\n clean_globs << \"lib/#{name}/*.{so,bundle,dll}\"\n\n Rake::ExtensionTask.new name, spec do |ext|\n ext.lib_dir = File.join(*[\"lib\", name.to_s, ENV[\"FAT_DIR\"]].compact)\n end\n end\n\n compile_tasks.each do |t|\n task t => :compile\n end\n rescue LoadError\n warn \"Couldn't load rake-compiler. Skipping. Run `rake newb` to fix.\"\n end", "def setupEnv\n # We require that we are ran in the developer shell. 
Some deps also\n # need the vcvarsall but for that they apply it on a case by case\n # basis, so we just need to check that we are correctly setup for\n # visual studio stuff\n\n # if which(\"MSBuild.exe\") == nil\n\n # warning \"MSBuild not found\"\n # onError %{You need to run this setup in \"Developer Command Prompt for VS 2017\"}\n\n # end\n end", "def compile_native\n\n # Get include directories\n incl_dir_ruby2d = \"#{Ruby2D.gem_dir}/ext/ruby2d/\"\n incl_dir_deps = \"#{Ruby2D.assets}/include/\"\n\n # Add compiler flags for each platform\n case $RUBY2D_PLATFORM\n\n when :macos\n ld_dir = \"#{Ruby2D.assets}/macos/universal/lib\"\n\n c_flags = '-arch arm64 -arch x86_64'\n\n ld_flags = ''\n ['mruby', 'SDL2', 'SDL2_image', 'SDL2_mixer', 'SDL2_ttf',\n 'jpeg', 'jxl', 'avif', 'png', 'tiff', 'webp',\n 'mpg123', 'ogg', 'FLAC', 'vorbis', 'vorbisfile', 'modplug',\n 'freetype', 'harfbuzz', 'graphite2'].each do |name|\n add_ld_flags(ld_flags, name, :archive, ld_dir)\n end\n\n ld_flags << \"-lz -lbz2 -liconv -lstdc++ \"\n ['Cocoa', 'Carbon', 'CoreVideo', 'OpenGL', 'Metal', 'CoreAudio', 'AudioToolbox',\n 'IOKit', 'GameController', 'ForceFeedback', 'CoreHaptics'].each do |name|\n add_ld_flags(ld_flags, name, :framework)\n end\n\n when :linux, :linux_rpi, :bsd\n # TODO: implement this\n # ld_flags = '-lSDL2 -lSDL2_image -lSDL2_mixer -lSDL2_ttf -lm -lGL'\n\n when :windows\n\n if RUBY_PLATFORM =~ /ucrt/\n ld_dir = \"#{Ruby2D.assets}/windows/mingw-w64-ucrt-x86_64/lib\"\n else\n ld_dir = \"#{Ruby2D.assets}/windows/mingw-w64-x86_64/lib\"\n end\n\n ld_flags = '-static -Wl,--start-group '\n ['mruby',\n 'SDL2',\n 'SDL2_image', 'jpeg', 'png', 'tiff', 'webp', 'jxl', 'hwy', 'jbig', 'deflate', 'lzma', 'zstd', 'Lerc',\n 'SDL2_mixer', 'mpg123', 'FLAC', 'vorbis', 'vorbisfile', 'ogg', 'modplug', 'opus', 'opusfile', 'sndfile',\n 'SDL2_ttf', 'freetype', 'harfbuzz', 'graphite2', 'bz2', 'brotlicommon', 'brotlidec',\n 'glew32', 'stdc++', 'z', 'ssp'\n ].each do |name|\n add_ld_flags(ld_flags, name, :archive, ld_dir)\n end\n ld_flags << '-lmingw32 -lopengl32 -lole32 -loleaut32 -limm32 -lversion -lwinmm -lrpcrt4 -mwindows -lsetupapi -ldwrite '\\\n '-lws2_32 -lshlwapi '\n ld_flags << '-Wl,--end-group'\n end\n\n # Compile the app\n run_cmd \"cc #{c_flags} -I#{incl_dir_ruby2d} -I#{incl_dir_deps} build/app.c #{ld_flags} -o build/app\"\n\n create_macos_bundle if $RUBY2D_PLATFORM == :macos\nend", "def compile(script, filename=nil, linenum=nil)\n raise NotImplementedError\n end", "def test_assembles_two_array_arguments_into_cmd\n Crd::Flex::Command.new 'mxmlc' do |s|\n s.source_path << 'src'\n s.source_path << 'lib/src'\n s.library_path << 'lib/bin'\n cmd = s.to_cmd.split( /\\s+/ )\n assert_equal( 'mxmlc', cmd.shift )\n assert( cmd.include?( '-source-path+=src,lib/src' ), 'Could not find argument in to_cmd' )\n assert( cmd.include?( '-library-path+=lib/bin' ), 'Could not find argument in to_cmd' )\n end\n end", "def make_asm(test_files)\n Dir.mkdir(TESTS_BIN) unless File.exists?(TESTS_BIN)\n log = {}\n # in the remote case of porting to windows, this should use Open3 instead of '2>&1'\n test_files.each do |f|\n output = `powerpc-elf-gcc -O0 --specs=archc -S #{TESTS_CODE+'/'+f} -o #{TESTS_BIN+'/'+f[0..-3]+'.s'} 2>&1`\n log[f] = output == '' ? 
\"No error.\" : output\n end\n return log\nend", "def windows?\n !!(RUBY_PLATFORM =~ /mswin|mingw/)\n end", "def compile\n puts \"Compilando archivos fuentes:\"\n @objs.each do |obj|\n command = \"#{@cc} -fpic -c -o #{obj} #{obj[0..-2] + \"c\"} #{@cflags}\"\n puts \"\\t\"+ command\n exit (0) if not((system(command)))\n end\n \n puts \"Convirtiendo a bibliotecas dinamicas\"\n system(\"mkdir lib\")\n @shared.each do |obj|\n library = obj.split('/').last\n compiled_libraries = `ls #{@lib_dir}`.split(\" \")\n \n libs = compiled_libraries.inject(\"\") {\n |string,lib|\n string += \"-l#{lib[3...-3]} \"\n }\n \n if (@so == \"linux\")\n command = \"#{@cc} -shared -o lib/lib#{library} #{obj[0..-3] + \"o\"}\" +\n \" -L#{@lib_dir} #{libs}\"\n elsif (@so == \"mac\")\n command = \"#{@cc} -shared -o lib/lib#{library} #{obj[0..-3] + \"o\"}\" +\n \" -L#{@lib_dir} #{libs}\" \n end\n puts \"\\t\" + command\n puts \"No compilo de forma correcta\" if not((system(command)))\n end\nend", "def compiledo\n\n end", "def build_env_bat\n connector_dir = File.join(@mysql_dir, 'lib')\n env = {mysql_lib: to_windows_path(connector_dir)}\n\n print 'ruby.exe location: '; env[:ruby_bin] = read_path\n print '7z.exe location: '; env[:zip_bin] = read_path\n print 'wincmp3.exe location: '; env[:compare_bin] = read_path\n puts\n\n FileUtils.cd(FRAMEWORK_ROOT) { File.write('env.bat', ENV_BAT_TEMPLATE % env) }\n end", "def gcc?\n linux? && present?('g++')\n end" ]
[ "0.63968444", "0.61771774", "0.5931661", "0.5886322", "0.581332", "0.577381", "0.5728605", "0.56815547", "0.565777", "0.5630128", "0.56062436", "0.5596375", "0.5581845", "0.5578365", "0.5558083", "0.5547668", "0.5503448", "0.54798096", "0.54627705", "0.5406443", "0.53834856", "0.53483784", "0.53367704", "0.53339666", "0.5330942", "0.53267646", "0.53100884", "0.5233261", "0.52284193", "0.5223838", "0.52066004", "0.5197429", "0.51931894", "0.5190574", "0.51861644", "0.51797706", "0.51583236", "0.51395196", "0.5129348", "0.5128069", "0.512716", "0.5125656", "0.5124717", "0.51200026", "0.51071894", "0.5102464", "0.50990695", "0.5095099", "0.509351", "0.5087001", "0.50729495", "0.5070503", "0.50695765", "0.50683224", "0.50566393", "0.50398487", "0.50398487", "0.5039219", "0.50329065", "0.5022895", "0.50187993", "0.5018369", "0.50171137", "0.50103647", "0.5008273", "0.50075626", "0.5000959", "0.4994897", "0.49924007", "0.49924007", "0.49924007", "0.49808857", "0.4976306", "0.49739406", "0.49708274", "0.49521938", "0.49505132", "0.49463338", "0.49458805", "0.49446082", "0.49431422", "0.49431422", "0.49428654", "0.49395192", "0.4936356", "0.49287722", "0.48985735", "0.48966947", "0.48964107", "0.48957708", "0.48917028", "0.488954", "0.48859918", "0.48850995", "0.48820513", "0.48812395", "0.48798504", "0.4876473", "0.48743927", "0.4869121" ]
0.5472407
18
Helper for windows clang compiling
def unsetCXXEnv @Clang.unsetEnv end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_for_clang()\n catch_error(\"You passed the --clang option and clang is not in your path. \\nPlease try again or do not use --clang.\") do\n exec_cmd(%[command -v clang])\n end\n return true\n end", "def compile(compilable)\r\n compiler = File.expand_path @compiler_exe, @compiler_path\r\n result = []\r\n \r\n compilable.compiler_target_files.each do |target_file|\r\n # Construct paths.\r\n include_paths = \"-I#{compilable.compiler_include_paths.join ';'}\" unless\r\n compilable.compiler_include_paths.empty?\r\n \r\n module_paths = \"-M#{compilable.compiler_module_paths.join ';'}\" unless\r\n compilable.compiler_module_paths.empty?\r\n \r\n library_paths = \"-L#{compilable.compiler_library_paths.join ';'}\" unless\r\n compilable.compiler_library_paths.empty?\r\n \r\n # Run the NetLinx compiler.\r\n # Note: NLRC.exe v2.1 freaks out if empty arguments (\"\") are in the command.\r\n cmd = ''\r\n cmd += 'wine ' if @use_wine or compiler.include? '/.wine/'\r\n cmd += \"\\\"#{compiler}\\\" \\\"#{target_file}\\\"\"\r\n cmd += \" \\\"#{include_paths}\\\"\" if include_paths\r\n cmd += \" \\\"#{module_paths}\\\"\" if module_paths\r\n cmd += \" \\\"#{library_paths}\\\"\" if library_paths\r\n \r\n io = IO.popen cmd\r\n stream = io.read\r\n io.close\r\n \r\n # Build the result.\r\n result << NetLinx::CompilerResult.new(\r\n compiler_target_files: [target_file],\r\n compiler_include_paths: compilable.compiler_include_paths,\r\n compiler_module_paths: compilable.compiler_module_paths,\r\n compiler_library_paths: compilable.compiler_library_paths,\r\n stream: stream\r\n )\r\n end\r\n \r\n result\r\n end", "def checkTools()\n\n\tif (PATH_CLANG_FORMAT.empty?)\n\t\tabort(\"Unable to find clang-format!\");\n\tend\n\n\tif (PATH_UNCRUSTIFY.empty?)\n\t\tabort(\"Unable to find uncrustify!\");\n\tend\n\nend", "def bash_on_windows?; end", "def using_gcc?\n # Match gcc, /usr/local/bin/gcc-4.2, etc. (Clang is \"xcrun cc\")\n File.basename(RbConfig::MAKEFILE_CONFIG[\"CC\"]).match(/\\Agcc\\b/)\nend", "def target_win32?\n return true if ENV['OS'] == 'Windows_NT'\n build.is_a?(MRuby::CrossBuild) && build.host_target.to_s =~ /mingw/\nend", "def dlltool(dllname, deffile, libfile)\n # define if we are using GCC or not\n if Rake::ExtensionCompiler.mingw_gcc_executable then\n dir = File.dirname(Rake::ExtensionCompiler.mingw_gcc_executable)\n tool = case RUBY_PLATFORM\n when /mingw/\n File.join(dir, 'dlltool.exe')\n when /linux|darwin/\n File.join(dir, \"#{Rake::ExtensionCompiler.mingw_host}-dlltool\")\n end\n return \"#{tool} --dllname #{dllname} --def #{deffile} --output-lib #{libfile}\"\n else\n if RUBY_PLATFORM =~ /mswin/ then\n tool = 'lib.exe'\n else\n fail \"Unsupported platform for cross-compilation (please, contribute some patches).\"\n end\n return \"#{tool} /DEF:#{deffile} /OUT:#{libfile}\"\n end\nend", "def compileCmd(srcfile)\n\t\treturn case BuildEnv::entityTypeSafe(srcfile)[0]\n\t\t\twhen :c then \"#{@CC} #{@CFLAGS.join(' ')}\"\n\t\t\twhen :cxx then \"#{@CXX} #{@CXXFLAGS.join(' ')}\"\n\t\t\twhen :f then \"#{@FCC} #{@FFLAGS.join(' ')}\" \n\t\t\telse raise \"shouldn't happen\"\n\t\tend\n\tend", "def compiler cmdfile,config\n cc=config['compiler']\n raise GaudiConfigurationError,\"Missing 'compiler' setting\" unless cc\n return command_line(cc,cmdfile,config.fetch('compiler_commandfile_prefix',\"\"))\n end", "def install\n clang_version = '10.0.0'\n\n include.install Dir['include/c++'] unless File.directory? \"#{include}/c++\"\n \"#{include}/c++\".install Dir['include/c++/v1'] unless File.directory? 
\"#{include}/c++/v1\"\n lib.install Dir['lib/clang'] unless File.directory? \"#{lib}/clang\"\n \"#{lib}/clang\".install Dir['lib/clang/#{clang_version}'] unless File.directory? \"#{lib}/clang/#{clang_version}\"\n lib.install Dir['lib/oclint']\n bin.install Dir['bin/*']\n\n end", "def compile\n read_yml if File.exists?(@project)\n\n default_options = {}\n other_options = {}\n\n @compile_options.each do |k,v| \n if /default/.match(k)\n default_options[k] = v\n else\n other_options[k] = v\n end\n end\n\n command = \"#{@flex_sdk_bin}#{@compiler}\"\n other_options.each { |k,v| command += \" -#{k}=\\\"#{[v].flatten.join ','}\\\"\" }\n default_options.each { |k,v| command += \" -#{k}=\\\"#{[v].flatten.join ','}\\\"\" }\n command += \" #{@document_class}.as\"\n\n# puts command\n# TextMate.exit_show_html\n\n @command = command\n#TextMate.exit_show_html\n if process_output command\n# load_debugger\n display_web_player #unless ARGV[0] = \"--display\"\n# display_player\n end\nend", "def compile filetask,system_config,platform\n cmd_file=command_file(filetask.name,system_config,platform)\n if File.exists?(cmd_file)\n mkdir_p(File.dirname(filetask.name),:verbose=>false)\n config=system_config.platform_config(platform)\n if is_assembly?(filetask.prerequisites.first)\n cmdline = assembler(cmd_file,config)\n else\n cmdline = compiler(cmd_file,config)\n end\n sh(cmdline.join(' '))\n else\n raise GaudiError, \"Missing command file for #{filetask.name}\"\n end\n end", "def compile obj, src\n sh \"gcc #{$C_FLAGS.join ' '} -c #{src} -o #{obj}\"\nend", "def test_should_compile_file_with_linker_error\n \n dir = \"./test_case/linker_error.rb\"\n filename = dir\n \n result = check_and_compile_file( dir , filename )\n \n assert_equal(true, result)\n \n end", "def compile_c\n puts \"Compilando archivos fuentes:\"\n objs = @objs + [@main]\n objs.each do |obj|\n command = \"#{@cc} #{@debug} -c -o #{obj} #{obj[0..-2] + \"c\"} #{@cflags}\"\n puts \"\\t\"+ command\n exit (0) if not((system(command)))\n end\n command = \"#{@cc} #{@debug} -o #{@main[0..-3]} #{objs.join(\" \")}\" +\n \" #{@cflags}\"\n puts \"\\t\"+ command\n puts \"No compilo de forma correcta\" if not(system(command))\nend", "def compile\n cmd = self.command\n#puts ' + ' + cmd\n log.debug \"Invoking the compiler\"\n rc = Platform.execute cmd\n log.debug \"Compilation complete; rc=#{rc.to_s}\"\n rc\n end", "def lessc(file_info)\n compile_file(file_info[1], \"app\")\nend", "def compile_to_c\n \"\"\n end", "def setCXXEnv\n # Could store and restore these...\n @Clang.setupEnv \n end", "def compileCXX(params)\n\t\tsrc = requireParam(params, :src)\n\t\ttarget = requireParam(params, :target)\n\t\totherOptions = params[:opts] || []\n\t\t\n\t\tprintAndCall(\"#{compileCmd(src)} #{otherOptions.join(' ')} #{@INCDIRS.map {|dirpath| \"-I#{dirpath}\"}.join(' ')} -o #{target} -c #{src}\")\n\tend", "def compile\n exe_file = File.join(@temp_dir, 'exe')\n result = TTY::Command.new(printer: :quiet).run!('gcc', '-o', exe_file, @src_file)\n [exe_file, result]\n end", "def with_codethink_compiler_flags(platform, env = {}, opts = {})\n env = with_standard_compiler_flags(env = env, opts = opts)\n\n compiler_flags =\n {\n \"LDFLAGS\" => \"-Wl,-rpath,#{install_dir}/lib -L#{install_dir}/lib\",\n \"CFLAGS\" => \"-I#{install_dir}/include -O2\",\n }\n\n return env.merge(compiler_flags).\n merge(\"CXXFLAGS\" => compiler_flags[\"CFLAGS\"]).\n merge(\"CPPFLAGS\" => compiler_flags[\"CFLAGS\"])\nend", "def include_win?\n RUBY_PLATFORM.include?('mswin') or 
RUBY_PLATFORM.include?('mingw')\nend", "def build_source(source, target, echo, custom)\n verbose(VERBOSE)\n flags = \"\"\n cc = \"\"\n own_include = \"\"\n\n if LANGUAGE == \"c\" then\n flags = CFLAGS\n cc = CC\n else\n flags = CXXFLAGS\n cc = CXX\n end\n\n path = File.dirname(target)\n if not File.exists? path then\n cmd \"mkdir -p #{path}\"\n end\n\n if echo\n if ENV['silent'] != \"true\" and DRYRUN != true then\n msg \"src #{C_NORMAL}#{source}\"\n end\n end\n\n if defined? ADD_OWN_INCLUDE and ADD_OWN_INCLUDE == true then\n own_include = \"-Iinclude\"\n end\n\n cc_command = \"#{COMPILER} -c #{flags.join(\" \")} #{custom} #{DEFINE.map {|d| \"-D\" + d}.join(\" \")} #{own_include} #{INCLUDE.map {|i| \"-I\" + corto_replace(i)}.join(\" \")} #{source} -o #{target}\"\n begin\n cmd cc_command\n rescue\n STDERR.puts \"\\033[1;31mcorto:\\033[0;49mcorto: command failed: #{cc_command}\"\n abort\n end\nend", "def windows_init\n # FIXME: hardcoded to VS10 on C: drive, should pull the information from vcvars.bat\n ENV['PATH'] = 'C:\\Program Files\\Microsoft Visual Studio 10.0\\Common7\\IDE\\;C:\\Program Files\\Microsoft Visual Studio 10.0\\VC\\BIN;C:\\Program Files\\Microsoft Visual Studio 10.0\\Common7\\Tools' + ENV['PATH']\n ENV['INCLUDE'] = 'INCLUDE=C:\\Program Files\\Microsoft Visual Studio 10.0\\VC\\INCLUDE;C:\\Program Files\\Microsoft SDKs\\Windows\\v7.0A\\include;'\n ENV['LIB'] = 'C:\\Program Files\\Microsoft Visual Studio 10.0\\VC\\LIB;C:\\Program Files\\Microsoft SDKs\\Windows\\v7.0A\\lib;'\n ENV['LIBPATH'] = 'C:\\WINDOWS\\Microsoft.NET\\Framework\\v4.0.30319;C:\\WINDOWS\\Microsoft.NET\\Framework\\v3.5;C:\\Program Files\\Microsoft Visual Studio 10.0\\VC\\LIB;'\n ENV['VCINSTALLDIR'] = \"C:\\Program Files\\\\Microsoft Visual Studio 10.0\\\\VC\\\\\"\n ENV['VS100COMNTOOLS'] = \"C:\\\\Program Files\\\\Microsoft Visual Studio 10.0\\\\Common7\\\\Tools\\\\\"\n ENV['VSINSTALLDIR'] = \"C:\\\\Program Files\\\\Microsoft Visual Studio 10.0\\\\\"\n ENV['WindowsSdkDir'] = \"C:\\\\Program Files\\\\Microsoft SDKs\\\\Windows\\\\v7.0A\\\\\"\n end", "def make_command_line(ary)\n case RUBY_PLATFORM\n when /mswin32|bccwin32/\n make_command_line_windows(ary)\n else\n make_command_line_unix(ary)\n end\n end", "def compile_project\n debug_msg \" compiling #{@project_name}\"\n system(HHC_PATH, @project_name)\n end", "def mkcompiler source=''\n VishCompiler.new source\nend", "def compile_bin(cpu = host_cpu)\n platform = target_platform(cpu)\n native_dir = File.expand_path('../aws-crt-ffi', File.dirname(__FILE__))\n tmp_dir = File.expand_path(\"../tmp/#{platform.cpu}\", File.dirname(__FILE__))\n tmp_build_dir = File.expand_path('build', tmp_dir)\n\n # We need cmake to \"install\" aws-crt-ffi so that the binaries end up in a\n # predictable location. 
But cmake still adds subdirectories we don't want,\n # so we'll \"install\" under tmp, and manually copy to bin/ after that.\n tmp_install_dir = File.expand_path('install', tmp_dir)\n\n build_type = 'RelWithDebInfo'\n\n config_cmd = [\n CMAKE,\n \"-H#{native_dir}\",\n \"-B#{tmp_build_dir}\",\n \"-DCMAKE_INSTALL_PREFIX=#{tmp_install_dir}\",\n \"-DCMAKE_BUILD_TYPE=#{build_type}\",\n '-DBUILD_TESTING=OFF',\n ]\n\n # macOS can cross-compile for arm64 or x86_64.\n # This lets us prepare both types of gems from either type of machine.\n if platform.os == 'darwin'\n config_cmd.append(\"-DCMAKE_OSX_ARCHITECTURES=#{platform.cpu}\")\n end\n\n build_cmd = [\n CMAKE,\n '--build', tmp_build_dir,\n '--target', 'install',\n '--config', build_type,\n ]\n\n # Build using all processors\n if cmake_has_parallel_flag?\n build_cmd.append('--parallel')\n build_cmd.append(Etc.nprocessors.to_s)\n end\n\n run_cmd(config_cmd)\n run_cmd(build_cmd)\n\n # Move file to bin/, instead of where cmake installed it under tmp/\n bin_dir = crt_bin_dir(platform)\n FileUtils.mkdir_p(bin_dir)\n bin_name = crt_bin_name(platform)\n search_dirs = [\n 'bin', # windows\n 'lib64', # some 64bit unix variants\n 'lib', # some unix variants\n ]\n tmp_path = find_file(bin_name, search_dirs, tmp_install_dir)\n FileUtils.cp(tmp_path, bin_dir, verbose: true)\nend", "def isWindows()\r\n # See: http://stackoverflow.com/questions/4871309\r\n require 'rbconfig'\r\n return (RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/)\r\nend", "def initialize(name)\n init(name)\n yield self if block_given?\n define unless name.nil?\n puts \"Windows users require sed and perl for rake:compile - please install onto PATH\" if /mswin|mingw/ =~ RUBY_PLATFORM\n end", "def windows_nix?\n (/cygwin|mingw|bccwin/ =~ ruby_platform) != nil\n end", "def basic_compiler\n @basic_compiler ||= indirection.terminus(config['basic_compiler'])\n end", "def compiler?(verify = false, writeable_directory = false)\n if (session.methods.include? :cache) && session.cache.exists?(\"compiler?\")\n return session.cache.read(\"compiler?\")\n end\n writeable_directory = '/var/tmp/' unless writeable_directory == true\n compilers = ['gcc', 'cc', 'tcc', 'pcc']\n compilers.each do |tool|\n next unless installed?(tool)\n if verify == true\n\n tmp_file = writeable_directory.to_s + ::Rex::Text.rand_text_alpha(12)\n tmp_file_c = tmp_file + '.c'\n match_string = ::Rex::Text.rand_text_alpha(32)\n test_c = \"#include <stdio.h>\\nint main(void){printf(\\\"#{match_string} %c\\\",0x0a);return 0;}\"\n test_c.each_line do |line|\n cmd_exec(\"echo '#{line}'>>#{tmp_file_c}\")\n end\n compile = \"#{tool} #{tmp_file_c} -o #{tmp_file}\"\n cmd_exec(compile)\n out = cmd_exec(tmp_file)\n rm tmp_file\n rm tmp_file_c\n if out.match(match_string)\n session.cache.add(\"compiler?\", tool) if session.methods.include? :cache\n return tool\n end\n else\n session.cache.add(\"compiler?\", tool) if session.methods.include? :cache\n return tool\n end\n end\n session.cache.add(\"compiler?\", false) if session.methods.include? 
:cache\n false\n end", "def target\n @target ||= \"i386-mingw32\"\n end", "def windows?\n ::RUBY_PLATFORM =~ /mingw|mswin/\n end", "def compile\n return if changed_ruby_files.empty?\n\n errors = changed_ruby_files.each_with_object([]) do |file, es|\n output = `ruby -cw \"#{file}\" 2>&1`\n next if output == \"Syntax OK\\n\"\n\n es << output\n end\n heading('Ruby Warnings', errors.join) unless errors.empty?\n end", "def test_should_be_syntax_error\n \n dir = \"./test_case/syntax_error.rb\"\n filename = dir\n \n result = check_and_compile_file( dir , filename )\n \n assert_equal(false, result)\n \n end", "def runCompiler(_override_type = nil)\n runOpen3StuckPrevention('make', '-j', $compileThreads.to_s) == 0\n end", "def javac(*args)\n Buildr.application.deprecated 'Use Java::Commands.javac instead.'\n Commands.javac(*args)\n end", "def compile(path)\n\tif isExecutable(path) and not $link\n\t\t$keep = true\n\t\treturn path\n\tend\n\t\n\tbinaryPath = File.join($testDir, File.basename($link ? $source[0] : path, File.extname(path)))\n\n\tcompilerOutput = Tempfile.new(\"compiler\")\n\n# FIXME: If $link is true, it will only check the extension of the first argument.\n\tif File.extname($link ? $source[0] : path) == \".c\"\n\t\tsystem \"gcc -O2 -o \\\"#{binaryPath}\\\" #{path} &> #{compilerOutput.path}\"\n\telsif File.extname($link ? $source[0] : path) == \".cpp\"\n\t\tsystem \"g++ -O2 -o \\\"#{binaryPath}\\\" #{path} &> #{compilerOutput.path}\"\n\telse\n\t\t$stderr.puts \"This program only works with C or C++ source code.\"\n\t\texit 1\n\tend\n\n\tcompilerMessages = compilerOutput.read\n\n\tunless compilerMessages.empty?\n\t\tunless File.exists?(binaryPath)\n\t\t\t$stderr.puts red(\"Couldn't compile #{path}.\")\n\t\tend\n\t\t$stderr.puts yellow(\"Compiler output for #{path}:\")\n\t\t$stderr.puts compilerMessages\n\tend\n\n\tcompilerOutput.close\n\tcompilerOutput.unlink\n\n\tunless File.exists?(binaryPath)\n\t\tcleanup\n\t\texit 1\n\tend\n\n\treturn binaryPath\nend", "def windows?\n\tRbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/\nend", "def command\n log.debug self.pretty_inspect\n\n throw 'Invalid linker' unless @ld.is_a?(Linker)\n throw 'One or more source files are required' unless @sources.length > 0\n# cflags = default_flags\n# cflags.concat @flags\n# end\n# throw cflags\n\n# topdir = h[:topdir] || ''\n# ld = @ld\n# ldadd = h[:ldadd]\n# ld.flags = h[:ldflags]\n# ld.output = Platform.pathspec(h[:output])\n# ld.rpath = h[:rpath] if h[:rpath].length > 0\n\n# inputs = h[:sources]\n# inputs = [ inputs ] if inputs.is_a? 
String\n# inputs = inputs.map { |x| Platform.pathspec(topdir + x) }\n# throw 'One or more sources are required' unless inputs.count\n\n#TODO:if @combine\n# return [ @path, cflags, '-combine', ldflags, inputs, ldadd ].flatten.join(' ')\n#\n \n cmd = [ @path, '-DHAVE_CONFIG_H', '-I.', @platform_cflags, flags, '-c', @sources ].flatten.join(' ')\n\n cmd += Platform.dev_null if @quiet\n\n log.debug \"Compiler command: #{cmd}\"\n\n cmd\n end", "def compile_ruby\n if $DEBUG\n puts \"user_home: #{user_home}\"\n puts \"user_local: #{user_local}\"\n puts \"make_command: #{make_command}\"\n puts \"version: #{version}\"\n puts \"ruby_cc_version: #{ruby_cc_version}\"\n puts \"ruby_source: #{ruby_source}\"\n puts \"srcdir: #{srcdir}\"\n puts \"blddir: #{blddir}\"\n puts \"libdir: #{libdir}\"\n end\n\n mingw32\n environment\n download_source\n extract_source\n makefile_in_bak # create Makefile.in.bak\n makefile_in # create Makefile.in\n configure # create Makefile\n make # creates ruby.exe\n make_install\n update_config\n end", "def compile_to_ruby\n \"\"\n end", "def compile(script, filename=nil, linenum=nil)\n filename ||= 'none'\n linenum ||= 1\n native_compile(script, filename, linenum)\n end", "def do_compile_command(name, file)\n output, result = compile_file(name)\n output = clean_debug(output)\n if (output != \"\" || result != 0)\n puts \"----------------------------------------------------------------------\"\n puts \"BUG: %s failed to compile\" % name\n puts \"----------------------------------------------------------------------\"\n puts output\n puts \"----------------------------------------------------------------------\"\n exit 1\n end\nend", "def is_mingw?\n @path =~ /mingw/ # Kludge\n end", "def tidy_command\n is_windows = (RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/)\n bin = is_windows ? 'tidy.exe' : 'tidy'\n cmd = \"#{bin} #{@tidy_flags.join(' ')}\"\n cmd\n end", "def make_compile_rule(src)\n sha = Digest::SHA1.hexdigest(src)\n fn = File.basename(src, '.c')\n dest = \"build/#{sha}_#{fn}.o\"\n SOURCE_LOOKUP[src] = dest\n\n CompileTask.define_task(dest => [src]) do |t|\n sh \"#{TOOLCHAIN}gcc #{t.cflags.join(' ')} -c #{t.prerequisites.first} -o #{t.name}\"\n end\nend", "def make_deps t\n sh \"gcc -MM -MF #{t.name} #{$C_FLAGS.join ' '} -c #{t.source}\"\nend", "def cflags(cc_path)\n res = []\n if cc_path =~ /clang/\n res.push '-gcc-toolchain', @ndk_path + '/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86'\n res.push '-isystem', @ndk_path + '/toolchains/llvm-3.1/prebuilt/linux-x86/lib/clang/3.1/include'\n res.push %w{ -ffunction-sections -funwind-tables -fstack-protector -D__ARM_ARCH_5__ -D__ARM_ARCH_5T__ -D__ARM_ARCH_5E__ -D__ARM_ARCH_5TE__ -target armv7-none-linux-androideabi -march=armv7-a -mfloat-abi=softfp -mfpu=vfpv3-d16 -mthumb -Os -fomit-frame-pointer -fno-strict-aliasing -I. 
-DANDROID -fblocks -D_GNU_SOURCE -D__BLOCKS__ -Wa,--noexecstack -O0 -g }\n res.push \"-I#{@ndk_path}/platforms/android-14/arch-arm/usr/include\"\n end\n res.flatten\n end", "def build_projects_for_dir(path)\n puts\n puts \"Building Projects for Directory: #{path}\"\n \n Dir.foreach path do |filename|\n\n if $mac && filename.match(/.*\\.xcodeproj/)\n puts \" Building #{filename}\" \n result = `cd #{path}; xcodebuild -project #{filename} 2>&1`\n if result.match(/\\*\\* BUILD SUCCEEDED \\*\\*/)\n puts \" (success)\"\n else\n puts \" (FAIL) ************************************\"\n #puts result\n end\n\n elsif $win && filename.match(/.*\\.vcxproj/) && !filename.match(/.*\\.vcxproj\\..*/)\n toolset = \"\"\n toolset = \"/p:PlatformToolset=v110\" if $vs2012\n\n puts \" Building #{filename} for Win32\"\n result = `msbuild.exe /target:rebuild /p:Platform=Win32 #{toolset} #{path}/#{filename} 2>&1`\n if result.match(/(0 error|up\\-to\\-date|Build succeeded\\.)/)\n puts \" (success)\"\n else\n puts \" (FAIL) ************************************\"\n #puts result\n end\n\n puts \" Building #{filename} for x64\"\n result = `msbuild.exe /target:rebuild /p:Platform=x64 #{toolset} #{path}/#{filename} 2>&1`\n if result.match(/(0 error|up\\-to\\-date|Build succeeded\\.)/)\n puts \" (success)\"\n else\n puts \" (FAIL) ************************************\"\n #puts result\n end\n\n elsif File.directory?(\"#{path}/#{filename}\") && filename != \".\" && filename != \"..\" && filename != \"build\" && filename != \"sysbuild\" && !filename.match(/.*\\.xcodeproj/)\n build_projects_for_dir(\"#{path}/#{filename}\")\n end\n end\n \n puts\nend", "def compile\n Dir.chdir(build_path) do\n do_compile\n end\n end", "def compile_extension(extension, platform)\n compiler_options = compiler_options()\n compiler_class = compiler_class(extension)\n\n compiler_options[:platform] = platform\n\n compiler = compiler_class.new(extension, compiler_options)\n\n compiler.compile\n end", "def compile_scheme_scripts\n rm_f 'config/TRUST-PRECOMPILED'\n # We need to give a real path here, because \".\" will cause problems for\n # the engine. And it needs to be a Windows path, not a Cygwin path!\n # We need to be in command line mode, not runtime mode, so we will write\n # out our file count for our progress bar.\n run './engine/win32/Halyard', '-c', '(exit-script)', absolute_path(pwd)\n run 'touch', 'config/TRUST-PRECOMPILED'\nend", "def make(args)\n # Maybe we should write an \"uncrustify\" fastlane action?...\n Dir.chdir '..' do\n sh 'make ' + args\n end\nend", "def make(args)\n # Maybe we should write an \"uncrustify\" fastlane action?...\n Dir.chdir '..' 
do\n sh 'make ' + args\n end\nend", "def compile(script, filename=nil, linenum=nil, global=nil)\n filename ||= 'none'\n linenum ||= 1\n native_compile(script, filename, linenum, global)\n end", "def windows?\n ruby_platform?(:windows)\nend", "def locate_sysnative_cmd(cmd)\n if ::File.exist?(\"#{ENV['WINDIR']}\\\\sysnative\\\\#{cmd}\")\n \"#{ENV['WINDIR']}\\\\sysnative\\\\#{cmd}\"\n elsif ::File.exist?(\"#{ENV['WINDIR']}\\\\system32\\\\#{cmd}\")\n \"#{ENV['WINDIR']}\\\\system32\\\\#{cmd}\"\n else\n cmd\n end\nend", "def ruby_arch\n case Common.target_platform\n when /darwin/\n 'x86_64-darwin10'\n when 'linux-x86_64'\n 'x86_64-linux'\n when 'linux-x86'\n 'i686-linux'\n when /windows/\n 'x64-mingw64'\n end\nend", "def windows_ruby?\n !!(RUBY_PLATFORM =~ /mswin|mingw|windows/)\n end", "def try_to_compile(code, command_options = nil)\n begin\n boolean = false\n stderr_orig = $stderr.dup\n stdout_orig = $stdout.dup\n\n Dir.chdir(Dir.tmpdir) do\n File.write(cpp_source_file, code)\n\n if command_options\n command = \"#{cpp_command} #{command_options} \"\n else\n command = \"#{cpp_command} \"\n end\n\n command += \"#{cpp_out_file} \"\n command += cpp_source_file\n\n $stderr.reopen(IO::NULL)\n $stdout.reopen(IO::NULL)\n boolean = system(command)\n end\n ensure\n FileUtils.rm_f(cpp_source_file)\n FileUtils.rm_f(cpp_out_file)\n $stdout.reopen(stdout_orig)\n $stderr.reopen(stderr_orig)\n end\n\n boolean\n end", "def test_compile_bundle_artifacts_path_with_spaces\n skip(\"only necessary to test on Windows\") unless Gem.win_platform?\n old_tempdir = @tempdir\n old_output_dir = @output_dir\n\n old_tmp = ENV[\"TMP\"]\n old_temp = ENV[\"TEMP\"]\n old_tmpdir = ENV[\"TMPDIR\"]\n\n # We want to make sure Dir.tmpdir returns the path containing \"DIRWIT~1\"\n # so that we're testing whether the compiler expands the path properly. To\n # do this, \"dir with spaces\" must not be the last path component.\n #\n # This is because Dir.tmpdir calls File.expand_path on ENV[TMPDIR] (or\n # ENV[TEMP], etc.). When \"DIRWIT~1\" is the last component,\n # File.expand_path will expand this to \"dir with spaces\". 
When it's not\n # the last component, it will leave \"DIRWIT~1\" as-is.\n @tempdir = File.join(@tempdir, \"dir with spaces\", \"tmp\")\n FileUtils.mkdir_p(@tempdir)\n @tempdir = File.join(old_tempdir, \"DIRWIT~1\", \"tmp\")\n\n @output_dir = File.join(@tempdir, \"output\")\n FileUtils.mkdir_p(@output_dir)\n\n [\"TMP\", \"TEMP\", \"TMPDIR\"].each { |varname| ENV[varname] = @tempdir }\n\n util_reset_arch\n\n artifact = \"foo.#{RbConfig::CONFIG[\"DLEXT\"]}\"\n\n gem_file = util_bake_gem(\"foo\") { |s|\n util_fake_extension s, \"foo\", util_custom_configure(artifact)\n }\n\n compiler = Gem::Compiler.new(gem_file, :output => @output_dir)\n output_gem = nil\n\n use_ui @ui do\n output_gem = compiler.compile\n end\n\n assert_path_exists File.join(@output_dir, output_gem)\n spec = util_read_spec File.join(@output_dir, output_gem)\n\n assert_includes spec.files, \"lib/#{artifact}\"\n ensure\n if Gem.win_platform?\n FileUtils.rm_rf @tempdir\n\n ENV[\"TMP\"] = old_tmp\n ENV[\"TEMP\"] = old_temp\n ENV[\"TMPDIR\"] = old_tmpdir\n\n @tempdir = old_tempdir\n @output_dir = old_output_dir\n end\n end", "def compile\n end", "def search(compilers)\n res = nil\n if ENV['CC']\n res = ENV['CC']\n else\n compilers.each do |command|\n if (command =~ /^\\// and File.exists?(command)) or Platform.which(command)\n res = command\n break\n end\n end\n end\n\n # FIXME: kludge for Windows, breaks mingw\n if Platform.is_windows?\n res = 'cl.exe'\n end\n\n throw 'No suitable compiler found' if res.nil? || res == ''\n\n if Platform.is_windows? && res.match(/cl.exe/i)\n help = ' /? <NUL'\n else\n help = ' --help'\n end\n \n # Verify the command can be executed\n cmd = res + help + Platform.dev_null\n unless Platform.execute(cmd)\n puts \"not found\"\n print \" -- tried: \" + cmd\n raise\n end\n\n puts res\n res\n end", "def get_filecode()\n \"__EMIT_#{ARGV[0].gsub(/[^\\w]/, \"_\").upcase}__\"\nend", "def create_compile_ext_tasks(source_root, dest_root, invoking_task)\n compiled_ext = \"#{source_root}/#{SPECIAL_BUNDLE_NAMES[File.basename(source_root)] || File.basename(source_root)}.bundle\"\n create_copy_file_tasks(FileList[compiled_ext], source_root, dest_root, invoking_task)\n file compiled_ext => FileList[\"#{source_root}/*.c\"] do\n cd source_root do\n `ruby extconf.rb; make >/dev/null 2>&1`\n end\n end\nend", "def compile!\n puts \"Compiling website..\"\n puts %x[rm -rf output]\n puts %x[nanoc compile]\nend", "def compile!\n puts \"Compiling website..\"\n puts %x[rm -rf output]\n puts %x[nanoc compile]\nend", "def compile!\n puts \"Compiling website..\"\n puts %x[rm -rf output]\n puts %x[nanoc compile]\nend", "def folly_flags()\n return NewArchitectureHelper.folly_compiler_flags\nend", "def get_compile_line(input, output)\n\n\tlibs_path = LIBRARY_PATH.map{ |lib| \"-library-path+='#{File.join(current_path, lib)}'\"}.join(\" \")\n\tsources_path = SOURCE_PATH.map{ |lib| \"-sp+='#{File.join(current_path, lib)}'\"}.join(\" \")\n\n\tline = \"mxmlc #{File.join(current_path(), input)} -o=#{File.join(current_path(), output)} -debug=#{DEBUG} #{libs_path} #{sources_path} #{EXTRA}\"\nend", "def platform\n \"win\"\n end", "def compileSourceOfTarget(target)\n\t\tincstr = @includesString\t\n\t\tsrc_file = targetToSource(target)\n\t\ttarget_ext = File.extname target\n\t\tsource_ext = File.extname src_file\n\t\tartifacts = [target]\n\t\tdebug \"COMP: Target: \\'#{target}\\'\"\n\t\t#debug \"COMP: Source: \\'#{src_file}\\'\"\n\t\t#expected artifacts\n\t\tif (target_ext == '.cmx') \n\t\t\tartifacts.push(changeExt(target, 
'.o'))\n\t\tend\n\t\tif (source_ext == '.ml')\n\t\t\tunless(File.exist?(changeExt(target, '.mli')))\n\t\t\t\tartifacts.push(changeExt(target, '.cmi'))\n\t\t\tend\n\t\tend\n\n\t\tcase @compiler\n\t\t\twhen @@byteCodeCompiler \n\t\t\t\tcmd = \"ocamlfind ocamlc -g -c \"\n\t\t\t\tcmd += \"-package #{@libsStr} \" unless(@libsStr == \"\") \n\t\t\t\tcmd += \"#{incstr} #{src_file}\"\n\t\t\twhen @@machineCodeCompiler\n\t\t\t\tcmd = \"ocamlfind ocamlopt -c \"\n\t\t\t\tcmd += \"-p \" if (@profilingEnabled)\n\t\t\t\tcmd += \"-package #{@libsStr} \" unless(@libsStr == \"\") \n\t\t\t\tcmd += \"#{incstr} #{src_file}\"\n\t\t\telse \n\t\t\t\tputs \"ERROR couldn't compile to file: \" + target\n\t\tend\n\t\tdebug cmd\n\t\t`#{cmd}`\n\t\tif($? == 0) #the compiler had no error\n\t\t\tartifacts.each do |artfile|\n\t\t\t\t#debug \"compiler ran\"\n\t\t\t\tbuilddest = toBuildDirFile(artfile)\n\t\t\t\t#debug \"\\t builddirtarget: \" + builddest\n\t\t\t\t#debug \"\\t target: \" + artfile \n\t\t\t\t#if(not File.exists?(artfile)) then exit end\n\t\t\t\tunless (artfile == builddest)\n\t\t\t\t\tFile.rename(artfile, builddest)\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend", "def identify_compiler_version(executable, version_accuracy: :major)\n raise 'unknown version accuracy' if version_accuracy != :major\n\n _exit_status, output = runOpen3CaptureOutput executable, '--version'\n\n match = output.match(/\\(GCC\\)\\s+(\\d+)\\.(\\d+)\\.(\\d+)/i)\n\n if match\n major = match.captures[0]\n return \"gcc_#{major}\"\n end\n\n match = output.match(/clang\\s+version\\s+(\\d+)\\.(\\d+)\\.(\\d+)/i)\n\n if match\n major = match.captures[0]\n return \"clang_#{major}\"\n end\n\n raise 'could not detect compiler type or version'\nend", "def windows?\n RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/\nend", "def build filetask,system_config,platform\n cmd_file=command_file(filetask.name,system_config,platform)\n if File.exists?(cmd_file)\n config=system_config.platform_config(platform)\n if cmd_file.end_with?('.library')\n cmdline = librarian(cmd_file,config)\n else\n cmdline = linker(cmd_file,config)\n end\n sh(cmdline.join(' '))\n else\n raise GaudiError, \"Missing command file for #{filetask.name}\"\n end\n end", "def compiled_cython_file?\n return false unless ['.c', '.cpp'].include? extname\n return false unless lines.count > 1\n return lines[0].include?(\"Generated by Cython\")\n end", "def rule\n [ '$(CC)', '-DHAVE_CONFIG_H', '-I.', flags, '$(CFLAGS)', '-c', @sources ].flatten.join(' ')\n end", "def symlink_host_gcc\n version = DevelopmentTools.non_apple_gcc_version \"/usr/bin/gcc\"\n return if version.null?\n suffix = (version < 5) ? version.to_s[/^\\d+\\.\\d+/] : version.to_s[/^\\d+/]\n return if File.executable?(\"/usr/bin/gcc-#{suffix}\") || File.executable?(HOMEBREW_PREFIX/\"bin/gcc-#{suffix}\")\n FileUtils.mkdir_p HOMEBREW_PREFIX/\"bin\"\n [\"gcc\", \"g++\", \"gfortran\"].each do |tool|\n source = \"/usr/bin/#{tool}\"\n dest = HOMEBREW_PREFIX/\"bin/#{tool}-#{suffix}\"\n next if !File.executable?(source) || File.executable?(dest)\n FileUtils.ln_sf source, dest\n end\n end", "def symlink_host_gcc\n version = DevelopmentTools.non_apple_gcc_version \"/usr/bin/gcc\"\n return if version.null?\n suffix = (version < 5) ? 
version.to_s[/^\\d+\\.\\d+/] : version.to_s[/^\\d+/]\n return if File.executable?(\"/usr/bin/gcc-#{suffix}\") || File.executable?(HOMEBREW_PREFIX/\"bin/gcc-#{suffix}\")\n FileUtils.mkdir_p HOMEBREW_PREFIX/\"bin\"\n [\"gcc\", \"g++\", \"gfortran\"].each do |tool|\n source = \"/usr/bin/#{tool}\"\n dest = HOMEBREW_PREFIX/\"bin/#{tool}-#{suffix}\"\n next if !File.executable?(source) || File.executable?(dest)\n FileUtils.ln_sf source, dest\n end\n end", "def compile(tokens, options = T.unsafe(nil)); end", "def _inter source\n c = compile source\n cifrom c\nend", "def with_standard_compiler_flags(env = {}, opts = {})\n env ||= {}\n opts ||= {}\n compiler_flags =\n case Ohai[\"platform\"]\n when \"aix\"\n {\n \"CC\" => \"xlc_r -q64\",\n \"CXX\" => \"xlC_r -q64\",\n \"CFLAGS\" => \"-q64 -I#{install_dir}/embedded/include -D_LARGE_FILES -O\",\n \"LDFLAGS\" => \"-q64 -L#{install_dir}/embedded/lib -Wl,-blibpath:#{install_dir}/embedded/lib:/usr/lib:/lib\",\n \"LD\" => \"ld -b64\",\n \"OBJECT_MODE\" => \"64\",\n \"ARFLAGS\" => \"-X64 cru\",\n }\n when \"solaris2\"\n {\n \"CC\" => \"gcc -m64 -static-libgcc\",\n \"LDFLAGS\" => \"-Wl,-rpath,#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib -static-libgcc\",\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include -O2\",\n }\n when \"freebsd\"\n {\n \"CC\" => \"clang\",\n \"CXX\" => \"clang++\",\n \"LDFLAGS\" => \"-L#{install_dir}/embedded/lib -Wl,-rpath,#{install_dir}/embedded/lib\",\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include -O3 -D_FORTIFY_SOURCE=2 -fstack-protector\",\n }\n when \"windows\"\n arch_flag = windows_arch_i386? ? \"-m32\" : \"-m64\"\n opt_flag = windows_arch_i386? ? \"-march=i686\" : \"-march=x86-64\"\n {\n \"LDFLAGS\" => \"-L#{install_dir}/embedded/lib #{arch_flag} -fno-lto\",\n # We do not wish to enable SSE even though we target i686 because\n # of a stack alignment issue with some libraries. We have not\n # exactly ascertained the cause but some compiled library/binary\n # violates gcc's assumption that the stack is going to be 16-byte\n # aligned which is just fine as long as one is pushing 32-bit\n # values from general purpose registers but stuff hits the fan as\n # soon as gcc emits aligned SSE xmm register spills which generate\n # GPEs and terminate the application very rudely with very little\n # to debug with.\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include #{arch_flag} -O3 #{opt_flag}\",\n }\n else\n {\n \"LDFLAGS\" => \"-Wl,-rpath,#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib\",\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include -O3 -D_FORTIFY_SOURCE=2 -fstack-protector\",\n }\n end\n\n # merge LD_RUN_PATH into the environment. most unix distros will fall\n # back to this if there is no LDFLAGS passed to the linker that sets\n # the rpath. the LDFLAGS -R or -Wl,-rpath will override this, but in\n # some cases software may drop our LDFLAGS or think it knows better\n # and edit them, and we *really* want the rpath setting and do know\n # better. in that case LD_RUN_PATH will probably survive whatever\n # edits the configure script does\n extra_linker_flags = {\n \"LD_RUN_PATH\" => \"#{install_dir}/embedded/lib\",\n }\n\n if solaris2?\n ld_options = \"-R#{install_dir}/embedded/lib\"\n\n if platform_version.satisfies?(\"<= 5.10\")\n # in order to provide compatibility for earlier versions of libc on solaris 10,\n # we need to specify a mapfile that restricts the version of system libraries\n # used. 
See http://docs.oracle.com/cd/E23824_01/html/819-0690/chapter5-1.html\n # for more information\n # use the mapfile if it exists, otherwise ignore it\n mapfile_path = File.expand_path(Config.solaris_linker_mapfile, Config.project_root)\n ld_options << \" -M #{mapfile_path}\" if File.exist?(mapfile_path)\n end\n\n # solaris linker can also use LD_OPTIONS, so we throw the kitchen sink against\n # the linker, to find every way to make it use our rpath. This is also required\n # to use the aforementioned mapfile.\n extra_linker_flags[\"LD_OPTIONS\"] = ld_options\n end\n\n env.merge(compiler_flags)\n .merge(extra_linker_flags).\n # always want to favor pkg-config from embedded location to not hose\n # configure scripts which try to be too clever and ignore our explicit\n # CFLAGS and LDFLAGS in favor of pkg-config info\n merge({ \"PKG_CONFIG_PATH\" => \"#{install_dir}/embedded/lib/pkgconfig\" }).\n # Set default values for CXXFLAGS and CPPFLAGS.\n merge(\"CXXFLAGS\" => compiler_flags[\"CFLAGS\"])\n .merge(\"CPPFLAGS\" => compiler_flags[\"CFLAGS\"])\n .merge(\"OMNIBUS_INSTALL_DIR\" => install_dir)\n end", "def build_this bt;\r\n nmake = NMake.new\r\n cmd = '\"' + nmake.path + '\" -f makefile.vc'\r\n Platform.execute cmd, wd: (builddir bt)\r\n end", "def initialize_compiler\n self.compile_tasks = [:multi, :test, :check_manifest]\n end", "def compile\r\n case language\r\n when :boolexp\r\n true\r\n when :fart\r\n @prog = Farts::Parser.new.parse(src)\r\n log.info \"Compile of FART program - #{name}\"\r\n true\r\n else\r\n false\r\n end\r\n rescue Exception\r\n log.error $!\r\n @prog = nil\r\n false\r\n end", "def test_assembles_one_array_argument_into_cmd\n Crd::Flex::Command.new 'mxmlc' do |s|\n s.source_path << 'src'\n s.source_path << 'lib/src'\n cmd = s.to_cmd.split( /\\s+/ )\n assert_equal( 'mxmlc', cmd.shift )\n assert( cmd.include?( '-source-path+=src,lib/src' ), 'Could not find argument in to_cmd' )\n end\n end", "def define_compiler_tasks\n require \"rake/extensiontask\"\n\n @extensions.each do |name|\n clean_globs << \"lib/#{name}/*.{so,bundle,dll}\"\n\n Rake::ExtensionTask.new name, spec do |ext|\n ext.lib_dir = File.join(*[\"lib\", name.to_s, ENV[\"FAT_DIR\"]].compact)\n end\n end\n\n compile_tasks.each do |t|\n task t => :compile\n end\n rescue LoadError\n warn \"Couldn't load rake-compiler. Skipping. Run `rake newb` to fix.\"\n end", "def setupEnv\n # We require that we are ran in the developer shell. 
Some deps also\n # need the vcvarsall but for that they apply it on a case by case\n # basis, so we just need to check that we are correctly setup for\n # visual studio stuff\n\n # if which(\"MSBuild.exe\") == nil\n\n # warning \"MSBuild not found\"\n # onError %{You need to run this setup in \"Developer Command Prompt for VS 2017\"}\n\n # end\n end", "def compile_native\n\n # Get include directories\n incl_dir_ruby2d = \"#{Ruby2D.gem_dir}/ext/ruby2d/\"\n incl_dir_deps = \"#{Ruby2D.assets}/include/\"\n\n # Add compiler flags for each platform\n case $RUBY2D_PLATFORM\n\n when :macos\n ld_dir = \"#{Ruby2D.assets}/macos/universal/lib\"\n\n c_flags = '-arch arm64 -arch x86_64'\n\n ld_flags = ''\n ['mruby', 'SDL2', 'SDL2_image', 'SDL2_mixer', 'SDL2_ttf',\n 'jpeg', 'jxl', 'avif', 'png', 'tiff', 'webp',\n 'mpg123', 'ogg', 'FLAC', 'vorbis', 'vorbisfile', 'modplug',\n 'freetype', 'harfbuzz', 'graphite2'].each do |name|\n add_ld_flags(ld_flags, name, :archive, ld_dir)\n end\n\n ld_flags << \"-lz -lbz2 -liconv -lstdc++ \"\n ['Cocoa', 'Carbon', 'CoreVideo', 'OpenGL', 'Metal', 'CoreAudio', 'AudioToolbox',\n 'IOKit', 'GameController', 'ForceFeedback', 'CoreHaptics'].each do |name|\n add_ld_flags(ld_flags, name, :framework)\n end\n\n when :linux, :linux_rpi, :bsd\n # TODO: implement this\n # ld_flags = '-lSDL2 -lSDL2_image -lSDL2_mixer -lSDL2_ttf -lm -lGL'\n\n when :windows\n\n if RUBY_PLATFORM =~ /ucrt/\n ld_dir = \"#{Ruby2D.assets}/windows/mingw-w64-ucrt-x86_64/lib\"\n else\n ld_dir = \"#{Ruby2D.assets}/windows/mingw-w64-x86_64/lib\"\n end\n\n ld_flags = '-static -Wl,--start-group '\n ['mruby',\n 'SDL2',\n 'SDL2_image', 'jpeg', 'png', 'tiff', 'webp', 'jxl', 'hwy', 'jbig', 'deflate', 'lzma', 'zstd', 'Lerc',\n 'SDL2_mixer', 'mpg123', 'FLAC', 'vorbis', 'vorbisfile', 'ogg', 'modplug', 'opus', 'opusfile', 'sndfile',\n 'SDL2_ttf', 'freetype', 'harfbuzz', 'graphite2', 'bz2', 'brotlicommon', 'brotlidec',\n 'glew32', 'stdc++', 'z', 'ssp'\n ].each do |name|\n add_ld_flags(ld_flags, name, :archive, ld_dir)\n end\n ld_flags << '-lmingw32 -lopengl32 -lole32 -loleaut32 -limm32 -lversion -lwinmm -lrpcrt4 -mwindows -lsetupapi -ldwrite '\\\n '-lws2_32 -lshlwapi '\n ld_flags << '-Wl,--end-group'\n end\n\n # Compile the app\n run_cmd \"cc #{c_flags} -I#{incl_dir_ruby2d} -I#{incl_dir_deps} build/app.c #{ld_flags} -o build/app\"\n\n create_macos_bundle if $RUBY2D_PLATFORM == :macos\nend", "def compile(script, filename=nil, linenum=nil)\n raise NotImplementedError\n end", "def test_assembles_two_array_arguments_into_cmd\n Crd::Flex::Command.new 'mxmlc' do |s|\n s.source_path << 'src'\n s.source_path << 'lib/src'\n s.library_path << 'lib/bin'\n cmd = s.to_cmd.split( /\\s+/ )\n assert_equal( 'mxmlc', cmd.shift )\n assert( cmd.include?( '-source-path+=src,lib/src' ), 'Could not find argument in to_cmd' )\n assert( cmd.include?( '-library-path+=lib/bin' ), 'Could not find argument in to_cmd' )\n end\n end", "def make_asm(test_files)\n Dir.mkdir(TESTS_BIN) unless File.exists?(TESTS_BIN)\n log = {}\n # in the remote case of porting to windows, this should use Open3 instead of '2>&1'\n test_files.each do |f|\n output = `powerpc-elf-gcc -O0 --specs=archc -S #{TESTS_CODE+'/'+f} -o #{TESTS_BIN+'/'+f[0..-3]+'.s'} 2>&1`\n log[f] = output == '' ? 
\"No error.\" : output\n end\n return log\nend", "def windows?\n !!(RUBY_PLATFORM =~ /mswin|mingw/)\n end", "def compile\n puts \"Compilando archivos fuentes:\"\n @objs.each do |obj|\n command = \"#{@cc} -fpic -c -o #{obj} #{obj[0..-2] + \"c\"} #{@cflags}\"\n puts \"\\t\"+ command\n exit (0) if not((system(command)))\n end\n \n puts \"Convirtiendo a bibliotecas dinamicas\"\n system(\"mkdir lib\")\n @shared.each do |obj|\n library = obj.split('/').last\n compiled_libraries = `ls #{@lib_dir}`.split(\" \")\n \n libs = compiled_libraries.inject(\"\") {\n |string,lib|\n string += \"-l#{lib[3...-3]} \"\n }\n \n if (@so == \"linux\")\n command = \"#{@cc} -shared -o lib/lib#{library} #{obj[0..-3] + \"o\"}\" +\n \" -L#{@lib_dir} #{libs}\"\n elsif (@so == \"mac\")\n command = \"#{@cc} -shared -o lib/lib#{library} #{obj[0..-3] + \"o\"}\" +\n \" -L#{@lib_dir} #{libs}\" \n end\n puts \"\\t\" + command\n puts \"No compilo de forma correcta\" if not((system(command)))\n end\nend", "def compiledo\n\n end", "def build_env_bat\n connector_dir = File.join(@mysql_dir, 'lib')\n env = {mysql_lib: to_windows_path(connector_dir)}\n\n print 'ruby.exe location: '; env[:ruby_bin] = read_path\n print '7z.exe location: '; env[:zip_bin] = read_path\n print 'wincmp3.exe location: '; env[:compare_bin] = read_path\n puts\n\n FileUtils.cd(FRAMEWORK_ROOT) { File.write('env.bat', ENV_BAT_TEMPLATE % env) }\n end", "def gcc?\n linux? && present?('g++')\n end" ]
[ "0.63968444", "0.61771774", "0.5931661", "0.5886322", "0.581332", "0.577381", "0.5728605", "0.56815547", "0.565777", "0.5630128", "0.56062436", "0.5596375", "0.5581845", "0.5578365", "0.5558083", "0.5547668", "0.5503448", "0.54798096", "0.5472407", "0.54627705", "0.5406443", "0.53834856", "0.53483784", "0.53367704", "0.53339666", "0.5330942", "0.53267646", "0.53100884", "0.5233261", "0.52284193", "0.5223838", "0.52066004", "0.5197429", "0.51931894", "0.5190574", "0.51861644", "0.51797706", "0.51583236", "0.51395196", "0.5129348", "0.5128069", "0.512716", "0.5125656", "0.5124717", "0.51200026", "0.51071894", "0.5102464", "0.50990695", "0.5095099", "0.509351", "0.5087001", "0.50729495", "0.5070503", "0.50695765", "0.50683224", "0.50566393", "0.50398487", "0.50398487", "0.5039219", "0.50329065", "0.5022895", "0.50187993", "0.5018369", "0.50171137", "0.50103647", "0.5008273", "0.50075626", "0.5000959", "0.4994897", "0.49924007", "0.49924007", "0.49924007", "0.49808857", "0.4976306", "0.49739406", "0.49708274", "0.49521938", "0.49505132", "0.49463338", "0.49458805", "0.49446082", "0.49431422", "0.49431422", "0.49428654", "0.49395192", "0.4936356", "0.49287722", "0.48985735", "0.48966947", "0.48964107", "0.48957708", "0.48917028", "0.488954", "0.48859918", "0.48850995", "0.48820513", "0.48812395", "0.48798504", "0.4876473", "0.48743927", "0.4869121" ]
0.0
-1
Method to add an item to a list
input: list, item name, and optional quantity
steps: take a string, take an item from another input, [nil]
output: list with new item added, and quantity
def add_item(grocery_list, item, quantity=1)
  grocery_list[item] = quantity
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_item(list, item, quantity=1)\r\n# input: item name and optional quantity\r\n# steps: \r\n # Use shopping list as input\r\n # Use the item to be added as 2nd input\r\n # Use the item quantity as a 3rd input (look up whether optional input is possible)\r\n # Add the item and quantity to the shopping list\r\n list[item] = quantity\r\n# output: shopping list with the added item and quantity\r\n printlist(list)\r\nend", "def add_item(list, item_name, quantity = 1)\n\tlist[item_name] = quantity\n\tlist\nend", "def add_item(list, item_name, quantity = 1)\n\tlist[item_name] = quantity\n\tlist\nend", "def add_item(list, item_name, quantity = 1)\n\tlist[item_name] = quantity\n\tlist\nend", "def add_item(list, item_name, quantity)\n\tlist[item_name] = quantity\n\tlist\nend", "def add_item(list, item_name, quantity)\n\tlist[item_name] = quantity\n\tlist\nend", "def add_item(list, item, quantity = 1)\r\n# input: list, item name, and optional quantity\r\n# steps: \r\n # check for optional quantity. if not present, set to default (1)\r\n list[item] = quantity\r\n # update list with input\r\n puts \"#{item.upcase} has been added to your grocery list!\"\r\n p list\r\n# output: complete list, updated\r\nend", "def add(list, item_name, quantity)\n\tlist[item_name] = quantity\n\tlist\nend", "def add_item(list, add_item, add_quantity=1)\n# steps:\n # Add item as key and quantity as value to final_list hash(item and quantity will be user inputs)\n list[add_item] = add_quantity\n # Return updated hash\n list\nend", "def add_item(list_name, item, quantity=1)\r\n# input: list, item name, and optional quantity\r\n# steps: add item and its quantity to the hash\r\n list_name.store(item, quantity)\r\n# output: hash with new item\r\n p list_name\r\nend", "def add_to_list(list,item,quantity)\n\tupdate_item(list,item,quantity)\nend", "def add_item(list, item, quantity=0)\n\tlist[item] = quantity\n\tlist\nend", "def add_item(item, quantity, list)\n\tlist[item] = quantity\n\titem\nend", "def add_items(list, item_name, quantity=0)\r\n\tlist[item_name] = quantity\r\n\tlist\r\nend", "def additem(list, item, number = 1)\n# input: hash, item name and optional quantity\n# steps: push an item into the hash\n list.store(item, number)\nend", "def add_new_items(list, item_name, quantity=1)\n list[item_name] = quantity\n list\nend", "def add_item(list, name, quantity = 1) \r\n# input: item name and optional quantity\r\n# steps: \r\n# create add method with name and optional quantity arguments\r\n# add name and quantity to hash\r\n list[name] = quantity\r\n# output: print \"your item has been added to the hash\"\r\n return list\r\nend", "def add_item(item, quantity, list)\n\t# steps: \n\t# if the item is already in the list\n\tif list[item.to_sym] \n\t\t# add to the quantity\n\t\t#list[item.to_sym] = list[item.to_sym] + quantity\n\t\tlist[item.to_sym] += quantity\n\t# otherwise\n\telse\n\t\t# make a new key with the input quantity\n\t\tlist[item.to_sym] = quantity\n\tend\n\t# output:\n\t\t# updated list\n\tlist\nend", "def update_list(item_name, item_list, quantity)\n add_list(item_name, item_list, quantity)\nend", "def add_item(list, item_name, quantity=1)\n list[item_name] = quantity\nend", "def add_item(list, item_name_string, quantity=1)\r\n\titem_key = item_name_string.to_sym\r\n\tlist[item_key] = quantity\r\n\treturn list\r\nend", "def add_list(item_name, item_list, quantity = 1)\n item_list[item_name] = quantity\nend", "def add_item(list, item, quantity)\n\n\tlist[item] = quantity\n\tlist\n\nend", "def 
add_item(new_list, item_name, quantity=1)\r\n \r\n new_list[item_name] = quantity\r\n \r\nend", "def add_item (list, item, quantity = 1)\n\tlist[item] = quantity\n\treturn list\nend", "def add_item_to_list(list_name,item_to_add,quantity_to_add = 1)\n#create a hash element with the item name and the quantity, if specified\n#if no quantity is specified, default value = 1\n new_item = { \n item_name: item_to_add,\n quantity: quantity_to_add\n }\n# insert the hash into array \n list_name.push(new_item)\n new_item\nend", "def add(input_list, item, quantity)\n input_list[item] = quantity\nend", "def add_item(list, item, qty=nil, print=true)\n\t# input: \n\t# existing list, \n\t# item name (string), \n\t# quantity qty (int or string or nil(default)), \n\t# print? (t(default)/f)\n\t# output: updated list (array)\n\t\n\t# format list item \n\tif qty\n\t\tqty = qty.to_s\n\tend\n\titem = [item.strip.capitalize, qty]\n\t# append to list\n\tlist = list.push(item)\n\t# print list if print=true\n\tif print\n\t\tputs \"\\n\\nList updated: added item '#{item}'\\n\"\n\t\tprint_list(list)\n\tend\n\treturn list\nend", "def add_item(list, item_name, quantity)\n list[item_name.to_sym] = quantity\n list \nend", "def add_item(list, item_name, quantity = 1)\r\n list[item_name] = quantity\r\nend", "def add_item(list, new_item, quantity=1)\n list[new_item] = quantity \nend", "def add_item(list,item_name, qty)\n list[item_name] = qty\nend", "def add_item(my_list, item, quantity)\r\n #input: a string seperated by an integer \r\n #steps: use a method that adds an item and quantity to the hash.\r\n my_list.store(item, quantity)\r\n # use a hash or store method hash.merge!(key => value) \r\n #output = hash with the updated item/quantity. \r\n my_list\r\n \r\nend", "def add_item(existing_list, new_item, item_integer)\n\t# input: item name and optional quantity\n\t# steps: pass in item name as key and assign value\n\texisting_list[new_item] = item_integer\n\t# output: hash\n\tp existing_list\nend", "def add_item(current_list, item_added, quantity)\n current_list[item_added] = quantity\n current_list\nend", "def add_item(list, item, quantity=0)\n list[item] = quantity\n list\nend", "def add_item(list, item, quantity)\n\tlist[item] = quantity\n\tp list\nend", "def add_item(item,amount=1,the_lists)\r\n\t# steps: accept 2 arguments. 
Item name and Quantity (with default)\r\n\tif the_lists[item] != nil\r\n\t puts \"Item is already on the List !\"\r\n\t puts \"Added amount to the quantity\"\r\n\t the_lists[item] += amount\r\n\telse\r\n the_lists[item] = amount\r\n\tend\r\n\r\n\tprint_list(the_lists)\r\nend", "def add_to_list(list, item, quantity = 1)\n\tlist[item] = quantity\nend", "def list_add(list, item_name, quantity=1)\n list[item_name] = quantity\n p list\nend", "def add_item(item,quantity,list)\n\tlist[item] = quantity\nend", "def add_item(list, new_item, qty=1)\n list[new_item] = qty\nend", "def update_item(list, item, quantity)\r\n add_item(list, item, quantity)\r\n# input: Shopping list, item to be updated, new quantity\r\n# steps:\r\n # Use shopping list as input\r\n # Use item to be updated as input\r\n # Use new quantity to be updated as input\r\n # Update the quantity of the item on the list\r\n # list[item] = quantity\r\n# output: shopping list with updated quantity\r\n # printlist(list)\r\nend", "def list_adder(list, item_name, quantity=1)\n\tlist.store(item_name, quantity)\n\tp list\nend", "def add_item(list,name,quantity=1)\n list[name]=quantity\n return list\nend", "def add_list(list,new_item,quantity=1)\n list[new_item] = quantity\n list\nend", "def add_item(list, item, quantity = 1)\n list[item] = quantity\n list\nend", "def add_item(list, item, quantity)\n list[item] = quantity\n list\nend", "def update_quantity_of_item(list,item,quantity)\r\n add_item_to_list(list,item,quantity)\r\n list\r\nend", "def add_item(item, list, quantity)\n list[item] = quantity\nend", "def add_item(list, item, quantity=1)\n list[item] = quantity\nend", "def add_item(list,item,quantity=1)\r\n list[item] = quantity\r\n list\r\nend", "def add_to_list(list, item, quantity)\n #method to add items\n list[item] = quantity\nend", "def update_quantity(item, list, quantity)\n add_item(item, list, quantity)\nend", "def add_item(list, item_name, quantity = 1)\n if list.include?(item_name)\n list[item_name] += quantity\n else\n list[item_name] = quantity\n end\n list\nend", "def add_item(item_list, item, qty)\r\n item_list[item] = qty\r\n item_list\r\nend", "def list_add(hash_items, item_name, quantity = 1)\n hash_items [item_name] = quantity\n return hash_items\nend", "def add(list, item, quantity)\r\n\tlist[item] = quantity\r\n\tlist\r\nend", "def add_item(list,item,quantity=1)\n list[item] = quantity\nend", "def add_item(list,item,quantity=1)\n list[item] = quantity\nend", "def add_item(list, item_name, optional_quantity)\n\nlist[item_name] = optional_quantity\n\n# input: list, item name, and optional quantity\n# steps:\n # Add new hash key based on item name\n # Tell method there may or may not be a argument passed in for quantity\n # assign quantity to 'optional-quantity' argument in method definition\n # Access method that contains initialized hash def add_item(create_list, item_name, quantity = 3)\n# output:\n # updated grocery list\nend", "def add_item_to_list(grocery_list, grocery_item, quantity = 1)\n grocery_list.store(grocery_item,quantity)\n puts \"I've added #{grocery_item} to the list. 
Please pick up #{quantity} of these.\"\nend", "def add_item!(list, item, qty=1)\r\n list[item] = qty\r\n list\r\nend", "def add_item(list, item, quantity=1)\r\n\tlist[item] = quantity\r\n#\tp list\r\nend", "def add_item(list, item, qty)\n list[item] = qty\n list\nend", "def update(list, item, qty)\n add_item(list, item, qty)\nend", "def add_item(name, quantity, list)\n list[name] = quantity\n p list\n return list\nend", "def add_item(list, item, qty=1)\n list[item] = qty\n p \"#{item} : #{list[item]}\"\nend", "def add_item(list, item, quantity)\n list[item] = quantity\n return list\nend", "def add_item(list, item, quantity=1)\n list[item] = quantity\n p list\nend", "def add_item(list, item, quantity)\n list[item] = quantity\nend", "def add_item(list, item, quantity)\n list[item] = quantity\nend", "def update_quantity(list, item, quantity)\n\tadd_to_list(list, item, quantity)\nend", "def add (list, item, quantity)\n\tlist[item] = quantity\nend", "def input (list, item, quantity)\n\tlist[item] = quantity\nend", "def add_item(input_item, list, input_number=5)\n list[input_item] = input_number\n return list\nend", "def add_item(shopping_list, item, quantity=1)\r\n\r\n\tshopping_list[item] = quantity\r\n#\treturn shopping_list\r\nend", "def add_item(list, item, qty = 1)\n list[item] = qty\n\n list\nend", "def add_item(list, item, qty = 1)\n list[item] = qty\n return list\nend", "def list_add(list, item_name, quantity=1)\r\n list[item_name] = quantity\r\n p list\r\nend", "def add_item(list,name,value)\n list = List.find(list)\n list.add(Item.new(name,value))\n say_set list.name, name, value\n end", "def add_item(list, item, quantity)\n #list = item.push\n list[item] = quantity.to_i\n list\nend", "def adding_item(list,item,quantity = 1)\r\n# if quantity == nil\r\n# quantity = 1\r\n# end\r\n list[item] = quantity\r\n list\r\nend", "def update(item, quantity, list)\n\t# steps: if the item is in the list\n\tif list.include? item.to_sym\n\t\t# update the quantity\n\t\tlist[item.to_sym] = quantity\n\telse \n\t\tadd_item(item, quantity, list)\n\tend\n\t# output: return the updated list\n\tlist\nend", "def add_item(item,quantity,first_list)\n if quantity == \"\"\n quantity = 1\n else\n quantity\n end\n first_list[item] = quantity\n\nend", "def add(list, item, quantity)\r\n# input: item name and optional quantity\r\nlist.include?(item) ? list[item] += quantity : list[item] = quantity\r\np list\r\nend", "def add_item(item, quant=0)\n\t$grocery_list.store(item, quant)\n\tp \"You added #{item} to your list.\"\nend", "def add_item_or_update_quantity(hash_of_list, item_name, item_quantity = 1)\n hash_of_list[item_name] = item_quantity\n hash_of_list\nend", "def add_or_update_item(list, item, quantity=1)\n\tlist[item] = quantity\nend", "def add_item(list, item_name, quantity = 1)\n # steps:\n # check IF item is included in list\n # increment item by quantity\n # ELSE - set item as new key and assign it to quantity as value\n list.include?(item_name) ? 
list[item_name] += quantity : list[item_name] = quantity\n\n # output: return the updated hash\n list\nend", "def add_item(item_name, grocery_list, quantity=1)\n grocery_list[item_name] = quantity\n grocery_list\n end", "def add_item(grocery_list, item_name, quantity=1)\n\tgrocery_list[item_name] = quantity\n\treturn grocery_list\nend", "def add_item(item)\n\t\t#takes in item and adds to list\n\t\t@list << item\n\tend", "def add_item(list_items, item_name, item_qty)\n if list_items.include?(item_name)\n list_items[item_name] += item_qty\n else\n list_items[item_name] = item_qty\n end\nend", "def add_item (list, item, qty)\n list[item]=qty\nend", "def add_item(list, item, quantity = 1)\r\n list[item] = quantity\r\nend", "def add_to_list(item,quantity,list)\n list[item]=quantity\nend", "def add_item(list, item, quantity)\n list[item] = quantity\n p list\nend", "def add_item(grocery,item_name,quantity)\n # input: list, item name, and optional quantity\n # steps: insert item_name as a key and quantity as a value to the hash \n grocery[item_name] = quantity\n # output:display the latest list\n display_list(grocery)\nend", "def update_quantity(list, item_name, quantity)\n\tlist[item_name] = quantity\n\tlist\nend", "def update_quantity(list, item_name, quantity)\n\tlist[item_name] = quantity\n\tlist\nend" ]
[ "0.8416545", "0.82361287", "0.8235973", "0.8235973", "0.8181764", "0.8181764", "0.816011", "0.80380267", "0.79817057", "0.79689276", "0.79129446", "0.7896436", "0.7882986", "0.78582394", "0.78233325", "0.78039056", "0.78034896", "0.7787218", "0.77802134", "0.7777428", "0.7770904", "0.77372694", "0.7700453", "0.76558495", "0.7632041", "0.7629177", "0.762168", "0.76086205", "0.7588133", "0.75775623", "0.7575464", "0.7552176", "0.75401396", "0.75363505", "0.75354993", "0.7532587", "0.75134724", "0.7494735", "0.74902296", "0.7483883", "0.74807245", "0.7478379", "0.7445717", "0.7435907", "0.7424048", "0.741996", "0.74107057", "0.7388286", "0.7382994", "0.73759943", "0.7374047", "0.7369284", "0.73626435", "0.7358154", "0.7342273", "0.733886", "0.73385954", "0.73338026", "0.73335075", "0.73335075", "0.7331562", "0.73271024", "0.7313674", "0.73084337", "0.72996736", "0.7293659", "0.72929627", "0.72910345", "0.7290106", "0.72793627", "0.72789323", "0.72789323", "0.72787774", "0.7277372", "0.7275123", "0.727222", "0.72612256", "0.725767", "0.72476983", "0.72398335", "0.7228409", "0.7226484", "0.721152", "0.72107536", "0.72020346", "0.72002584", "0.71992695", "0.71989197", "0.7180408", "0.7167732", "0.7165112", "0.7162385", "0.716097", "0.7142372", "0.7132666", "0.71312493", "0.71297103", "0.7128456", "0.7125174", "0.711424", "0.711424" ]
0.0
-1
Method to remove an item from the list
input: list, string"key", quantity"value"
steps: find key/value and delete it
output: p the hash without the deleted key
def remove_item(grocery_list, item)
  grocery_list.delete(item)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_item(input_hash, item)\n# input: list, item name, and optional quantity\n# steps: use input item to delete key\n input_hash.delete(item)\n# output: hash data structure of key/value pairs\nreturn input_hash\nend", "def remove_item(my_list, item)\r\n# input: an item (something already in the list)\r\n# steps:\r\n my_list.delete(item)\r\n \r\n my_list\r\n# declare hash\r\n# delete method for item\r\n# output: hash with removed item\r\nend", "def remove_item(item_to_remove)\n $list_hash.delete_if{|current_item, quantity|item_to_remove==current_item} #goes through entire item in delete if hash if item in hash delete from hash\nend", "def remove_item(list_name, item)\r\n# input: list, item name\r\n# steps: delete item name and value from hash\r\n list_name.delete(item)\r\n# output: updated hash with item removed\r\np list_name\r\nend", "def remove_item(hash_list, item)\n hash_list.delete(item)\n p hash_list\nend", "def delitem(list, item)\n# input: list and key\n list.delete(item)\n# steps: delete a given key item\nend", "def remove_item(item,the_lists)\r\n\t\t# steps: search the hash for the key that match the item name\r\n the_lists.delete_if{|item_name,item_quantity| item_name == item}\r\n\t\t#then delete the element\r\n\r\n\t\tprint_list(the_lists)\r\nend", "def remove_item(list, rm_item)\n# steps:\n # use delete method with key (item) as argument\n list.delete(rm_item)\n # return list\n list\nend", "def remove_item(item, hash)\n hash.delete(item)\n return hash\nend", "def remove_item(list,item)\r\n list.delete_if {|key,value| key == item}\r\n list\r\nend", "def remove(item, hash)\n hash.delete(item)\nend", "def remove_item(list, item)\n\t# if list.keys.include?(item)\n\t# \tlist.delete(item)\n\t# end\n\t# list.delete(item) if list.keys.include?(item)\n\tlist.delete_if { |key, value| key == item } \n\t# Destructive method\n\tp list\nend", "def remove_item(item, hash)\n hash.delete(item)\n return hash\nend", "def remove_item(hash, item)\n hash.delete(item)\n return hash\nend", "def remove_item(hash, item)\n hash.delete(item)\n hash\nend", "def remove_item(hash, item)\n hash.delete(item)\n hash\nend", "def remove_item_from_list(hash, item_to_remove)\n if hash.has_key?(item_to_remove)\n hash.delete(item_to_remove)\n else\n puts \"It looks like #{item_to_remove} wasn't in the hash after all!\"\n end\nend", "def remove(item, hash)\n hash.delete(item)\n puts hash\nend", "def remove_from_list(list_hash, item)\r\n\tlist_hash.delete(item) { |el| \"#{el} not found\" }\r\n\tlist_hash\r\nend", "def remove_item(hash, item)\n hash.delete(item)\nend", "def remove_item(hash_of_items, item)\n hash_of_items.delete(item)\n hash_of_items\nend", "def deleted_item(list, key)\n list.delete_if {|k, v| k == \"#{key}\"}\n list\nend", "def remove_item(hash, item)\r\n\thash.delete(item)\r\n\thash\r\nend", "def remove(hash, item)\n hash.delete(item)\nend", "def remove(hash, item)\n hash.delete(item)\nend", "def remove_item(list, name)\r\n# create remove method with name arguments\r\n# check if item is in the hash\r\n if list[name] != nil\r\n# remove item if present\r\n list.delete(name)\r\n end\r\n# output: print \"your item has been deleted from the hash\"\r\n return list\r\nend", "def list_remove(hash_items, item_name)\n hash_items.delete(item_name)\n return hash_items\nend", "def remove_an_item(list_hash,item_name)\n if list_hash[item_name]\n puts \"Deleting item: #{item_name}.\"\n list_hash.delete(item_name)\n else\n puts \"Item does not exist.\"\n end\n\n list_hash\nend", "def remove_item(hash, key)\n 
hash.delete(key)\n p hash\nend", "def delete_item(hash, item)\n\thash.delete(item)\n\treturn hash \nend", "def remove_item(list, item_to_be_removed)\n # if list.has_key? item_to_be_removed\n # list.delete(item_to_be_removed)\n # end\n list.delete(item_to_be_removed) if list.has_key? item_to_be_removed\n list\nend", "def delete_hash_item(hash, item)\n [item, hash.delete(item)]\nend", "def remove_item(list, item)\n list_hash = list\n if list_hash.keys.include?(item)\n list_hash.delete(item)\n end\n return list_hash\nend", "def remove_item(hash, item_name)\r\n hash.delete(item_name)\r\n hash\r\nend", "def remove_item(list, item)\n if list.has_key?(item)\n list.delete(item)\n else\n puts \"ERROR: Item not in list\"\n end\n return list\nend", "def remove_item(list, key_name)\r\n\tlist.delete(key_name)\r\n\treturn list\r\nend", "def remove_item(hash,item)\n\tif hash[\"#{item}\"] == hash[\"#{item}\"]\n\t\thash.delete(\"#{item}\")\n\telse puts\n\t\t\"Invalid item\"\n\tend\n\treturn hash\nend", "def remove_item(list, list_item)\n if list.has_key?(list_item)\n list.delete(list_item)\n else\n puts \"Item is not in list\"\n end\n p list\n list\nend", "def remove_item(item, list)\n\t# steps: delete the item if it exists\n\tlist.delete_if {|list_item| list_item == item.to_sym}\n\t# output: updated list\n\tlist\nend", "def removeitem(list, item)\n\n list.delete_if { |iterator| iterator[:item] == item }\nend", "def remove_item(hash, item)\n if hash.has_key?(item)\n hash.delete(item)\n end\n return hash\nend", "def remove_from_list(item_to_rmv)\n item_to_rmv.to_str\n updated_list = Hash.new\n updated_list = $old_list.to_h\n #updated_list.delete_if {|key, value| key == item_to_rmv}\n if updated_list.include?(item_to_rmv) == true \n #p \"test\"\n #p item_to_rmv\n #p $old_list\n p updated_list.delete(item_to_rmv)\n p updated_list.each {|key, val| p key, val}\n #p updated_list\n else \n p \"that item isn't on the list\"\n end\n #if $old_list.to_h.include?(item_to_rmv) == true\n # updated_list.delete(item_to_rmv)\n # updated_list = $old_list.to_h\n #else\n # p \"that item isn't on the list\"\n #end\n \n \nend", "def remove_item(g_hash,item)\n g_hash.delete(item)\nend", "def remove_item(g_hash,item)\n g_hash.delete(item)\nend", "def remove_item(list, item)\r\n# input: shopping list and item to be removed\r\n# steps: \r\n # Use shopping list as input\r\n # Use item to be removed as input\r\n # Remove the item from the list if it exists on the list\r\n list.delete(item)\r\n# output: shopping list with item removed\r\n printlist(list)\r\nend", "def delete_item(grocery,item_name)\n # input: list, item name.\n # steps: delete item_name from the hash\n grocery.delete(item_name)\n # output: display the latest list\n display_list(grocery)\nend", "def remove_item(list, item_name)\n # steps:\n # check IF item is included in list\n # delete the item\n # ELSE, print a message to the user\n list.include?(item_name) ? 
list.delete(item_name) : puts(\"No #{item_name} on the list.\")\n \n # output: return updated hash or the original hash\n list\nend", "def remove_from_grocery_list(grocery_list, item_name)\n # steps: remove item from hash\n grocery_list.delete(item_name.to_sym)\n # output: explicit return updated hash\n grocery_list\nend", "def remove_item (item)\n item_hash[item].delete\nend", "def remove_item(list, item_name)\n if list.has_key?(item_name)\n list.delete(item_name)\n end\n list\nend", "def remove_item (list, item)\n list.delete(item)\nend", "def remove(input_list, item)\n input_list.delete(item)\nend", "def remove_item(list, item)\r\n# input: item to be removed, list\r\n# steps: \r\n # check if item exists\r\n # remove item\r\n list.delete(item)\r\n # print success message of item removed\r\n puts \"#{item.upcase} has been removed to your grocery list!\"\r\n p list\r\n# output: updated list\r\nend", "def remove_item(list, item)\n list.delete(item)\n return list.each {|k,v| puts \"#{k}: #{v}\"}\nend", "def item_remover(hash, item, quantity = 1)\n hash[item] -= quantity\n if hash[item] <= 0\n hash.delete(item) \n end\n hash\nend", "def remove_item(list, item)\n list.delete(item)\n list\nend", "def remove_item(item_to_be_removed)\n $groceryhash.delete(item_to_be_removed)\nend", "def remove_item(list, item)\n if list.has_key?(item) == false\n puts \"Nothing to remove\"\n else\n list.delete(item)\n end\n list\nend", "def del_item(list, item_to_del)\n list.delete(item_to_del)\nend", "def remove_item(updated_hash, food)\n updated_hash.delete(food)\n return updated_hash\nend", "def remove_item(updated_hash, food)\n updated_hash.delete(food)\n return updated_hash\nend", "def remove_item(list,item)\r\n\r\n list.delete(item)\r\n list\r\nend", "def remove_item(list, item_name)\r\n # list.delete_if { |item, amount| item == item_name }\r\n list.delete(item_name)\r\nend", "def remove_item(list, item)\r\n list.delete(item)\r\n list\r\nend", "def remove_item(list, item)\n list.delete(item)\nend", "def remove_item (item,list)\nlist.delete(item)\nlist\nend", "def remove_item(list, item)\n list.delete(item)\n list\nend", "def remove_item(list, item)\n list.delete(item)\n list\nend", "def remove_item(list, item)\n list.delete(item)\n list\nend", "def remove_item(list, item)\n list.delete(item)\n list\nend", "def remove_item(list, item)\n list.delete(item)\nend", "def remove_item(list, item)\n list.delete(item)\n list\nend", "def remove_item(list, item)\n list.delete(item)\n list\nend", "def delete_item(grocery_hash, remove_item)\n if grocery_hash.has_key?(remove_item)\n grocery_hash.delete(remove_item)\n else\n return \"Item is not on list.\"\n end\nend", "def remove_item(list, item)\n list.delete(item)\nend", "def remove_item(list, item)\n list.delete(item)\nend", "def remove_item (list, item)\n list.delete(item)\nend", "def delete_item(item, hash)\n# get item name\n\tif hash.has_key?(item)\n\t# if item is in hash, delete the item from hash\n\t\thash.delete(item)\n\telse\n\t# otherwise send error message if item is not in hash\n\t\tp \"#{item} is not in your grocery list.\"\n\tend\n\t# output: return the hash\n\thash\nend", "def remove_item(list, item_removed)\n list.delete(item_removed)\nend", "def remove_item(list, item)\n list.delete(item)\n p list\nend", "def remove_item(list, item)\n list.delete(item)\n p list\nend", "def remove_item(list, item)\n list.delete(item)\n p list\nend", "def remove_item(list, item_name)\r\n\tif list.has_key?(item_name)\r\n\t\tlist.delete(item_name)\r\n\tend\r\n\tlist\r\nend", 
"def remove_item(list,item)\n list.delete(item)\n p list\nend", "def remove_item(item, list)\n list.delete(item)\nend", "def remove_item(list, item_remove)\n list.delete(item_remove)\nend", "def remove(item)\n if @hash.include?(item)\n @hash.delete(item)\n item\n else\n nil\n end\n end", "def remove_item(list, item)\r\n list.delete(item)\r\n p list\r\nend", "def remove_item(list,item)\n\tlist.delete(item)\nend", "def remove_item(hash, item)\r\n grocery_list=hash\r\n if grocery_list.include?item\r\n grocery_list.delete(item)\r\n else\r\n puts \"Item not in list.\"\r\n end\r\n p grocery_list\r\nend", "def delete_item(list,item)\n list.delete(item)\n list\nend", "def delete_item(list,item)\n list.delete(item)\n list\nend", "def delete_item(list,item)\n list.delete(item)\nend", "def delete_item(list,item)\n list.delete(item)\nend", "def delete_item(list, item)\n list.delete(item)\n list\nend", "def delete_item(list, item)\n list.delete(item)\n list\nend", "def remove_item(item_list, item)\r\n item_list.delete(item)\r\n item_list\r\nend", "def remove_item(list, item)\n list.delete(item)\n return list\nend", "def delete_item(list, item)\n list.delete(item)\nend", "def remove_item(list, item)\n list.delete(item)\n return list\nend", "def remove_item(list, item)\n # list.delete_if do |grocery_item, qty|\n # grocery_item == item\n # end\n list.delete(item)\n\n list\nend" ]
[ "0.85334784", "0.82279915", "0.8173182", "0.8148352", "0.8082274", "0.80267936", "0.7893859", "0.78660905", "0.76861584", "0.76582456", "0.7614071", "0.75751853", "0.75597817", "0.7526166", "0.75247985", "0.75247985", "0.7509318", "0.7505354", "0.7504471", "0.7498081", "0.74787825", "0.74737763", "0.7473281", "0.74671334", "0.74671334", "0.74493915", "0.7430261", "0.7424191", "0.7417849", "0.7408795", "0.734189", "0.7333158", "0.73255086", "0.7321351", "0.7303229", "0.7302915", "0.72437394", "0.7220465", "0.7217317", "0.7206629", "0.71994525", "0.7194632", "0.7178953", "0.7178953", "0.7176843", "0.7175286", "0.7161193", "0.7151954", "0.7102785", "0.7090455", "0.7081666", "0.70694196", "0.70500296", "0.7049913", "0.70453286", "0.70424014", "0.70413166", "0.7036916", "0.70348257", "0.70193774", "0.70193774", "0.70067006", "0.700503", "0.69937575", "0.698635", "0.6985898", "0.69783294", "0.69783294", "0.69783294", "0.69783294", "0.69678944", "0.6966673", "0.6966673", "0.6966558", "0.6961258", "0.6961258", "0.69539934", "0.6942946", "0.69370514", "0.6925879", "0.6925879", "0.6925552", "0.6920018", "0.6919116", "0.69006276", "0.6899841", "0.6884569", "0.6877952", "0.6876726", "0.68616617", "0.6858244", "0.6858244", "0.68578655", "0.68578655", "0.68539244", "0.68539244", "0.6848043", "0.68311995", "0.6826669", "0.6825993", "0.6825928" ]
0.0
-1
Method to update the quantity of an item
input: List and updating quantity"value" string"key"
steps: pass in grocery_list and change value of string
output: p updated grocery_list
def update_quantity(grocery_list, item, quantity)
  add_item(grocery_list, item, quantity)
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_quantity_in_grocery_list(grocery_list, item_name, quantity)\n # steps: update quantity for item\n grocery_list[item_name.to_sym] = quantity\n # output: explicit return updated hash\n grocery_list\nend", "def update_quantity(grocery_list, item_to_update, qty)\n grocery_list[item_to_update] = qty\n grocery_list\nend", "def update_quantity(grocery,item_name,new_quantity)\n # input:list, item name, and new_quantity\n # steps: change old value of item_name with the new_quantity\n grocery[item_name] = new_quantity\n # output: display the latest list\n display_list(grocery)\nend", "def update_quantity(item_name, grocery_list, quantity)\n grocery_list[item_name] = quantity\n grocery_list\n end", "def update_quantity(grocery_list, item, quantity)\n grocery_list[item] = quantity\nend", "def update_qty(grocery_list, item, qty)\n grocery_list[item] = qty\n grocery_list\nend", "def update_quantity(item, new_quantity, grocery_list)\n grocery_list[item] = new_quantity\nend", "def update_quantity(item, grocery_list, quantity)\n grocery_list[item] = quantity\n return grocery_list\n end", "def update_quantity(item, grocery_list, quantity)\n grocery_list[item] = quantity\n return grocery_list\n end", "def update_quantity(grocery_list, item, quantity)\n grocery_list[item] = quantity\n grocery_list\nend", "def update_qty(item, item_qty, grocery_list)\n grocery_list[item] = item_qty\n # print_list(grocery_list)\n grocery_list\nend", "def update_quantity(grocery_list, item, quantity)\n grocery_list[item] = quantity.to_i \n grocery_list\nend", "def update_quantity(groceries_list, item, new_quantity)\n\t# Change value for inputted key to the desired quantity\n\tgroceries_list [item] = new_quantity\n\tgroceries_list\nend", "def update_quantity(grocery_list,item_name,quantity)\r\n\t \r\n\t grocery_list[item_name] = quantity\r\n\r\nend", "def update_quantity(grocery_list, item, new_quantity)\n\tif grocery_list.include?(item.to_sym)\n\t\tgrocery_list[item.to_sym] = new_quantity\n\telse\n\t\tputs \"item name invalid\"\n\tend\nend", "def update(grocery_list, item, quantity)\n grocery_list[item] = quantity\nend", "def update_item(grocery_list, item, quantity)\n grocery_list[item] = quantity\n grocery_list \nend", "def update_quantity(grocery_list, item, new_quantity)\n grocery_list[item] = new_quantity\n return grocery_list\nend", "def update_quantity (grocery, item_name, new_quantity)\n grocery[item_name] = new_quantity\n display_list(grocery)\n end", "def update_quantity(grocery_list, item, quantity)\r\n\tgrocery_list[item] = quantity\r\n\tgrocery_list\r\nend", "def update_quantity(grocery_list, item_name, quantity)\n\tadd_item(grocery_list, item_name, quantity)\n\treturn grocery_list\nend", "def update_quantity(grocery_list,item,new_quantity)\n # if item on the list\n grocery_list.store(item,new_quantity)\n puts \"When you pick up #{item}, make sure to grab #{new_quantity} instead.\"\nend", "def update_quantity(grocery_list, item_name, qty = 1)\n\tgrocery_list = add_item(grocery_list, item_name, qty)\n\treturn grocery_list\nend", "def update_quantity (grocery_list, item, quantity)\n\tif grocery_list[item] == nil\n\t\tputs \"Item not on list.\"\n\telse\n\t\tadd_item(grocery_list, item, quantity)\n\tend\n\treturn grocery_list\nend", "def update_quantity(grocery_list, item_name, qty = 1)\r\n\tgrocery_list = add_item(grocery_list, item_name, qty)\r\n\treturn grocery_list\r\nend", "def update_quantity (grocery_list, item, quantity)\n if grocery_list[item] == nil\n \tputs \"Item not on list.\"\n else\n 
\tadd_item(grocery_list,item,quantity)\n end\n return grocery_list\n print_list(grocery_list)\nend", "def update_quantity(grcy_list, string, i)\n # hash(key[value])\n grcy_list[string] = i\nend", "def add_to_grocery_list(grocery_list, item_name, quantity=1)\n # steps: add item to hash and set quantity\n grocery_list[item_name.to_sym] = quantity\n # output: explicit return updated hash\n grocery_list\nend", "def update_quanity(list, item, quantity)\r\n# input: list, item and quantity to be updated to\r\n# steps:\r\n # check if item exists\r\n # update quantity\r\n list[item] = quantity\r\n # print success \"your cart has been updated!\"\r\n puts \"The quantity for #{item.upcase} has been updated in your grocery list!\"\r\n p list\r\n# output: updated list with new quantity\r\nend", "def update_quantity(grocery_list, grocery_item, new_quantity)\n if grocery_list.has_key?(grocery_item)\n grocery_list[grocery_item] = new_quantity\n else\n puts \"Sorry, that's not on the list (check your spelling).\"\n end\nend", "def update_quantity(list, upd_item, new_quantity)\n# steps:\n # reassign key (item) a new value (quantity)\n list[upd_item] = new_quantity\n # return list\n list\nend", "def update_quantity(item, quantity)\n # puts \"List with updated value of #{quantity} for #{item}:\"\n $grocery_list[item] = quantity\n $grocery_list\nend", "def update(groceries_list, update_item, update_quantity)\n\tgroceries_list[update_item.to_sym] = update_quantity\nend", "def updated_quantity(list, item_name, quantity)\r\n\tlist[item_name] = quantity\r\n\tlist\r\nend", "def update_quantity(list, item_name, quantity)\n\tlist[item_name] = quantity.to_i\n\tlist\nend", "def update_quantity(list, item_name, quantity)\n\tlist[item_name] = quantity\n\tlist\nend", "def update_quantity(list, item_name, quantity)\n\tlist[item_name] = quantity\n\tlist\nend", "def change_quantity(grocery_list, item_name, quantity)\r\n grocery_list[item_name] = quantity\r\n #puts grocery_list\r\n return grocery_list\r\nend", "def update_quantity(list, item_name, qty)\n list[item_name] = qty\nend", "def update_quantity(new_list, item_name, quantity)\r\n \r\n new_list[item_name] = quantity\r\nend", "def update_quantity(list, item, quantity)\n\tlist[item] = quantity\n\tlist\nend", "def update_quantity(item, quantity, groceries_list)\n groceries_list[item] = quantity\nend", "def update(groceries_list, update_item, update_quantity)\n groceries_list[update_item.to_sym] = update_quantity\nend", "def update_quantity(shopping_list, item, quantity)\n shopping_list[item] = quantity\nend", "def update_quantity(list, item, qty)\n list[item] = qty\n list\nend", "def update_quantity(list, item_name, new_quantity)\n list[item_name] = new_quantity\nend", "def update_quantity(list, item_name, new_quantity)\n list[item_name] = new_quantity\nend", "def update_item(list, item, quantity)\r\n add_item(list, item, quantity)\r\n# input: Shopping list, item to be updated, new quantity\r\n# steps:\r\n # Use shopping list as input\r\n # Use item to be updated as input\r\n # Use new quantity to be updated as input\r\n # Update the quantity of the item on the list\r\n # list[item] = quantity\r\n# output: shopping list with updated quantity\r\n # printlist(list)\r\nend", "def update_quantity(list, item, quantity)\n\tlist[item] = quantity\n\tp list\nend", "def update_grocery_list_quantity(list, category, item, quantity)\n list[category][item] = quantity\n list\nend", "def change_quantity(list, item, new_qty)\n list[item] = new_qty\nend", "def update_quantity(list_name, 
item, value)\r\n# input: list, item name, new quantity\r\n# steps: find item in the hash and change quantity to new quantity\r\n list_name[item] = value\r\n# output: updated hash with new value for item key\r\n p list_name\r\nend", "def update_quantity(list, item, quantity)\n\tadd_to_list(list, item, quantity)\nend", "def add_item(grocery_list,new_item,quantity=1)\n grocery_list[new_item] = quantity\nend", "def update_qty(item_list, item, qty)\r\n item_list[item] = qty\r\n item_list\r\nend", "def update_quantity(list, item_name, quantity)\r\n list[item_name] = quantity\r\nend", "def update_quantity(list, string, integer)\n\tlist[string]=integer\n\treturn list\nend", "def update_quantity(list, string, integer)\n\tlist[string]=integer\n\treturn list\nend", "def update_qty(shopping_list, item, quantity)\r\n\r\n\tadd_item(shopping_list, item, quantity)\r\n\r\nend", "def update_item(list, item, new_quantity)\n\tlist[item] = new_quantity\n\tlist\nend", "def update_quantity(list, item, quant)\n list[item] = quant\nend", "def add_an_item(grocery_list, new_item, qty = 1)\n grocery_list[new_item] = qty\n grocery_list\nend", "def update_quantity_of_items(list, item, quantity)\n list[item] = quantity\nend", "def change_quantity(list, item, qty)\n list[item] = qty\n list\nend", "def update_groceries(list, item, qty)\n\tlist[item] = qty\n\tp list\nend", "def update_quantity(list, item, quant)\n list[item] = quant\nend", "def update_quantity(list, item, quantity)\n #method to update quantity\n #can also add items\n list[item] = quantity\nend", "def update_item_quantity(list, item, quantity)\n list[item] = quantity\n list\nend", "def new_quantity(grocery_list, item, new_quan)\r\n grocery_list[item] = new_quan\r\n p grocery_list\r\nend", "def update_quanity(list, item_name, new_quantity)\n\n\tlist[item_name] = new_quantity\n\tp list\nend", "def update_quant(grocery_list)\n puts \"Which item would you like to update?\"\n item = gets.chomp\n puts \"How many do you want?\"\n quantity = gets.chomp\n grocery_list[item] = quantity\n return grocery_list\nend", "def update_quantity(list, item, quantity)\n list[item] = quantity\n list\nend", "def update_quantity(list, item, quantity)\n list[item] = quantity\n list\nend", "def update_quantity(item, list, quantity)\n add_item(item, list, quantity)\nend", "def update_quantity(list, item, updated_quantity)\n list[item] = updated_quantity\n list\nend", "def update_quantity(list, item, quantity)\n list[item] = quantity\nend", "def update_quantity(list, item, quantity)\r\n\tlist[item] = quantity\r\n\treturn list\r\nend", "def update_quantity(item, grocery_bag, quantity)\n grocery_bag[item] = quantity\n p grocery_bag\nend", "def update_quantity(list, key, quantity)\n list[\"#{key}\"] = quantity\n list\nend", "def update_quantity(list, item, quantity)\n list[item] = quantity\nend", "def update_quantity(thingtochange)\n userinputarray=thingtochange.split(\",\")\n $list_hash.each do|current_item, qty| \n if current_item==userinputarray[0]\n $list_hash[current_item]=userinputarray[1]\n else\n end\n end\nend", "def update_quantity (list, item, quantity)\n list[item] = quantity\nend", "def update_quantity (list, item, quantity)\n list[item] = quantity\nend", "def add_item(grocery_list, item, qty)\n grocery_list[item] = qty \n grocery_list\nend", "def update_quantity(list, item, quantity)\nlist[item] = quantity\nlist\nend", "def update(item, quantity, list)\n\t# steps: if the item is in the list\n\tif list.include? 
item.to_sym\n\t\t# update the quantity\n\t\tlist[item.to_sym] = quantity\n\telse \n\t\tadd_item(item, quantity, list)\n\tend\n\t# output: return the updated list\n\tlist\nend", "def update_quantity(item, list, quantity)\n list[item] = quantity\n return list\nend", "def update_quant(item, quant)\n\t$grocery_list.store(item, quant)\n\tp \"You updated #{item} number to #{quant}.\"\nend", "def update(list, food_item, quantity)\n\tlist[food_item] = quantity\n\tlist\nend", "def update_quantity(list, item_name, quantity)\n\tlist.each do |item, qty|\n\t\tif item === item_name\n\t\t\tlist[item] = quantity\n\t\tend\n\tend\nend", "def current_list(grocery_list, item, qty)\n grocery_list[item] = qty\n grocery_list\nend", "def update_quantity(list,item,quantity)\nlist[item]= quantity\n p list\nend", "def update_quantity(list,item,item_count)\n\tlist[item] = item_count\nend", "def update_quantity(list, item, quantity)\n list[item] = quantity.to_i\n list\nend", "def update_quantity(list, item, quantity)\n list[item] = quantity\n p list\nend", "def update_item (list,item,quantity)\n\tlist[item] = quantity\nend", "def update_quantity_of_item(list,item,quantity)\r\n add_item_to_list(list,item,quantity)\r\n list\r\nend", "def item_quantity(list, item_to_update, quantity)\n list[item_to_update] = quantity \nend", "def update(list, item, quantity)\n\tlist[item] = quantity\n\tlist\nend", "def update(list, item_name, quantity)\n\tlist[item_name] = quantity\nend" ]
[ "0.8949828", "0.8696384", "0.86850566", "0.86380774", "0.857044", "0.8537167", "0.8535923", "0.85338753", "0.85338753", "0.85156566", "0.8491477", "0.84675807", "0.84233785", "0.84010017", "0.83982396", "0.8363292", "0.83294576", "0.8319808", "0.8300036", "0.82935274", "0.82800317", "0.8236894", "0.8184083", "0.8169637", "0.81479734", "0.81200004", "0.8087821", "0.8085444", "0.8068708", "0.80281407", "0.80272996", "0.8003417", "0.7954957", "0.7938641", "0.7917959", "0.7894289", "0.7894289", "0.7883011", "0.78830034", "0.78624594", "0.78618556", "0.78515744", "0.78468406", "0.7837387", "0.7829818", "0.78154707", "0.78154707", "0.7803215", "0.7788158", "0.7779638", "0.7778877", "0.77775294", "0.777611", "0.77723616", "0.777092", "0.776005", "0.774666", "0.774666", "0.77410144", "0.774062", "0.7731391", "0.7720244", "0.7718681", "0.77171534", "0.7715427", "0.7710944", "0.7710865", "0.77093995", "0.77071023", "0.77052546", "0.77041596", "0.7695598", "0.7695598", "0.7693623", "0.76801157", "0.7658045", "0.7656856", "0.76548404", "0.7644945", "0.76375175", "0.763499", "0.7625698", "0.7625698", "0.7623148", "0.761879", "0.76184314", "0.76172626", "0.76129836", "0.7607338", "0.76003104", "0.75967807", "0.75941813", "0.7591958", "0.7581806", "0.75787854", "0.7569696", "0.75675327", "0.75600785", "0.7558326", "0.75578624" ]
0.7998542
32
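The positive document and most of the negatives for the preceding query reduce to a single hash assignment. A minimal Ruby sketch of that shared pattern, for reference only (method and variable names here are illustrative, not copied from any one entry):

# Illustrative sketch, not part of the dataset: updating a quantity in a
# hash-based grocery list. Reassigning the key both updates an existing
# item and adds a missing one, which is why several entries reuse add_item.
def update_quantity(list, item, quantity)
  list[item] = quantity
  list
end

list = { "milk" => 1 }
update_quantity(list, "milk", 3) # => { "milk" => 3 }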
Method to print a list and make it look pretty. Input: take grocery_list. Steps: loop list, add title. Output: pretty list.
def pretty_list(grocery_list)
  puts "Your Grocery List for next week!"
  grocery_list.each do |item, num|
    puts "#{item} qty #{num}"
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pretty_list(list)\n\tlist.each do |item, quantity|\n\t\tputs \"There are #{quantity} #{item} on the grocery list.\"\n\tend\nend", "def look_pretty(list)\n puts \"Here is your grocery list:\"\n list.each { |item, quantity| puts \"#{item}: #{quantity}\" }\nend", "def pretty_list(list)\n puts \"Grocery List:\"\n list.each do |key, value|\n puts \"#{value} #{key}(s)\"\n end\n \nend", "def pretty_list(groceries_list)\n\t# Print a line of text to lead into the list\n\tputs \"Here is your updated grocery list:\"\n\t# for each key and value in the list, print a string \n\t# containing the key and value\n\tgroceries_list.each do |item, quantity|\n\t\tputs \"#{item}: #{quantity}\"\n\tend\nend", "def pretty_list(grocery_list)\n grocery_list.each do |item, quantity|\n puts \"#{quantity} #{item}\"\n end\nend", "def pretty_list(grocery_list)\n grocery_list.each do |item, quantity|\n puts \"#{quantity} #{item}\"\n end\nend", "def pretty_list(grocery_list)\n puts \"Shopping List\"\n grocery_list.each do |item, quantity|\n puts \"#{item}: #{quantity}\"\n end\n puts \"Happy Shopping!\"\nend", "def print_pretty(new_list)\n puts \"Grocery List:\"\n new_list.each do |item, amount|\n \n puts \"#{item}: #{amount}\"\n end\nend", "def grocery_printer(list)\n\tlist.each do |list_item|\n\t\tputs \"* #{list_item}\"\n\n\tend \nend", "def pretty_up(list)\n puts \"\\nHere is your grocery list: \"\n list.each do |key, value|\n puts value.to_s + ' ' + key.to_s\n end\nend", "def pretty_list(list)\r\n\tputs \"Grocery List\".center(30)\r\n\tlist.each do |item, quantity|\r\n\t\tputs \"#{item} \".ljust(20) + \"#{quantity}\".rjust(10)\r\n\tend\r\nend", "def pretty_list(list)\n list.each do |grocery_item, qty|\n puts \"#{grocery_item}, quantity: #{qty}\"\n end\nend", "def print_pretty (list)\n puts \"Grocery list\".upcase.center(50)\n puts \"-----------------\".center(50)\n list.each do |item,quantity|\n puts item.rjust(25) + \":\" + quantity.to_s.ljust(25) \n end\nend", "def format_list(list)\n puts \"Your grocery list:\"\n list.each do |key, value|\n puts \"#{key}: #{value}\"\n end\nend", "def pretty_in_print(list)\n puts \"---------------------------------------\"\n puts \"These are the items we are gonna buy\"\n list.each {|item, qty| puts \"#{qty} pieces of #{item}\" }\nend", "def beautify_list(list)\n puts \"Grocery List\"\n puts \" Item: Quantity\"\n list.each do |list_item, item_quantity|\n puts \" #{list_item} : #{item_quantity}\"\n end\nend", "def print_list(grocery_list)\n puts \"*\" * 40 + \"Grocery List\" + \"*\" * 40 \n grocery_list.each do |item, quantity|\n puts \"#{item.capitalize}\".ljust(30) + \"#{quantity}\".rjust(10)\n\n end\n end", "def print_pretty(grocery_list)\r\n\tgrocery_list.each do |item, quantity| \r\n\t\tputs \"you bought #{quantity} #{item}\"\r\n\tend\r\nend", "def pretty_list(list)\n\tlist.each { |item_name, item_quantity|\n\t\tputs \"You will need to purchase #{item_quantity} of #{item_name}.\"\n\t}\nend", "def print_list(list)\r\n# input: completed list\r\n# steps:\r\n # iterate over list and print formatted list\r\n puts \"Your Grocery List\"\r\n list.each do |item, quantity|\r\n puts \"#{item}, qty: #{quantity}\"\r\n end\r\n # format: each item with its own line\r\n # \"item - quantity\"\r\n# output: implicit return of list\r\nend", "def print_list(list)\n\t# input: list\n\t# output: none\n\n\t# print header\n\tputs \"Grocery List\\n------------\"\n\t# iterate over list\n\tfor item, qty in list do\n\t\t# print each item to screen ex: \" - Lemonade, 2 liters\"\n\t\tputs \" - #{item}, 
#{qty}\"\n\tend\n\nend", "def print_list(list)\n grocery_list = \"Grocery List\" + \"\\n\"\n\n list.each do |item, qty|\n grocery_list += \"#{item} : #{qty}\" + \"\\n\"\n end\n\n puts grocery_list\nend", "def print_list(grocery_list)\n line_width = 30\n puts\n puts ('Grocery List'.center(line_width))\n puts\n puts (\"ITEMS\".ljust(line_width/2)) + \"QTY\".rjust(line_width/2)\n puts (\"------------------------------\").center(line_width)\n grocery_list.each { |item, quantity| \n puts (item.ljust(line_width/2)) + quantity.to_s.rjust(line_width/2)\n puts (\"------------------------------\") }\nend", "def print_list(grocery_list)\n grocery_list.each do |item,quantity|\n puts \" #{item} #{quantity}\"\n end\n end", "def print_list(grocery_list)\n puts \" GROCERY LIST \".center(50, \"=\")\n puts\n grocery_list.each { |item,qty| puts \"#{item} qty: #{qty}\" }\n puts\n puts \"=\".center(50, \"=\")\nend", "def list_to_print(title,list)\n line = \"\" \n 1.upto(title.size){line << \"-\"}\n title = title + \"\\n\" + line + \"\\n\"\n return title + (list.collect {|x| \" => #{x}\" }).join(\"\\n\")\n end", "def print_grocery_list(grocery_list) \n\t# create title and line break\n\tputs \"Current grocery list:\"\n\tputs \"----------\"\n\t# iterate through hash\n\tgrocery_list.each do |item, quantity|\n\t\t# print each item with its quantity\n\t\tputs \"#{item}: #{quantity}\"\n\tend\n\t# create line break for readability\n\tputs \"----------\"\nend", "def print_list(list)\n puts \"Grocery List\"\n list.each { |item, qty| puts \"#{item}: #{qty}\" }\nend", "def printlist(list) \n\tputs \"Today's Grocery List is:\"\n\tlist.map { |item| puts \"* #{item}\"}\n\t\nend", "def pretty(list)\n list = list.map {|k, v| k.capitalize}\n puts \"Grocery list:\" + list\nend", "def print_list(grocery_list)\n grocery_list.each do |item, qty|\n p \"#{item}: #{qty}\"\n end\nend", "def pretty_list(list)\r\n\tlist.each { |item, quantity| puts \"Item: #{item.capitalize} - Amount: #{quantity}\" }\r\nend", "def print_list(grocery_list)\n grocery_list.each do |x,y|\n puts \"Item: #{x}-- ##{y}\"\n end\nend", "def print_list(list)\n\tputs \"Grocery List\"\n\tlist.each{|item, quantity|\n\t\tputs \"#{item} : #{quantity}\"\n\t}\nend", "def pretty_list(list)\n list.each{|food, quantity| puts \"Please get #{quantity} #{food}.\"}\n puts \"Thanks!\"\nend", "def print_list(list)\n puts \"This week's grocery list:\"\n list.each do |item, quantity|\n puts \"#{item}: #{quantity}\"\n end\nend", "def print_list\n \t\tputs \"\\n----------------------------\"\n \t\tputs \"#{@date_created.month}/#{@date_created.day}/#{date_created.year}\"\n \t\tputs \"Your Grocery List:\\n\\n\" \t\t\n \t\tif @list.empty?\n \t\t\tputs \"The List Is Empty!\"\n \t\telse\n\n \[email protected]_with_index { |item, index| puts \"#{index+1}. 
#{item.qty} #{item.name}\" }\n end\n puts \"\\n----------------------------\"\n end", "def print_list(grocery_list)\n puts \" Grocery List\"\n grocery_list.each do |item, quantity|\n puts \"#{item.capitalize} --> #{quantity}\"\n end\nend", "def print_list(grocery_list)\n puts \" Grocery List\"\n grocery_list.each do |item, quantity|\n puts \"#{item.capitalize} --> #{quantity}\"\n end\nend", "def print_list(list)\n puts \"Here's your grocery list:\"\n list.each { |item, quantity| puts \"#{item}: #{quantity}\" }\nend", "def pretty_list(list)\n # new_list = \"\"\n # list.each { |item, quantity| new_list << \"You want #{quantity} #{item}\\n\" }\n # return new_list\n list.each { |item, quantity| puts \"You want #{quantity} #{item}\\n\" }\nend", "def print_list(my_list)\r\n# input: \r\n \r\n# steps:\r\n# print to screen: iterate through hash item - quantity\r\n puts '------'\r\n puts \"Grocery list:\"\r\n my_list.each do |item, qty|\r\n puts \"#{item} - #{qty}\"\r\n end\r\n puts '-------'\r\n# output: each k,v pair printed surrounded by dashes\r\nend", "def print_list(grocery_list)\n puts \"Here is your grocery list: \"\n grocery_list.each do |item, integer|\n puts \"#{item.capitalize}, #{integer}\"\n end\nend", "def printed_list(grocery_list)\n grocery_list.each {|item, quantity|\n puts \"#{item}: #{quantity}\" }\nend", "def print_list(grocery_list)\n grocery_list.each do | key, value |\n \tputs \"#{key}: #{value.to_s}\"\n end\nend", "def print_list\n\t puts \"\"\n\t puts \"\"\n\t\tputs \"#{@list_name}\"\n\t\tprint \"-\" * 40\n\t\t@grocery_list.each {|k, v| puts \"#{k} #{v}\"}\n\t\tputs \"\"\n\t\tget_item\n\tend", "def grocery_list_beautifier(list)\n list.sort\n list.each do |key, values|\n puts \"Category: #{key.capitalize}\"\n values.each do |item, quantity|\n puts \"#{item.capitalize}: #{quantity}\"\n end\n end\nend", "def nice_print(grocery_list)\n\tgrocery_list.each { |food, quantity| puts \"We need #{quantity}, of #{food}.\"}\nend", "def print_list(list)\r\n puts \"Your current grocery list\"\r\n puts \"---------------------------\"\r\n list.each do |item, quantity|\r\n puts \"#{item}: #{quantity}\"\r\n end \r\nend", "def print_list(grocery_list)\r\n puts \"Here is your grocery list: \"\r\n puts grocery_list.each { |x, y| puts \"#{x}: #{y}\" }\r\n puts \"Have a wonderful shopping day!\"\r\nend", "def pretty_list(list)\n list.each do |item,quantity|\n p \"#{item}! You have #{quantity}.\"\n end\nend", "def pretty_list(list)\n list.each do |item,quantity|\n p \"#{item}! 
You have #{quantity}.\"\n end\nend", "def print_list(title,list)\n# steps:\n # print title of list (will ask user for this input)\n puts \"**********************\"\n puts \" #{title.upcase}:\"\n puts \"**********************\"\n # print headers of item and quantity\n puts \" # ITEM\"\n puts \"----------------------\"\n # print each item and it's quantity, bulleted if possible\n list.each {|item,quantity| puts \" #{quantity} #{item}\"}\n puts \"**********************\"\n # print today's date\n date = Time.new\n puts \" Made on: #{date.month}/#{date.day}/#{date.year}\"\nend", "def pretty_list(hash)\r\n puts \"Grocery List:\"\r\n puts \" \"\r\n hash.each do |item_name, quantity|\r\n puts \"#{item_name}: #{quantity}\"\r\n end\r\nend", "def prettify_list(list)\n\tlist.each do |item, quantity|\n\t\tp \"You have #{quantity} #{item}\"\n\tend\nend", "def print_list(grocery_list)\n grocery_list.each { |k, v| puts \"#{v.to_s} #{k}\" }\nend", "def print_list(list)\r\n puts \"_-\" *25 + \"\\n\\n\"\r\n puts \"Here is your Grocery List: \\n\\n\"\r\n list.each do |item, quantity|\r\n puts \"\\tItem: #{item} \\tAmount: #{quantity}\"\r\n end\r\n puts \"_-\" *25\r\nend", "def print_list(glist)\n\tputs \"Grocery Shopping list\"\n\tglist.each do |item, count|\n\t\tputs \"#{item}: \".ljust(20) + \" #{count}\".rjust(20)\n\tend\nend", "def pretty_list(list)\n list.each {|item, quantity| puts \"You need #{quantity} #{item}\"}\nend", "def print_list(grocery_list)\n\tgrocery_list.each do |item_name, quantity|\n\t\tputs \"#{item_name} => #{quantity}\"\n\tend\nend", "def print(list)\n puts \"***This is your grocery list:***\"\n list.each do |item,quantity|\n puts \"-#{quantity} #{item}\"\n end\nend", "def print_list(list)\n# input:list\n# steps: iterate through the list\n list.each do |item, number|\n puts \"we have #{number} #{item}\"\n end\n puts \"in our grocery list\"\nend", "def pretty_print(list)\n list.each {|item, quantity| puts \"#{item} : #{quantity}\"}\nend", "def print_list(list)\n\tlist.each do |item, quantity|\n\t\tputs \"There are #{quantity} #{item} on the grocery list!!\"\n\tend\nend", "def print_list(grocery_list)\n grocery_list.each do |item, quantity|\n puts \"#{item}: #{quantity}\"\n end\nend", "def print_list(grocery_list)\n grocery_list.each do |item, quantity|\n puts \"#{item}: #{quantity}\"\n end\nend", "def print_grocery_list(list)\n #print method\n puts \"Here is your grocery list:\"\n #user interface print out\n puts \"~~~~~~~~~~~~~~~~~~~~~~~~~\"\n list.each do |key, variable|\n #do loop\n puts \"#{key.capitalize} #{variable}\"\n end\n #ends do loop\n puts \"~~~~~~~~~~~~~~~~~~~~~~~~~\"\nend", "def print_grocery_list(grocery_list)\n # steps: print \"Grocery List\"\n puts \"Grocery List:\"\n # for each item, print \"item name: quantity\"\n grocery_list.each do |item_name, quantity|\n puts \"#{item_name.to_s}: #{quantity}\"\n # output:\n end\nend", "def print_list(list)\n\tlist.each do |iterator|\n\tputs \"#{iterator[:item].split.map(&:capitalize).join(' ')} - QTY #{iterator[:qty]}\"\n\tend\n\nend", "def prettify(list)\r\n\r\n list.each do |item, quantity|\r\n puts \"There are #{quantity} #{item} in the list.\"\r\n end\r\n\r\nend", "def pretty(list)\n list.each {|item, quantity| p \"You need to buy #{quantity} of #{item}.\"}\nend", "def print_list(grocery_list)\r\n grocery_list.each do |item, quantity|\r\n puts \"#{item}: #{quantity}\"\r\n end\r\nend", "def print_list(grocery_list)\n grocery_list.each do |item, quantity|\n puts \"#{item}, qty: #{quantity}\"\n end\nend", "def 
print_list(grocery_list)\n grocery_list.each { |grocery_item, quantity| puts \"#{grocery_item} : #{quantity}\"}\nend", "def present ( grocery_list )\n #present grocery list with an asterisk in front of each item, and one item per line\n grocery_list.each do |grocery|\n puts \"* #{grocery}\"\n end\nend", "def print_list(list)\r\n puts \"GROCERY LIST\"\r\n list.each do | item, quantity |\r\n puts \"#{item.capitalize}: #{quantity}\"\r\n end\r\nend", "def print_list(my_groceries)\n puts \"----------\"\n puts \"Our grocery list contains:\" \n my_groceries.each { |item, quantity| puts \"#{item}: #{quantity}\"}\n puts \"----------\"\nend", "def print(list)\r\n\tputs \"Grocery List:\"\r\n\tlist.each do |key,value|\r\n\t\tputs \"#{key} : #{value}\"\r\n\tend\r\nend", "def print_list(grocery_list)\n grocery_list.each { |item, quantity| puts \"You need #{quantity} #{item}\"}\nend", "def groceryList(*list)\n \ti=0\n \twhile i<list.length\n \t\tputs list[i]\n \t\ti+=1\n \tend\n end", "def print_list(grocery_list)\n\tgrocery_list.each do | key, value |\n\t\tputs \"#{key}: #{value.to_s}\"\n\tend\nend", "def list_formatter(list)\n\tlist.each do |item, quanity|\n\t\tputs \"we need #{quanity} of #{item}\"\n\tend\nend", "def print_list(grocery_list)\n grocery_list.each do |item, quantity|\n puts \"Buy: #{quantity} #{item}\"\n end\nend", "def print_friendly_list(list)\n puts \n puts \"Shopping List:\"\n puts \"----------------\"\n list.each {|key, value| puts \"#{key} #{value}\" }\n puts \nend", "def print_list(item_list)\r\n\t# steps: iterate through each of hash element\r\n\t# print the header\r\n puts \"=========================\"\r\n\tputs \"== Grocery List So Far ==\"\r\n\tputs \"=========================\"\r\n\titem_list.each do |item,quantity|\r\n\t\t# display each element key and the value to make it look pretty\r\n\r\n\t\tputs item.ljust(20)+quantity.to_s\r\n\tend\r\nend", "def display_list(list_items)\n title = \"Shopping List:\"\n puts title\n puts \"-\" * title.length\n list_items.each do |item_name, item_qty|\n puts \"#{item_qty}x - #{item_name}\"\n end\n\nend", "def list_items(grocery_list)\n grocery_list.sort!\n grocery_list.each do |item|\n puts \"* #{item}\"\n end\n puts \"#{grocery_list.length} items on the list right now\"\nend", "def print_list(list) \n puts \"Your grocery list includes:\"\n\n list.each do |key, value|\n puts \"#{key} = #{value}\" \n end\nend", "def print_list(grocery_list)\n grocery_list.each do |item,quantity|\n puts \"Buy: #{quantity} #{item}\"\n end\nend", "def print_list(groceries)\n groceries.each do |grocery|\n puts \"* \" + grocery\n end\nend", "def list_groceries(list)\n\tlist.each {|item| puts \"* #{item}\"}\n\tputs \"---------------------------\"\nend", "def print_list(list)\n\tlist.each do |item, quantity|\n\t\tputs \"#{item.capitalize}: #{quantity}\"\n\tend\nend", "def print_list(list)\r\n puts \"-\"*20\r\n list.each do |item,quantity|\r\n puts \"Item:#{item} quantity:#{quantity}\"\r\n end\r\n puts \"-\"*20\r\n list\r\nend", "def format_list(items); end", "def print_list(grocery_hash={})\n\tputs \"*\" * 40\n\tputs \"Grocery List\"\n\tputs \"*\" * 40\n\tgrocery_hash.each do |food, quantity| puts \"#{food}:\".ljust(30) + \"#{quantity}\".rjust(10)\n\tend\n\tputs \"\"\nend", "def print_pretty(list)\n list.each { |item, value| puts \"#{item} #{value}\" }\nend", "def print_finished_list(grocery_list)\n puts \"----------------------\"\n puts \"Here is your grocery list: \"\n grocery_list.each do |item, quantity|\n puts \"Buy #{quantity} #{item}\" \n end\n puts 
\"----------------------\"\nend", "def print(groceries_list)\n\tputs \"---------------------------------------------\"\n\tgroceries_list.each do |item, quantity|\n\t\tputs \"#{item}: #{quantity}\"\n\tend\n\tputs \"---------------------------------------------\"\nend", "def final(list)\n\tputs \"Grocery List\"\n\tputs \"*\"*12\n\tlist.each {|item_name, quantity| puts \"#{item_name}: #{quantity}\"}\nend", "def print_list(grocery_list)\r\n\tgrocery_list.each { |item, quantity| puts \"You have #{quantity} #{item}(s)!\" } \r\nend" ]
[ "0.8215363", "0.80579245", "0.8027588", "0.80151266", "0.80043745", "0.80043745", "0.7907686", "0.7894245", "0.7850812", "0.78442687", "0.780863", "0.7738925", "0.77158856", "0.7688331", "0.7617733", "0.7599587", "0.75658864", "0.7556144", "0.75404185", "0.7502246", "0.74962896", "0.74527276", "0.7446482", "0.73980904", "0.73975015", "0.7391575", "0.73805344", "0.7377513", "0.73648095", "0.73385775", "0.73336524", "0.73236006", "0.73015976", "0.7291792", "0.72887474", "0.7269467", "0.7261356", "0.72533876", "0.72533876", "0.72506684", "0.7246989", "0.72358334", "0.7227969", "0.72249365", "0.7220843", "0.72081345", "0.7206483", "0.72051585", "0.72040606", "0.71956474", "0.71840584", "0.71840584", "0.718207", "0.7181049", "0.71542716", "0.7150367", "0.71444196", "0.7136538", "0.71135455", "0.7097345", "0.70958716", "0.7093873", "0.70888096", "0.7084916", "0.7084274", "0.7084274", "0.7078958", "0.70669955", "0.7052815", "0.70503205", "0.70477074", "0.7030945", "0.70283157", "0.7023947", "0.6974995", "0.6960881", "0.6957365", "0.6945515", "0.6945191", "0.6937782", "0.69256604", "0.6902984", "0.68705493", "0.6864497", "0.6860746", "0.68334955", "0.682539", "0.6823669", "0.68223447", "0.67943674", "0.6792705", "0.6789823", "0.6788549", "0.67576027", "0.6742044", "0.6738938", "0.67347693", "0.6730584", "0.67296636", "0.67241895" ]
0.8106599
1
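The retrieved candidates for this query all follow the same titled-iteration pattern as the labelled document. A minimal sketch of that pattern for comparison (the title text and separator line are arbitrary choices, not taken from the dataset):

# Illustrative sketch, not part of the dataset: print a heading, then one
# "item: quantity" line per hash entry.
def pretty_list(grocery_list, title: "Grocery List")
  puts title
  puts "-" * title.length
  grocery_list.each { |item, qty| puts "#{item}: #{qty}" }
end

pretty_list({ "milk" => 3, "eggs" => 12 })
# Grocery List
# ------------
# milk: 3
# eggs: 12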
4) a new method that will create a new blog post
def new
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new_blog_post\n\t\t# Get current time when post was created\n\t\tcurrent_time = Time.now\n\n\t\t# Name of the content creator\n\t\tputs \"Enter your name: \"\n\t\tblog_author = gets.chomp\n\n\t\t# Title of blog post\n\t\tputs \"Post title: \"\n\t\tblog_title=gets.chomp\n\n\t\t# Content of blog post\n\t\tputs \"Create a post: \"\n\t\tblog_content = gets.chomp\n\n\t\t# Create a blog post\n\t\tblogpost=Blog_post.new(current_time.to_s, blog_title, blog_author, blog_content)\n\tend", "def create\n blog_post_creator = BlogPostCreator.new(ActivityLogger.new(BlogTweeter.new(self)))\n blog_post_creator.create_with(params[:blog_post])\n end", "def post\n # TODO re-enable blog post creation. Requires a config file.\n #if @args.length > 0\n # new_post = BlogPost.new(@args.first, @config)\n #else\n # @stderr.puts 'create:post requires a filename. Try yuzu create:post \"Post Title Here\"'\n #end\n end", "def newPost(title, contents, categories = nil, keywords = nil, created = nil, publish = 1, user = nil, password = nil)\n article = MetaWeblogStructs::Article.new\n article.title = title\n article.description = contents\n article.categories = categories if categories\n article.mt_keywords = keywords if keywords\n article.dateCreated = created if created\n\n @client.newPostStruct(@blog.blogid, user, password, article, publish)\n end", "def newPost(appkey, blogid, username, password, content, publish)\n begin\n re = %r{^<title>(.+?)</title>(?:<category>(.+?)</category>)?(.+)$}mi\n title, categories, body = content.match(re).captures rescue nil\n\n body ||= content.to_s\n title ||= body.to_s.split.split(0..5).join(' ')\n categories = categories.to_s.split(',')\n\n doc = Document.new.tap do |doc|\n doc.content = body\n doc.title = title\n doc.published = publish\n doc.author = @account\n doc.page = this_page\n doc.categories = this_page.categories.many_matching(categories)\n end\n \n doc.save!\n doc.external_id\n rescue Exception => e\n logger.error(e.message)\n \n # Return failure.\n 0\n end\n end", "def create\n @blog_post = BlogPost.create(blog_post_params)\n end", "def newPost(_, _, _, metaweblog_struct, _)\n post = Post.create(@db.content_path, metaweblog_struct)\n @db.refresh_post_paths\n\n run_user_cmd\n post['postid']\n end", "def create\n unless user_signed_in? && current_user && current_user.is_admin?\n render status: 403, json: 'You must be signed-in as an admin to create a post.'\n end\n binding.pry\n content = params.require('blog').require('content')\n puts content\n binding.pry\n render text: blog_path, status: :created, location: blog_path\n end", "def create_post_action\n post = Post.create(:title => \"Untitled Post\", :title_html => \"\", :content => \"\", :content_html => \"\", :javascript => \"\", :css => \"\", :is_public => false, :sort_id => 1)\n post.tags = [Tag.get_tag_by_name(\"home\")]\n post.sort_id = post.id\n post.markdown!\n post.save!\n flash[:notice] = \"New post created.\"\n return redirect_to \"/edit_post/\" + post.id.to_s\n end", "def new\r\n @blog_post = BlogPost.new\r\n end", "def create_posts\n end", "def create_posts\n end", "def new\n @robots=\"noindex,nofollow\"\n authorize! 
:create, Roxiware::Blog::Post\n @post = Roxiware::Blog::Post.new({:person_id=>current_user.person.id,\n :blog_class=>(params[:blog_class] || \"blog\"),\n :post_date=>DateTime.now.utc,\n :post_content=>\"\",\n :post_title=>\"\",\n :post_status=>\"publish\"}, :as=>\"\")\n\n # We need to pass the post category in separately as on new post creation, the\n # category joins are not yet created for the post.\n @post_category = Roxiware::Param::Param.application_param_val('blog', 'default_category')\n respond_to do |format|\n format.html { render :partial =>\"roxiware/blog/post/editform\" }\n format.json { render :json => @post.ajax_attrs(@role) }\n end\n end", "def create\n @blog = Blog.find(params[:blog_id])\n @post = @blog.posts.build(params[:post])\n @post.user = @current_user\n\n respond_to do |format|\n if @post.save\n flash[:notice] = 'Post was successfully created.'\n format.html { redirect_to(admin_blog_post_url(@blog, @post)) }\n format.xml { render :xml => @post, :status => :created, :location => @post }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @blog_post = BlogPost.new(blog_post_params)\n if @blog_post.save\n # got the new record in the database\n redirect_to blog_post_path(@blog_post)\n else\n # something went wrong, have suer fix errors on the form\n render :new\n end\n end", "def create\n # if the position field exists, set this object as last object, given the conditions of this class.\n\n #Creating BlogPost model object with the params blog_post\n @blog_post = BlogPost.new(params[:blog_post])\n\n\t@blog_post.user_id = current_user ? current_user.id: 0\n @blog_post.body = params[:blog_post][:body] #assigning params[:content] as blog_post body\n @blog_post.cached_slug = params[:blog_post][:cached_slug] #assigning params[:content1] as blog_post cached_slug\n\n\n if @blog_post.save\n redirect_to admin_blog_root_path\n else\n render 'new'\n end\n end", "def new\n @blog_post = BlogPost.new\n end", "def new\n @blog_post = BlogPost.new\n end", "def new\n @page_title = \"New Blog\"\n @blogpost = Blogpost.new\n end", "def create\n @titre = t('blog.titre')\n @blog = Blog.new(blog_params)\n @blog.id = Blog.last.id + 1\n @blog.users_id = current_user.id\n if @blog.save\n flash[:success] = t('blog.post_creer')\n redirect_to @blog\n end\n end", "def create\n @post = BlogPost.new(blog_post_params)\n @post.author = current_user\n create_resource_response(@post)\n end", "def create\n #initialize a new post object with the parameters submitted, validated by post_params\n @post = Post.new(post_params)\n \n isComment = false\n #check whether this is actually a comment, meaning it should have kind=2 and will need an originating post id\n if params[:kind].present?\n @post.kind = params[:kind].to_i\n @post.originatingPost_id = params[:originatingPost_id].to_i\n isComment = true\n \n #otherwise, it is a post, which optionally has tags\n else\n @post.kind = 0\n @tagsToAdd = params[:tagsToAdd].split(\" \")\n @tagsToAdd.each do |t|\n @post.tags << createTag(t)\n end\n end\n \n #either way, the currently logged in user should be logged as the creator of this post/comment\n @post.user = User.find(session[:user_id])\n \n if @post.save!\n if isComment\n redirect_to action: \"show\", :id => params[:originatingPost_id] #stay on the post's show page\n else\n redirect_to action: \"show\", :id => @post.id #go to this new post's show page\n end\n else\n redirect_to action: 'new' #upon failure, try 
again\n end\n end", "def create\n @blog_post = BlogPost.new(params[:blog_post])\n\n respond_to do |format|\n if @blog_post.save\n format.html { redirect_to blog_posts_url, :notice => 'Blog post was successfully created.' }\n format.json { render :json => blog_posts_url, :status => :created, :location => @blog_post }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @blog_post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n # 'new' initializes a new instance of the model\n # 'post_params' supplies params from frontend\n @post = Post.new(post_params)\n\n # TODO, blog_id validation with user_blogs table\n\n if @post.save!\n render 'create.json'\n else\n @post.errors.full_messages\n end\n end", "def create\n @blog_post = BlogPost.new(blog_post_params)\n\n respond_to do |format|\n if @blog_post.save\n format.html { redirect_to @blog_post, notice: 'Blog post was successfully created.' }\n format.json { render :show, status: :created, location: @blog_post }\n else\n format.html { render :new }\n format.json { render json: @blog_post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @blog_post = BlogPost.new(blog_post_params)\n\n respond_to do |format|\n if @blog_post.save\n format.html { redirect_to @blog_post, notice: 'Blog post was successfully created.' }\n format.json { render :show, status: :created, location: @blog_post }\n else\n format.html { render :new }\n format.json { render json: @blog_post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @blogpost = Blogpost.new(blog_params)\n\n if @blogpost.save\n redirect_to @blogpost\n else\n render 'new'\n end\n end", "def create\n redirect_to posts_path and return unless Mist.authorized?(:create_post, self)\n coerce_date(params[:post], 'published_at')\n @post = Mist::Post.new(params[:post])\n\n respond_to do |format|\n if @post.save\n format.html { redirect_to @post, :notice => 'Post was successfully created.' }\n format.json { render :json => @post, :status => :created, :location => @post }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @blog_post = BlogPost.new(blog_post_params)\n\n respond_to do |format|\n if @blog_post.save\n format.html { redirect_to @blog_post, notice: 'Blog post was successfully created.' }\n format.json { render json: @blog_post, status: :created, location: @blog_post }\n else\n format.html { render action: \"new\" }\n format.json { render json: @blog_post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @blog_post = BlogPost.new(params[:blog_post])\n @blog_post.user_id = current_user.id\n\n respond_to do |format|\n if @blog_post.save\n format.html { redirect_to @blog_post, notice: 'Blog post was successfully created.' 
}\n format.json { render json: @blog_post, status: :created, location: @blog_post }\n else\n format.html { render action: \"new\" }\n format.json { render json: @blog_post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @blogpost = Blogpost.new(blogpost_params)\n\n @article.save\n redirect_to @blogpost\n end", "def add_blog(date, text)\n\t\tBlog.new date, self, text\n\tend", "def create_post_for(options)\n site_id = options[:blog]\n options.delete(:blog)\n options[:site_id] = site_id\n post(new_post_uri, options)\n end", "def create\n@blog = Blog.find(params[:blog_id])\n@post =\[email protected](posts_params)\nif @post.save\n# Post saved, redirect to blog page\nredirect_to blog_post_url(@blog, @post)\nelse\nrender :action => \"new\"\nend\nend", "def create\n @blog_post = BlogPost.new(params[:blog_post])\n\n respond_to do |format|\n if @blog_post.save\n flash[:notice] = 'BlogPost was successfully created.'\n format.html { redirect_to(@blog_post) }\n format.xml { render :xml => @blog_post, :status => :created, :location => @blog_post }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @blog_post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create_new_post(params = {})\n titile = params.delete(:tile)\n title ||= 'new-post'\n date = params.delete(:date)\n comments = params.delete(:comments)\n categories = params.delete(:categories)\n\n puts \"Begin a new post in #{source_dir}/#{posts_dir}\"\n raise \"### You haven't set anything up yet. First run `rake install` to set up an Octopress theme.\" unless File.directory?(source_dir)\n puts \"set posts_dir\"\n `mkdir -p \"#{source_dir}/#{posts_dir}\"`\n\n filename = \"#{source_dir}/#{posts_dir}/#{date.strftime('%Y-%m-%d')}-#{title.to_url}.#{new_post_ext}\"\n if File.exist?(filename)\n abort(\"rake aborted!\") if ask(\"#{filename} already exists. Do you want to overwrite?\", ['y', 'n']) == 'n'\n end\n puts \"Creating new post: #{filename}\"\n open(filename, 'w') do |post|\n post.puts \"---\"\n post.puts \"layout: post\"\n post.puts \"title: \\\"#{title.gsub(/&/,'&amp;')}\\\"\"\n post.puts \"date: #{date.strftime('%Y-%m-%d %H:%M')}\"\n post.puts \"comments: #{comments}\"\n post.puts \"categories: #{categories}\"\n post.puts \"---\"\n post.puts \"#{content}\"\n end\nend", "def create\n @post = Post.new(params[:post])\n\n respond_to do |format|\n if @post.save\n flash[:notice] = 'Oprettet'\n format.html { redirect_to(:action => 'blog', :page => session[:page]) }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @post = Post.new(post_params)\n @post.blog = Blog.find(params[:blog_id])\n\n respond_to do |format|\n if @post.duplicate? or @blog.posts << @post\n flash[:notice] = 'Post was successfully created.'\n format.html { redirect_to blog_posts_url(:id => @post) }\n elsif @post.save\n format.html { redirect_to blog_posts_path, notice: 'Post was successfully created.' }\n format.xml { head :created, :location => post_url(@post) }\n format.json { render :show, status: :created, location: @post }\n else\n format.html { render :new }\n format.xml { render :xml => @post.errors.to_xml }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\t\t\tauthorize! 
:manage, Blog\n\t\t\t\n\t\t\t@blog = Blog.new\n\n\t\t\tif request.post?\n\t\t\t\tif @blog.save_with_params params[:blog]\n\t\t\t\t\trender json: { status: 0, result: { redirect: _route_helpers.blogs_path } }\n\t\t\t\telse\n\t\t\t\t\trender json: { status: 2 }\n\t\t\t\tend\n\n\t\t\t\treturn\n\t\t\tend\n\n\t\t\trender layout: 'layout_back'\n\t\tend", "def create\n @blog_post = BlogPost.new(blog_post_params) #need to create the blog_post_params method, goes at very bottom\n if @blog_post.save\n #got a new record in db, want to redirect if successful save\n redirect_to blog_post_path(@blog_post) #has dynamic id so must pass in\n else\n #something went wrong, have user fix errors on form\n render :new\n end\n end", "def new\n @post = Post.new #creates new instance of the/a post\n #since it's only creating an instance in memory have to create/post method create blw\n end", "def create\n\t\tparams[:blog_post][:tags] = params[:blog_post][:tags].split(',').map(&:strip)\n\t\t@blog_post = BlogPost.new(blog_post_params)\n respond_to do |format|\n if @blog_post.save\n format.html { redirect_to @blog_post, notice: 'Blog post was successfully created.' }\n format.json { render :show, status: :created, location: @blog_post }\n else\n format.html { render :new }\n format.json { render json: @blog_post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n post = Post.new\n post.title = params[:title]\n post.description = params[:description]\n post.save\n end", "def create\n params[:post][:published_at] = \"#{params[:published_date]} #{params[:date][:hour]}:#{params[:date][:minute]}:00\"\n\n if params[:post_id]\n Post.find(params[:post_id]).update_attributes(params[:post])\n this_post_id = params[:post_id]\n else\n this_post = Post.create(params[:post])\n this_post_id = this_post.id\n end\n\n redirect_to blog_path(this_post_id)\n end", "def new_post *args\n groups(:studio1).posts.new *args\n end", "def create\r\n\t\t@blog = Blog.new(params[:blog])\r\n\t @blog.posted_by = current_user\r\n\r\n\t\trespond_to do |format|\r\n if @blog.save\r\n flash[:notice] = 'Blog was successfully created.'\r\n format.html { redirect_to(@blog) }\r\n format.xml { render :xml => @blog, :status => :created, :location => @blog }\r\n else\r\n format.html { render :action => \"new\" }\r\n format.xml { render :xml => @blog.errors, :status => :unprocessable_entity }\r\n end\r\n end\r\n end", "def create\n @blog_post = BlogPost.new(blog_post_params)\n @blog_post.user_id = current_user.id\n\n respond_to do |format|\n if @blog_post.save\n format.html { redirect_to @blog_post, notice: 'Статья успешно создана.' }\n format.json { render :show, status: :created, location: @blog_post }\n else\n format.html { render :new }\n format.json { render json: @blog_post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @blog_post = @current_user.blogs.create!(params[:blog_post])\n if @blog_post.save\n flash[:notice] = \"Message save.\"\n redirect_to :controller=>\"blog_posts\", :action => \"index\"\n end\n \n\n\n end", "def create\n @blogpost = Blogpost.new(params[:blogpost])\n @blogpost.user_id = current_user.id\n\n respond_to do |format|\n if @blogpost.save\n format.html { redirect_to @blogpost, notice: 'Blogpost was successfully created.' 
}\n format.json { render json: @blogpost, status: :created, location: @blogpost }\n else\n format.html { render action: \"new\" }\n format.json { render json: @blogpost.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\r\n @blog_post = BlogPost.new(params[:blog_post])\r\n if @blog_post.save\r\n flash[:success] = \"\\\"#{@blog_post.title}\\\" song has been successfully published to your blog.\"\r\n redirect_to blog_post_path(@blog_post)\r\n else\r\n render 'new'\r\n end\r\n end", "def create\n @blogpost = Blogpost.new(blogpost_params)\n\n respond_to do |format|\n if @blogpost.save\n format.html { redirect_to @blogpost, notice: 'Blogpost was successfully created.' }\n format.json { render :show, status: :created, location: @blogpost }\n else\n format.html { render :new }\n format.json { render json: @blogpost.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\t\n\t\t@post = Post.new\n\tend", "def create\n @blog = Blog.new(params[:blog])\n\n if @blog.save\n redirect_to @blog, notice: 'Blog was successfully created.' \n else\n render action: \"new\"\n end\n end", "def create\n @post = Post.new(post_params)\n\n respond_to do |format|\n if @post.save\n @ultimo_post = Post.last\n @historico_post = HistoricoPost.new(id_post: @ultimo_post.id, title: @ultimo_post.title, text: @ultimo_post.text)\n @historico_post.save\n format.html { redirect_to @post, notice: 'Post was successfully created.' }\n format.json { render action: 'show', status: :created, location: @post }\n else\n format.html { render action: 'new' }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @myblog = Myblog.new(params[:myblog])\n\n respond_to do |format|\n if @myblog.save\n format.html { redirect_to @myblog, notice: 'Myblog was successfully created.' }\n format.json { render json: @myblog, status: :created, location: @myblog }\n else\n format.html { render action: \"new\" }\n format.json { render json: @myblog.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n respond_to do |format|\n if @post.save\n format.html { redirect_to blog_post_no_prefix_path(@blog, @post),\n notice: 'Post was successfully created.' 
}\n format.json { render :show, status: :created, location: @post }\n else\n format.html { render :new }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @post = current_user.posts.new(params[:post])\n @post.edited_at = Time.current\n @post.published_at = Time.current if params[:publish]\n if @post.save\n flash[:success] = \"Post criado com sucesso.\"\n redirect_to admin_posts_path\n else\n @page_title = \"Novo Post\"\n render :action => 'new'\n end\n end", "def new \n\t\t@post = Post.new\n\tend", "def new_resource_post\n if forum?\n post = @topic.posts.build(params[:post])\n post.person = current_person\n elsif blog?\n post = @blog.posts.new(params[:post])\n end\n post\n end", "def create\n \t# create the post\n \t# the following is the short version of getting data\n \t@post = Post.new(params[:post])\n \t# the following is the long version of getting data\n \t# @post = Post.new(params[:title], params[:url], params[:description])\n \t# @post.user_id = 1 #TODO: add in real user after we have authentication\n # @post.user_id = current_user.id\n @post.user_id = session[:user_id]\n \tif @post.save\n \t\tflash[:notice] = \"Post was created!\"\n \t\tredirect_to posts_path # \"/posts/#{@post_id}\"\n \telse # validation failure\n render :new\n end\n end", "def create\n @myblog = Myblog.new(myblog_params)\n\n respond_to do |format|\n if @myblog.save\n format.html { redirect_to @myblog, notice: 'Myblog was successfully created.' }\n format.json { render :show, status: :created, location: @myblog }\n else\n format.html { render :new }\n format.json { render json: @myblog.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @post = Post.new\n @post.title = params[:title]\n @post.description = params[:description]\n @post.save\n redirect_to post_path(@post)\n end", "def create_post\n\t\tassignment_id = params[:assignment_id]\n\n\t\tassignment = Assignment.find(assignment_id)\n\t\tgroup_id = assignment.group_id\n\n\t\t\n\t\tpost = Post.create({\n\t\t\tcontent: params[:content],\n\t\t\tpublished_at: DateTime.now\n\t\t\t})\n\n\t\tassignment.posts << post\n\t\tassignment.save\n\n\t\tpost.author = @authenticated_user\n\t\tpost.save\n\n\t\tredirect_to group_db_show_url(group_id)\n\t\treturn\n\tend", "def create\n @post = Post.new\n @post.PostID = params[:id].to_i\n @post.title = params[:title]\n\n r = PostRepository.new\n\n respond_to do |format|\n if r.save(@post)\n flash[:notice] = 'Post was successfully created.'\n format.html { redirect_to(@post) }\n format.xml { render :xml => @post, :status => :created, :location => @post }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n\n\t\t@post = Post.new(post_params)\n\t\tif @post.tag == \"\"\n\t\t\tredirect_to :back\n\t\telsif @post.body == \"\"\n\t\t\tredirect_to :back\n\t\telse\n\t\t\[email protected]\n\t\t\tredirect_to \"#\"\n\t\t\tflash[:info] = \"Post submited!\"\n\t\tend\n\tend", "def create\n # @post = current_user.posts.new()\n @post = Post.new\n @post.user_id = 1\n @post.content = params[:content]\n @post.anonymous_flag = params[:password]\n @post.weight = 1 #本体自身权重\n @post.epoch_time = Time.now.to_i\n @post.hot = RankingUtils.post_hot(@post.weight, @post.epoch_time)\n @post.publish_date = DateUtils.to_yyyymmdd(Date.today)\n is_post_saved = @post.save\n # 创建问题主题关联\n is_post_theme_saved = save_post_theme\n # 创建用户行为(发布问题)\n is_activities_saved = 
save_activities(@post.id, \"Post\", 3)\n\n if is_post_saved and is_post_theme_saved and is_activities_saved\n render :json => {:code => ReturnCode::S_OK }\n else\n render :json => {:code => ReturnCode::FA_WRITING_TO_DATABASE_ERROR }\n end\n end", "def create\n\t\t@post = Post.new(params[:post].permit(:title, :text))\n\t\t\n\t\tif @post.save\n\t\t\n\t\t\t#tell browser to issue anothe request\n\t\t\tredirect_to @post\n\t\telse\n\t\t\n\t\t\t#render is used so that @post is passed back to the\n\t\t\t#new template when it is rendered\n\t\t\trender 'new'\n\t\tend\n\tend", "def new\n # responsbile for creating a new instance of a post\n @post = Post.new\n end", "def create\n @post = Post.create!(params[:post]) \n end", "def create\n @post = Post.new(params[:post])\n authorize! :create, @post\n unless params[:author].blank?\n @post.users << User.find(params[:author])\n end\n logger.debug \"Going to save #{Rails.logger.level} #{@post.title.inspect} #{params[:post]}\"\n respond_to do |format|\n if @post.save_post?\n logger.debug \"Saved successfully #{@post.title.inspect}\"\n format.html { redirect_to(@post, :notice => 'Post was successfully created.') }\n format.xml { render :xml => @post, :status => :created, :location => @post }\n else\n logger.debug \"Could not save #{@post.title.inspect}\"\n flash[:alert] = \"Slug cannot be created\"\n format.html { render 'new' }\n format.xml { render :xml => @post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create_post(message, author_name, author_email, author_url = nil, ip_address = nil, created_at = nil)\n new_post_hash = API.create_post(forum_key, id, message, author_name, author_email, author_url, ip_address, created_at)\n new_post = Post.new(new_post_hash.merge(default_hash))\n @posts << new_post if @posts\n new_post\n end", "def create\n # @blog_entry = Blog::Entry.find(params[:entry_id])\n @blog_article = Blog::Article.new(blog_article_params)\n @blog_article.user = current_user\n\n respond_to do |format|\n if @blog_article.save\n format.html { redirect_to @blog_article, notice: 'Article was successfully created.' 
}\n format.json { render :show, status: :created, location: @blog_article }\n else\n format.html { render :new }\n format.json { render json: @blog_article.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @blog.author = current_person\n @blog.published_at = Time.now if params[:commit] == 'Publish now'\n \n respond_to do |format|\n if @blog.save\n format.html { redirect_to @blog, notice: \"Blog was successfully created.\" }\n format.json { render :show, status: :created, location: @blog }\n else\n format.html { render :new }\n format.json { render json: @blog.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n params[:post_date] = DateTime.now\n @post = Roxiware::Blog::Post.new({:person_id=>current_user.person.id,\n :post_date=>DateTime.now.utc,\n :blog_class=>(params[:blog_class] || \"blog\"),\n :post_content=>\"Content\",\n :post_title=>\"Title\",\n :comment_permissions=>\"default\",\n :post_status=>\"publish\"}, :as=>\"\")\n\n if((@role == \"super\") || (@role == \"admin\"))\n params[:blog_post][:post_content] = Sanitize.clean(params[:blog_post][:post_content], Roxiware::Sanitizer::EXTENDED_SANITIZER)\n else\n params[:blog_post][:post_content] = Sanitize.clean(params[:blog_post][:post_content], Roxiware::Sanitizer::BASIC_SANITIZER)\n end\n\n respond_to do |format|\n @post.assign_attributes(params[:blog_post], :as=>@role)\n if @post.save\n format.html { redirect_to @post, :notice => 'Blog post was successfully created.' }\n format.json { render :json => @post.ajax_attrs(@role) }\n else\n format.html { redirect_to @post, :alert => 'Failure in creating blog post.' }\n format.json { render :json=>report_error(@post)}\n end\n end\n end", "def create\n Post.create(params[:post])\n redirect_to new_post_path\n end", "def new\n @post = Post.new()\n end", "def new\n @post = Post.new()\n end", "def new\n @post = Post.new()\n end", "def create\n @resource = Post.find_by(user_id: current_user.id, url: params[\"post\"][\"url\"])\n if @resource.present?\n # refer article recentry or not\n @refer_recently = @resource.created_at > DateTime.now - 7.days\n else\n @resource = Post.create!(user_id: current_user.id, url: params[\"post\"][\"url\"], title: params[\"post\"][\"title\"])\n UsersPost.create! user_id: current_user.id, post_id: @resource.id\n @refer_recently = false\n end\n end", "def create\n isSavePost = PostService.createPost(post_params)\n if isSavePost\n redirect_to posts_path\n else\n render :new\n end\n end", "def create\n @post = postable.posts.build(params[:post])\n @post.author = current_agent\n\n respond_to do |format|\n if @post.save\n flash[:success] = t('post.created')\n format.html { redirect_to(polymorphic_path([ postable.container, postable ], :anchor => dom_id(@post))) }\n format.xml { render :xml => @post, :status => :created, :location => @post }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @blog = Blog.new(blog_params) #these are the allowable fields - set below\n\n respond_to do |format|\n if @blog.save\n # any reference to @blog will mean the same as blog_path(@blog)\n format.html { redirect_to @blog, notice: 'Blog was successfully created.' 
}\n #format.json { render :show, status: :created, location: @blog }\n else\n format.html { render :new }\n #format.json { render json: @blog.errors, status: :unprocessable_entity }\n end\n end\n end", "def new \n @post = Post.new\n end", "def createPost(post_params)\n post = Post.new(post_params)\n post.status = 1\n isSavePost = PostRepository.createPost(post)\n end", "def create\n @blog = Blog.new(params[:blog])\n\n respond_to do |format|\n if @blog.save\n format.html { redirect_to @blog, notice: 'Blog was successfully created.' }\n format.json { render json: @blog, status: :created, location: @blog }\n else\n format.html { render action: \"new\" }\n format.json { render json: @blog.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @blog = Blog.new\n end", "def new\n @blog = Blog.new\n end", "def create\n @post = Post.new(params[:post])\n @post.published = true\n\n respond_to do |format|\n if @post.save\n flash[:notice] = 'Запись успешно создана.'\n format.html { redirect_to(@post) }\n format.xml { render :xml => @post, :status => :created, :location => @post }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def new\n\t\t@blog = Blog.new\n\tend", "def create\n @post = Post.new(params[:post])\n\n if @post.save\n redirect_to posts_path, :notice => t(:post_added)\n else\n render :action => \"new\"\n end\n end", "def new\n @post = Post.new()\n end", "def new\n\t @post = Post.new\n\tend", "def create_blog!(attrs)\n attrs[:user_id] = self.id\n BlogCreationForm.create(attrs)\n end", "def create\n @post = Post.create!(post_params)\n end", "def create\n\t\t@post = Post.new(post_params)\n\t\tif @post.save\n\t\t\tredirect_to @post, notice: \"Hello Shoaib You are article is created\"\n\t\telse\n\t\t\trender 'new', notice: \"Oh not I unable to save your post\"\n\t\tend\n\tend", "def create\n @post = Post.new(post_params)\n\n end", "def create\n # @post = Post.new(params.require(:post).permit(:title, :description))\n @post = Post.new(post_params(:title, :description))\n @post.save\n redirect_to post_path(@post)\n end", "def create\n @post = Post.new(params[:post])\n\n respond_to do |format|\n if @post.save\n save_notice(\"文章创建成功,可以 <a href=\\\"#{url_for(:controller => \"/posts\", :action => \"show\", :slug => @post.slug)}\\\" target=\\\"_blank\\\">点击这里</a> 查看\")\n format.html { redirect_to :controller => \"posts\", :action => \"index\" }\n format.xml { render :xml => @post, :status => :created, :location => @post }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def new\r\n @post = Post.new\r\n end", "def create\n @post = Post.new(params[:post])\n if @post.save\n redirect_to post_path(@post)\n else\n render :action => \"new\"\n end\n end", "def create\n \t# collects nested attributes, for post & comment, from params\n new_post = params.require(:post).permit(:body, :link, comments_attributes: [:body])\n\n \tpost = Post.create(new_post)\n \tredirect_to post_path(post.id)\n end" ]
[ "0.8236085", "0.8045422", "0.80337363", "0.8011226", "0.7987851", "0.7932013", "0.7822921", "0.77063316", "0.763832", "0.75655353", "0.756449", "0.756449", "0.75231963", "0.7517265", "0.7493231", "0.745449", "0.745114", "0.745114", "0.7430354", "0.7428026", "0.7416549", "0.741419", "0.7413098", "0.7397657", "0.73938966", "0.73938966", "0.73869526", "0.73858076", "0.7385773", "0.738244", "0.73823154", "0.73721445", "0.735493", "0.73482156", "0.734475", "0.7334693", "0.73090154", "0.7297035", "0.728821", "0.7287029", "0.7278981", "0.72677845", "0.72582114", "0.72401345", "0.723353", "0.72172964", "0.72065204", "0.72012943", "0.7198803", "0.7196672", "0.71799517", "0.71745336", "0.7164091", "0.71564525", "0.71384734", "0.713159", "0.71224254", "0.7121306", "0.71197504", "0.7089745", "0.706561", "0.70591784", "0.7049989", "0.7048618", "0.70460516", "0.70430994", "0.7032443", "0.702308", "0.7019794", "0.7010271", "0.7002518", "0.6990745", "0.69866467", "0.69862944", "0.69734323", "0.69665813", "0.69665813", "0.69665813", "0.69571877", "0.6952571", "0.6952386", "0.6935517", "0.69322264", "0.6927085", "0.69267", "0.69236076", "0.69236076", "0.69220734", "0.6920874", "0.691879", "0.689498", "0.6881457", "0.68809575", "0.68741393", "0.68689275", "0.6868266", "0.68674415", "0.68662655", "0.68643737", "0.68635386", "0.6857335" ]
0.0
-1
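The labelled document for this query is only an empty new action (hence the 0.0 score and -1 rank above), while the negatives flesh out the full create flow. A minimal sketch of the conventional Rails new/create pair; the Post model and parameter names are assumed purely for illustration:

# Illustrative sketch, not part of the dataset: conventional Rails actions
# for creating a blog post. Model and attribute names are assumptions.
class PostsController < ApplicationController
  def new
    @post = Post.new
  end

  def create
    @post = Post.new(post_params)
    if @post.save
      redirect_to @post, notice: "Post was successfully created."
    else
      render :new
    end
  end

  private

  def post_params
    params.require(:post).permit(:title, :body)
  end
end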
Execute block after some time
def after(delay_sec, &block)
  raise "Missing implementation 'after'"
end
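The labelled document above only raises, leaving the delayed execution unimplemented. A minimal thread-based sketch of one way to satisfy the "execute block after some time" contract (this is an assumption about the intended semantics; several of the negatives that follow use event-loop timers such as EM.add_timer instead):

# Illustrative sketch, not part of the dataset: run the block on a
# background thread after delay_sec seconds.
def after(delay_sec, &block)
  Thread.new do
    sleep(delay_sec)
    block.call
  end
end

t = after(2) { puts "fired after two seconds" }
t.join # join the returned thread if the program would otherwise exit first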
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def after(interval, &block); end", "def with_timeout(time, &block)\n Timeout.timeout(time) do\n block.call(self)\n end\n end", "def timeout_after(time); end", "def on_timeout(options = {})\n timeout = options[:timeout] || 0.5\n\n sleep timeout\n\n yield\n end", "def run_after(delay, &block)\n NSTimer.scheduledTimerWithTimeInterval( delay,\n target: block,\n selector: \"call:\",\n userInfo: nil,\n repeats: false)\n end", "def done delay=nil, &block\n if delay\n EM.add_timer delay, &block\n else\n block.call\n end\n end", "def yield\n pause if time_up?\n end", "def eventually(label, &block)\n current_time = Time.now\n timeout_treshold = current_time + TIMEOUT\n while (block.call == false) && (current_time <= timeout_treshold) do\n sleep 5\n current_time = Time.now\n end\n if (current_time > timeout_treshold)\n fail \"Action '#{label}' did not resolve within timeout: #{TIMEOUT}s\"\n end\nend", "def wait(timeout, &block)\n end_time = @end_time || (current_time + timeout)\n loop do\n yield(block)\n @remaining_time = end_time - current_time\n break if @remaining_time.negative?\n end\n end", "def run_nonblock(&block)\n @timeout = 0\n run &block\n end", "def after!(time, &block)\n Delay.new(@native, time, &block)\n end", "def after_limit\n response = yield\n update!(response)\n sleep(wait_time)\n response\n end", "def do_later\n at_exit{ yield }\n end", "def perform_after(time, block=nil)\n task = Concurrent::ScheduledTask.new(time) do\n block = ->(){ yield } unless block\n self.async.perform_now block\n end\n task.execute\n task\n end", "def after_wait(&block)\n @after_wait_block = block\n end", "def defer(&block)\n timeout(0, &block)\n end", "def call(timeout)\n @time_out_at = read_clock + timeout\n\n yield\n end", "def brute_wait(delay)\n sleep(delay)\n end", "def wait_until(timeout=10, &block)\n time = Time.now\n success = false\n until success\n if (Time.now - time) >= timeout\n raise \"Waited for #{timeout} seconds, but block never returned true\"\n end\n sleep 0.5\n success = yield\n end\n end", "def timeout!; end", "def after(&block)\n handle(1, &block)\n end", "def delay; end", "def delay; end", "def delay; end", "def add_timeout(seconds, &block)\n raise_not_implemented\n end", "def timeout; end", "def timeout; end", "def timeout; end", "def completed_after(ms)\n raise \"Cannot redefine completion block\" if @complete_block\n @complete_block = Proc.new do\n time = ms\n time = rand(3.seconds) if ms == :random_period\n @timer_name = \"completed_after_#{Time.now.to_i}\"\n after(time, :name => @timer_name, :persistent => true) { complete_run }\n end\n end", "def sleep(dur=0) end", "def send_blocking_command(command, _timeout, &block); end", "def after(milliseconds, &block)\n Thread.new do\n sleep(milliseconds/1000.0)\n block.call\n end\n end", "def timed(message=nil, &block)\n start!\n out(message) if message\n yield\n done!\n end", "def finish_in(seconds, &block)\n @explicit_finish = true\n EM.add_timer(seconds) do\n block.call if block\n EM.stop\n end\n end", "def timeout(ms = 0)\n `setTimeout(function(){#{yield}},#{ms})`\n end", "def wait; end", "def wait; end", "def wait; end", "def sleep\n sleep_after(0)\n end", "def time_block\n start = Time.now\n yield\n time = Time.now - start\n puts \"Block took basically 0 time\" if time < 0.001\n raise \"Block took #{time} to execute\" if time > 0.001\nend", "def later(tick, &block)\n Bukkit.getScheduler.scheduleSyncDelayedTask(@plugin, block, tick)\n end", "def go_then_wait\n finish_time = Time.now.in_time_zone('London') + 
MINIMUM_WAIT_TIME.second\n\n yield\n ensure\n sleep (finish_time - Time.now.in_time_zone('London')).clamp(0, MINIMUM_WAIT_TIME).seconds\n end", "def after_block_boundary?; end", "def on_timeout\n trigger\n reset\n end", "def finish\n @time = 0.0\n @trigger.()\n end", "def end_after_delay\n # this surrenders Thread execution, so Heroku might charge less, \n # time limit is in minutes (so we multiply by 60)\n sleep self.time_limit * 60 \n self.end!\n end", "def implicit_wait=(seconds); end", "def with_timeout( timeout, &block )\n expire = Time.now + timeout.to_f\n sleepy = @sleep_in_ms / 1000.to_f()\n # this looks inelegant compared to while Time.now < expire, but does not oversleep\n loop do\n return true if block.call\n log :debug, \"Timeout for #{@key}\" and return false if Time.now + sleepy > expire\n sleep(sleepy)\n # might like a different strategy, but general goal is not use 100% cpu while contending for a lock.\n end\n end", "def run_command interval, duration\n (duration / interval).times do \n yield\n sleep(interval)\n end\nend", "def wait_for(timeout = 30, &block)\n start = Time.now\n while true\n raise RuntimeError, \"Timed out waiting for event\" if Time.now - start > timeout\n\n break if yield\n\n sleep(0.1)\n end\n end", "def deliver_later_with_additional_delay\n yield.deliver_later(wait: 1.second)\n end", "def wait!\n now = Time.now.utc.to_i\n duration = (reset.to_i - now) + 1\n\n sleep duration if duration >= 0\n\n yield if block_given?\n\n duration\n end", "def delay\n sleep(2)\n end", "def wait_for_seconds\n\t\tsleep(1 * rand + 1)\n\tend", "def delay_1() sleep(3) end", "def on_timeout\n trigger\n reset\n end", "def wait!\n sleep(@sleep)\n end", "def timeout_at; end", "def wait_until(timeout=20, &block)\n time_to_stop = Time.now + timeout\n until yield do\n sleep(0.1) # much less cpu stress\n break if Time.now > time_to_stop\n end\nend", "def delay_query\n @time ||= Time.now\n delay = 0.5\n wait_time = delay - (Time.now - @time).to_f\n sleep(wait_time) unless wait_time <= 0\n x = yield\n @time = Time.now\n return x\n end", "def trigger\n @timed_out = false\n @expires = Time.now + @period\n unless @thread\n @thread = Thread.new do\n begin\n begin\n sleepytime = @expires - Time.now\n while sleepytime > 0.0\n sleep(sleepytime)\n sleepytime = @expires - Time.now\n end\n @timed_out = true\n @expires += @period if @repeats\n @block.call if @block\n end while @repeats\n rescue StopTimerException\n @expires=nil\n ensure\n @thread = nil\n end\n end\n end\n end", "def schedule(time, callback); end", "def task_run_later(&block) \n task_later(&block).start\n end", "def extended_timeout\n puts \"Please wait, this could take a few minutes ...\"\n old_timeout = ::DTK::Client::Conn.get_timeout()\n ::DTK::Client::Conn.set_timeout(EXTENDED_TIMEOUT)\n result = yield\n ::DTK::Client::Conn.set_timeout(old_timeout)\n result\n end", "def after(delay, &block)\n DelayedExecution.after(delay, &block)\n end", "def wait\n sleep 0.0001\n end", "def regularly(time, &block)\n Thread.new do\n while true\n sleep time\n synchronize &block\n end\n end\n end", "def sleep_some_time\n sleep 1\n end", "def perform_sleepily!(timeout, &block)\n @on_complete = block if block_given?\n Thread.new {\n before_send.call(self) if before_send\n \n nap = sleepy.get(\"#{round_time(Time.new.to_i, timeout)}:#{uri}\") rescue nil\n \n unless nap.blank?\n STDERR.puts \"Return cached result #{nap.inspect}\"\n nap\n else\n req = http.request(request)\n \n response = Response.new(req, self)\n begin\n if response.redirected?\n 
response = response.follow_redirect\n else\n on_complete.call(response) if on_complete\n response\n end\n if response.code && response.code == 200\n sleepy.set(\"#{round_time(Time.new.to_i, timeout)}:#{uri}\", response)\n sleepy.set(\"0:#{uri}\", response)\n end\n rescue\n response = sleepy.get(\"0:#{uri}\") rescue nil\n end\n response\n end\n }\n end", "def delay(seconds); end", "def delay(seconds); end", "def yield\n wait\n callback\n end", "def next (timeout)\n _c_await_next timeout\n end", "def future(&block)\n EM.next_tick { block.call }\n end", "def long_calculation\n sleep(5) # do some complex operation like hit an API\nend", "def delay(time)\n @context.backend.delay(time)\n end", "def do_later(random_sleep_max_time = SLEEP_GENERIC, multiplier = 1, min_base = 0, &block)\n Thread.start do\n random_sleep(random_sleep_max_time, multiplier, min_base)\n block.call\n end\n end", "def checkTimeout_Flooding()\n\tsleep(2);\n\tputs \"time out!\";\n\n\nend", "def now_and_after(interval, &block); end", "def schedule_call(&block)\n EM::next_tick(&block)\n end", "def sleep_loop\n (1..30).each { |_| some_method }\n end", "def _delay\n class << self; self; end.module_eval { yield }\n end", "def send_blocking_command(command, _timeout, &block)\n send_command(command, &block)\n end", "def wait\n\tend", "def timer\n # 2. start executing the method\n start_time = Time.now\n yield # 3. jump out of 'timer', start execuding the block\n\n # 6. continue executing the method as per usual\n end_time = Time.now\n\n puts \"Elapsed time: #{end_time - start_time} s\"\nend", "def wait_for_launching\n sleep @delay\n end", "def timeout\n super\n end", "def timeout\n super\n end", "def simulate_work(time)\n return\n sleep time unless Softcover::test?\n end", "def wait(timeout = nil)\n synchronize do\n touch\n # TODO interruptions ?\n super timeout if incomplete?\n self\n end\n end", "def shutdown_after(timeout); end", "def snooze(t)\n sleep(t)\n end", "def blocks() end", "def after(duration_ms)\n Thread.new() do\n begin\n sleep(duration_ms/1000.0)\n yield unless self.connected?()\n rescue Exception => e\n $stdout.puts \"#{e} :\\n #{e.backtrace.join(\"\\n \")}\"\n end\n end\n end", "def then(&block)\n start_time = @_last_timer[2].nil? ? @_last_timer[1] : @_last_timer[2]\n @_timers << [@_last_timer[0], start_time, nil, block]\n end", "def wait\n true\n end", "def wait(duration)\n for i in 0...duration\n update_basic\n end\n end", "def after(time, options = {}, &block)\n if options[:name]\n return if timer_exists?(options[:name]) && options[:preserve]\n stop_timer(options[:name])\n end\n\n ms = $window.frame # Gosu::milliseconds()\n @_last_timer = [options[:name], ms + time, nil, block]\n @_timers << @_last_timer\n self\n end", "def wait_for(&block)\n Retriable.retriable tries: 15, base_interval: 0.05, max_interval: 1.second do\n raise \"Exceeded max retries while waiting for block to pass\" unless block.call\n end\n end", "def time\n @began = Time.now\n yield\n ensure\n self.info \"Finished in %.1f sec.\" % (Time.now - @began)\n end" ]
[ "0.734226", "0.7173641", "0.7108369", "0.69678694", "0.6843299", "0.6763355", "0.67600554", "0.67062706", "0.669345", "0.66597766", "0.6650306", "0.66306275", "0.6605968", "0.66040796", "0.6571928", "0.65424556", "0.65280503", "0.6525048", "0.64668745", "0.6466149", "0.64620775", "0.645845", "0.645845", "0.645845", "0.6450275", "0.6439942", "0.6439942", "0.6439942", "0.6433245", "0.6426887", "0.64134973", "0.63664913", "0.63425803", "0.63267785", "0.6325054", "0.632332", "0.632332", "0.632332", "0.6282926", "0.62571913", "0.6251985", "0.6251562", "0.62485385", "0.6243283", "0.6241567", "0.62362176", "0.62259454", "0.6225093", "0.6217138", "0.62049603", "0.6204203", "0.62016505", "0.6188907", "0.61766666", "0.61556715", "0.6154076", "0.6153465", "0.6149268", "0.614844", "0.61459297", "0.6138867", "0.6138628", "0.61352867", "0.6128789", "0.6127904", "0.6127343", "0.6123612", "0.6119588", "0.61086303", "0.6101665", "0.6101665", "0.61016023", "0.6100756", "0.6082911", "0.60786146", "0.6066087", "0.6040584", "0.6025142", "0.6024594", "0.60009843", "0.6000027", "0.59930617", "0.5986985", "0.59867567", "0.5980975", "0.59739304", "0.5972021", "0.5972021", "0.5968954", "0.5965664", "0.5965389", "0.59412074", "0.59351736", "0.59261554", "0.5923056", "0.5911144", "0.59093595", "0.59087557", "0.58906347", "0.589035" ]
0.6165447
54
Periodically call block every interval_sec
def every(interval_sec, &block)
  raise "Missing implementation 'every'"
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def interval duration, &block\n `setInterval(function() { #{block.call} }, duration * 1000)`\n end", "def every(interval, &block)\n Timer.new(self, interval, true, block)\n end", "def periodically(interval, &block)\n EventMachine::PeriodicTimer.new(interval, &block)\n end", "def periodically(interval, &block)\n EventMachine::PeriodicTimer.new(interval, &block)\n end", "def sleep_loop\n (1..30).each { |_| some_method }\n end", "def run_command interval, duration\n (duration / interval).times do \n yield\n sleep(interval)\n end\nend", "def poll(interval, &block)\n until @progress =~ /(COMPLETED|ERROR)/ do\n update\n yield self if block_given?\n sleep(interval)\n end\n nil\n end", "def every(name, interval = 60, initial = nil, &block)\n Thread.new(initial) { |context|\n while true\n Kernel.sleep(interval)\n MObject.debug(\"every(#{name}): fires - #{context}\")\n begin\n if ((context = block.call(context)) == nil)\n break\n end\n rescue Exception => ex\n bt = ex.backtrace.join(\"\\n\\t\")\n MObject.error(\"every(#{name})\",\n \"Exception: #{ex} (#{ex.class})\\n\\t#{bt}\")\n end\n end\n MObject.debug(\"every(#{name}): finishes\")\n }\n end", "def schedule_every(n, &block)\r\n\r\n while true do\r\n before = Time.now\r\n\r\n block.call\r\n \r\n elapsed = Time.now - before\r\n interval = n - elapsed\r\n \r\n @logger.debug \"orders processing/delivery take #{elapsed} seconds.\"\r\n \r\n sleep(interval) if interval > 0\r\n end\r\n\r\nend", "def after(interval, &block); end", "def repeat_call(repetitions, interval)\n repetitions.times do\n yield\n sleep(interval)\n end\nend", "def every(milliseconds, &block)\n Thread.new do\n loop do\n block.call\n sleep(milliseconds/1000.0)\n end\n end\n end", "def onTimeout(interval, &block)\n \n raise ArgumentError unless interval.kind_of? Numeric\n \n ref = {:interval => interval.to_i, :block => block}\n \n with_mutex do \n if @queue.empty?\n @queue << ref\n else \n @queue.each.with_index do |v, i|\n if v[:interval] >= interval\n v[:interval] -= interval\n @queue.insert(i, ref) \n break\n else\n ref[:interval] -= v[:interval] \n if @queue.last == v\n @queue << ref\n break\n end\n end\n end\n end \n @update.push ref \n end\n \n ref\n end", "def poll(interval = 1.0)\n first = true\n loop do\n start = Time.now\n each_member { poll! 
} unless first\n first = false\n yield\n delay = Time.now - start\n if delay >= interval\n sleep interval\n else\n sleep interval - delay\n end\n end\n end", "def regularly(time, &block)\n Thread.new do\n while true\n sleep time\n synchronize &block\n end\n end\n end", "def sleepy_run\n cleanup if should_cleanup?\n sleep(@interval)\n increment_cycle\n end", "def every n, &block\n loop do\n log_duration { yield }\n sleep n \n end\n end", "def run\n Thread.new(interval, server) do |i, s|\n loop do\n s.refresh!\n sleep(i)\n end\n end\n end", "def ticker(interval)\n loop do\n sleep(interval)\n @q.push(:nudge)\n end\n end", "def run\n loop do\n tick\n sleep settings.service.polling_interval\n end\n end", "def run\n loop do\n tick\n sleep settings.service.polling_interval\n end\n end", "def now_and_after(interval, &block); end", "def watcher\n zero_start = true \n loop do\n @polling_time.each do |time|\n break if @end_flag\n sleep_interval(time, zero_start)\n zero_start = false\n yield\n end\n break if @end_flag\n end\n end", "def interval_sleep(sec)\n unless SELF_PIPE.empty?\n # mimic sleep with a timeout on IO.select, listening for signals setup in #setup_signal_handlers\n return unless IO.select([ SELF_PIPE[0] ], nil, nil, sec)\n\n signal = SELF_PIPE[0].getc.chr\n\n return if signal == IMMEDIATE_RUN_SIGNAL # be explicit about this behavior\n\n # we need to sleep again after reconfigure to avoid stampeding when logrotate runs out of cron\n if signal == RECONFIGURE_SIGNAL\n reconfigure\n interval_sleep(sec)\n end\n else\n sleep(sec)\n end\n end", "def sleep_time\n 60\n end", "def sleep(dur=0) end", "def poll(seconds = 5)\n count = seconds * 100\n\n while count > 0 && !yield\n count -= 1\n sleep 0.01\n end\n\n raise 'Poll timeout.' unless yield\nend", "def every(interval, &block)\n action = Action.new({\n :interval => interval,\n :recur => true\n },\n &block\n )\n to_run.push(action)\n reset\n action\n end", "def trigger\n @timed_out = false\n @expires = Time.now + @period\n unless @thread\n @thread = Thread.new do\n begin\n begin\n sleepytime = @expires - Time.now\n while sleepytime > 0.0\n sleep(sleepytime)\n sleepytime = @expires - Time.now\n end\n @timed_out = true\n @expires += @period if @repeats\n @block.call if @block\n end while @repeats\n rescue StopTimerException\n @expires=nil\n ensure\n @thread = nil\n end\n end\n end\n end", "def step\t\n\t\t@tick += 1\n\t\t\n\t\tif @tick == @interval\n\t\t\[email protected]\n\t\t\treset\n\t\tend\n\tend", "def run_after(delay, &block)\n NSTimer.scheduledTimerWithTimeInterval( delay,\n target: block,\n selector: \"call:\",\n userInfo: nil,\n repeats: false)\n end", "def after(interval, &block)\n Timer.new(self, interval, false, block)\n end", "def poll_every_n_seconds(send,n=1)\n\t\t\ttimer = Timers::Group.new\n\t\t\tevery_seconds = timer.every(n) { poll(send) }\n\t\t\tloop { timer.wait } \n\t\tend", "def sleep\n sleep_after(0)\n end", "def start\n raise \"the interval has been aborted\" if aborted?\n return unless stopped?\n\n @stopped = false\n\n @id = `#@window.setInterval(#@block, #@every * 1000)`\n end", "def worker_check_interval(interval); end", "def poll_every_n_minutes(send,n=1)\n\t\t\ttimer = Timers::Group.new\n\t\t\tevery_seconds = timer.every(60*n) { poll(send) }\n\t\t\tloop { timer.wait } \n\t\tend", "def once_per_frame\n last = Time.now\n while true\n yield\n now = Time.now\n _next = [last + (1 / 60), now].max\n sleep(_next - now)\n last = _next\n end\nend", "def once_per_frame\n last = Time.now\n while true\n yield\n now = Time.now\n 
_next = [last + (1 / 60), now].max\n sleep(_next - now)\n last = _next\n end\nend", "def after(interval, &block)\n action = Action.new(\n {:interval => interval},\n &block\n )\n to_run.push(action)\n reset\n action\n end", "def watch_interval; end", "def wait(duration)\n for i in 0...duration\n update_basic\n end\n end", "def add_timeout(interval, &block)\n @timer.add(interval, &block)\n end", "def every(interval_sec, &block)\n # to allow canceling the periodic timer we need to\n # hand back a reference to it which responds to 'cancel'\n # As this is getting rather complex when allowing for\n # registration before the EM is up and running, we simply throw\n # and exception at this time.\n raise \"Can't handle 'every' registration before the EM is up\" unless EM.reactor_running?\n # if EM.reactor_running?\n # EM.add_periodic_timer(interval_sec, &block)\n # else\n # @deferred << lambda do\n # EM.add_periodic_timer(interval_sec, &block)\n # end\n # end\n t = EM.add_periodic_timer(interval_sec) do\n begin\n block.call(t)\n rescue => ex\n error \"Exception '#{ex}'\"\n debug \"#{ex}\\n\\t#{ex.backtrace.join(\"\\n\\t\")}\"\n end\n end\n t\n end", "def wait_until_true timeout=3, interval=0.1\n time_limit = Time.now + timeout\n loop do\n result = yield\n return if result || Time.now >= time_limit\n sleep interval\n end\n end", "def watch_delay\n @conn.watch(Web.keys[:delay]) do\n yield if block_given?\n end\n end", "def sleep_some_time\n sleep 1\n end", "def create_periodic_timer(interval, &block)\n Timer.new(self, interval, :periodic => true, &block)\n end", "def every(seconds, &block)\n if !seconds.is_a?(Numeric) || seconds <= 0\n raise ArgumentError, 'Positive number of seconds is expected as an argument'\n end\n wrap_block = proc do\n @every_block_opts ||= {}\n opts = @every_block_opts[block.object_id] || {}\n next_run_at = opts[:next_run_at]\n next if next_run_at && next_run_at >= Time.now\n block.call\n opts[:next_run_at] = Time.now + seconds\n @every_block_opts[block.object_id] = opts\n end\n register_event_handler(:every, wrap_block)\n end", "def wait(interval)\n wq = Queue.new\n onTimeout(interval) do\n wq.push nil\n end\n wq.pop\n end", "def schedule_call(&block)\n EM::next_tick(&block)\n end", "def brute_wait(delay)\n sleep(delay)\n end", "def implicit_wait=(seconds); end", "def run()\n while(true)\n cycle() ;\n sleep(2) ;\n end\n end", "def with_period_of(seconds)\n raise ArgumentError, \"No block given!\" unless block_given?\n seconds = seconds.to_i\n\n # Main loop, does not stop until the loop enters the shutdown mode\n loop do\n # Run user's code\n yield\n\n # Stop if we're in shutdown mode\n if shutdown?\n debug(\"Shutdown: stopping the loop\")\n break\n end\n\n # Sleep before the next iteration\n sleep_with_shutdown_support(seconds)\n end\n end", "def work(interval = 5.0)\n end", "def sleepfor \n\ttimetorun = $hours * 60 * 60\n\tsleep(timetorun)\nend", "def start\n Thread.new(interval, pool) do |i, p|\n while (true)\n sleep(i)\n p.reap\n end\n end\n end", "def start\r\n loop do\r\n t1 = Time.now\r\n update\r\n t2 = Time.now\r\n update_duration = t2 - t1\r\n \r\n milliseconds = (@update_interval/1000 - update_duration)\r\n sleep(milliseconds) if milliseconds > 0\r\n end\r\n end", "def fire_and_forget(opts = {})\n interval = opts[:interval].is_a?(Fixnum) ? 
opts[:interval] : nil\n loop do\n begin\n self.work_once\n rescue @@EOQ => e\n break\n end\n sleep(interval) if interval\n end\n end", "def every(interval, unit = :minutes, options = {}, &block)\n raise \"Not a Integer: #{interval}\" unless interval.is_a? Integer\n raise \"Interval less than 1\" if interval < 1\n\n opts = {\n run_at_start: true\n }.merge(options)\n\n case unit\n when :min, :mins, :minute, :minutes\n when :hr, :hrs, :hour, :hours, :horse\n interval *= 60\n else\n raise \"Unknown unit: #{unit}\"\n end\n $bot[:periodic] << {\n interval: interval,\n remaining: opts[:run_at_start] ? 0 : interval,\n block: block\n }\n end", "def wait_for_seconds\n\t\tsleep(1 * rand + 1)\n\tend", "def dot_and_sleep(interval)\n print('.')\n sleep(interval)\n end", "def blocking_thread\n poller_thread\n end", "def lock_for(duration)\n @mutex.synchronize { sleep duration }\n end", "def waiting actual\n interval = 0.5\n i = 0\n while true\n if actual == yield\n return 'true!!'\n end\n i += interval\n puts '.'\n sleep i\n end\n 'false!!'\nend", "def loop(num_ticks, tick_time)\n @tick_time = tick_time\n \n (0..num_ticks).each do |tick|\n time = java.lang.System.currentTimeMillis\n\n @tick = tick\n yield tick\n\n sleep_time = @tick_time - (java.lang.System.currentTimeMillis - time)\n java.lang.Thread.sleep(sleep_time) if sleep_time > 0\n end\n end", "def periodically_trigger_task(task_name = nil, interval = 1, &block)\n periodically(interval) do\n trigger_task(task_name, &block)\n end\n end", "def periodically_trigger_task(task_name = nil, interval = 1, &block)\n periodically(interval) do\n trigger_task(task_name, &block)\n end\n end", "def initialize(interval, callback=nil, &blk)\n fire = proc {\n (callback || blk).call\n trigger(:fired)\n }\n @timer = NSTimer.scheduledTimerWithTimeInterval(interval,target: fire, selector: 'call:', userInfo: nil, repeats: false)\n end", "def while_time_remaining(&block)\n @interval = Hitimes::Interval.new\n @interval.start\n\n while time_remaining?\n yield @remaining\n end\n ensure\n @interval.stop\n @interval = nil\n end", "def prepare_interval(period, &block); end", "def do_later(random_sleep_max_time = SLEEP_GENERIC, multiplier = 1, min_base = 0, &block)\n Thread.start do\n random_sleep(random_sleep_max_time, multiplier, min_base)\n block.call\n end\n end", "def schedule(time, callback); end", "def delay_flush\n loop do\n begin\n flush if interval_ready?\n sleep(0.01)\n rescue => e\n $stderr.puts(\"at=start-error error=#{e.message}\") if ENV['DEBUG']\n end\n end\n end", "def while(frames, &block)\n frames.times do\n block.call\n wait_internal\n end\n end", "def setInterval(seconds = 0, *args, id: nextTimerID__, now: false, &block)\n return unless block\n time = Time.now.to_f\n block.call(*args) if now\n setInterval__ id, time, seconds, args, &block\n end", "def interval\n 30.seconds\n end", "def every (seconds)\n\t\t@every = seconds\n\n\t\trestart\n\tend", "def call\n SCC::Logger.info \"[TIMER] Starting timer for #{duration} seconds, with #{interval} second intervals.\"\n call_count.times do |n|\n yield\n\n SCC::Logger.info \"[TIMER] Finished #{n+1} of #{call_count} calls.\"\n\n if (n+1) != call_count\n SCC::Logger.info \"[TIMER] Waiting #{interval} seconds till next call.\"\n sleep interval\n end\n end\n end", "def wait\n loop do sleep 1 end\n end", "def now_and_every(interval, recur = T.unsafe(nil), &block); end", "def run (&code_block)\n raise ArgumentError, 'run requires a code block to execute.' 
unless block_given?\n @count=0\n\n start_time = Time.now\n\n (1..@max_count).each do |x|\n @count = x\n code_block.call\n\n sleep @interval_seconds unless @count == @max_count\n end\n\n ensure\n @running_time = Time.now - start_time unless start_time.nil?\n end", "def wait_while(cycle)\n while yield\n sleep(cycle)\n end\n end", "def sit\n\t\twhile connected?\n\t\t\tif block_given?\n\t\t\t\tyield\n\t\t\telse\n\t\t\t\tsleep(0.1)\n\t\t\tend\n\t\tend\n\tend", "def wait_for_running\n while !self.running?\n sleep Profitbricks::Config.polling_interval\n end\n end", "def long_calculation\n sleep(5) # do some complex operation like hit an API\nend", "def poll\n sleep 10\n while true\n sleep 2\n @poll_counter = @poll_counter - 1\n if @poll_counter < 0\n begin\n send(\" \\t \")\n rescue\n Thread.new {@exception_block.call if @exception_block}\n break\n end\n end\n end\n end", "def wait!\n sleep(@sleep)\n end", "def poll\n max = max_interval\n step = interval_step\n interval = min_interval\n\n while !@abort do\n message_count = 0\n message_count += receive_messages || 0\n message_count += receive_replies || 0\n message_count += receive_tweets || 0\n\n interval = message_count > 0 ? min_interval : [interval + step, max].min\n\n log.debug \"Sleeping for #{interval}s\"\n sleep interval\n end\n end", "def watch_interval=(_arg0); end", "def periodically(period = 0.5, maximum = nil, &block)\n @timers << hq.periodic_action(period, maximum, &block)\n end", "def loop(break_condition=-1, timeout=0, &blck)\n self.break_condition = block_given? ? blck : break_condition\n self.timeout_period = timeout\n self\n end", "def wait(duration)\n for i in 0...duration\n update_basic(false, true, true)\n end\n end", "def repeat_message(n)\n\tloop do\n\t\tbefore = Time.now\n\t\tyield\n\t\tinterval = n-(Time.now-before)\n\t\tsleep(interval) if interval > 0\n\tend\nend", "def heartbeat\n until @stop\n @empty.each(&:wakeup)\n @full.each(&:wakeup)\n sleep 0.1\n end\n end", "def delay(seconds); end", "def delay(seconds); end", "def pause_after_run\n sleep @sleep_seconds\n end", "def sleep\n #sleeps\n puts \"*sleeps*\"\n end" ]
[ "0.71171546", "0.69778156", "0.69612396", "0.69612396", "0.6935349", "0.69055146", "0.6846257", "0.6805411", "0.6782046", "0.6715308", "0.66548204", "0.66058785", "0.6601802", "0.658373", "0.6582203", "0.65142286", "0.6505487", "0.64913076", "0.6463782", "0.64613104", "0.64613104", "0.6436771", "0.641055", "0.63835573", "0.6368308", "0.63472235", "0.63415843", "0.6324477", "0.63066566", "0.6302775", "0.6295825", "0.6289152", "0.62673056", "0.6240908", "0.6233726", "0.621359", "0.6127285", "0.6115719", "0.6115719", "0.60919166", "0.6089362", "0.607732", "0.6062032", "0.605858", "0.60538906", "0.6049933", "0.6045705", "0.6029019", "0.6027533", "0.60257256", "0.6010117", "0.59929717", "0.5983128", "0.5980475", "0.59747654", "0.59666175", "0.596645", "0.59576935", "0.5955917", "0.5940035", "0.5931032", "0.59281546", "0.59257543", "0.5922003", "0.59150195", "0.59143955", "0.5912254", "0.59082127", "0.59082127", "0.59052557", "0.58887434", "0.58811337", "0.5879851", "0.5869323", "0.5869127", "0.5866598", "0.5862147", "0.58580273", "0.58555406", "0.5850959", "0.58405864", "0.58339626", "0.5831002", "0.5821539", "0.5817402", "0.5799384", "0.5798562", "0.57976663", "0.5795086", "0.57844615", "0.57784593", "0.57702756", "0.57668483", "0.57573587", "0.57524145", "0.5742888", "0.57389104", "0.57389104", "0.57315934", "0.5729935" ]
0.6637515
11
Call 'block' in the context of a separate thread.
def defer(&block)
  raise "Missing implementation 'defer'"
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def runblock\r\n\t\t\[email protected]\r\n\t\tend", "def runblock\r\n\t\t\[email protected]\r\n\t\tend", "def thread(&block)\n warn 'A Block is Needed' unless block_given?\n Java::JavaLang::Thread.new(&block).start\n end", "def call_block\n @_block.call(self) if @_block\n end", "def synchronize_block(&block)\n @lock.synchronize(&block)\n end", "def run_block\n if @block\n _block = @block\n @block = nil\n instance_eval &_block\n true\n end\n end", "def run\n block.call\n end", "def execute_in_main_thread(&block)\n EventMachine.schedule(&block)\n end", "def sync(&block)\r\n thread_lock.synchronize &block\r\n end", "def __run_block\n # This may not catch concurrent calls (unless surrounded by a mutex), but\n # it's not worth trying to protect against that. It's enough to just check for\n # multiple non-concurrent calls.\n ::Kernel.raise Error, \"Cannot run async block multiple times\" unless block = @block\n\n @block = nil\n\n begin\n block.call\n rescue ::Exception => e\n WrappedException.new(e)\n end\n end", "def block\n # beware of deadlocks, we can't join our own thread\n ::EM.reactor_thread.join if ::EM.reactor_thread && !::EM::reactor_thread?\n end", "def run_block_proc\n yield\nend", "def run(&block); end", "def call(*, &block)\n lock_instance.execute(&block)\n end", "def run(&block)\n end", "def run_block\n yield\nend", "def record_block\n @block = true\n end", "def callBlock\n yield # Invokes block\n yield # Invokes block again\nend", "def run\n if @block.arity >= 1\n @block.call self\n else\n @block.call\n end\n end", "def callBlock\n yield\n yield\nend", "def call_block\n\tputs \"Start of method\"\n\tyield\n\tyield\n\tputs \"End of method\"\nend", "def run_nonblock(&block)\n @timeout = 0\n run &block\n end", "def call_block(&block)\n block.call\nend", "def execute_block(block)\n case\n when block.is_a?(Symbol)\n send(block)\n when block.respond_to?(:call) && (block.arity == 1 || block.arity == -1)\n block.call(self)\n else\n block\n end \n end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def sync( &block )\n LOCK.synchronize &block\n rescue ThreadError\n yield\n end", "def blocks() end", "def call_block\n puts 'Start'\n yield\n yield\n puts 'End'\nend", "def perform(&block); end", "def call_block\r\n puts \"Start of method\"\r\n yield\r\n yield\r\n puts \"End of method\"\r\nend", "def call(*args, &block)\n @block.call *args, &block\n end", "def run &block\n worker = launch &block\n exit worker.wait\n end", "def asynchronous_block (&block)\n Proxy.new(nil, nil, nil, &block)\n end", "def __run__()\n begin\n begin\n @lock.send nil\n @result = @block.call(*@args)\n ensure\n @lock.receive\n unlock_locks\n @joins.each { |join| join.send self }\n end\n rescue Die\n @exception = nil\n rescue Exception => e\n # I don't really get this, but this is MRI's behavior. 
If we're dying\n # by request, ignore any raised exception.\n @exception = e # unless @dying\n ensure\n @alive = false\n @lock.send nil\n end\n\n if @exception\n if abort_on_exception or Thread.abort_on_exception\n Thread.main.raise @exception\n elsif $DEBUG\n STDERR.puts \"Exception in thread: #{@exception.message} (#{@exception.class})\"\n end\n end\n end", "def execute(&block)\n\tblock.call\nend", "def execute(&block)\n\tblock.call\nend", "def execute(&block)\n\tblock.call\nend", "def execute(&block)\n\tblock.call\nend", "def block!(handle, offset, length, mask, &callback)\n wait_for(block(handle, offset, length, mask, &callback))\n end", "def execute(&block)\n\tblock\nend", "def execute(&block)\n\tblock\nend", "def execute(block)\n block.call\nend", "def execute(block)\n block.call\nend", "def execute(block)\n block.call\nend", "def execute(block)\n block.call\nend", "def execute(block)\n block.call\nend", "def execute(block)\n block.call\nend", "def execute(block)\n block.call\nend", "def run(&blk)\n raise ArgumentError, \"must pass a block argument\" unless block_given?\n raise \"#{self} was killed\" if killed?\n consumer_thread\n top_loop(&blk)\n @killed = true\n end", "def synchronize(&block)\n @mutex.synchronize(&block)\n end", "def run_block\n @_block_content = nil\n unless block.blank?\n @_block_content = if view.parent.present?\n capture(view, &block)\n else\n ## We've been called directly from a controller.\n yield(view)\n end\n end\n end", "def block!\n self.blocked = Time.now\n self.save\n Token.block_access!(client_uri, resource_owner_uri)\n Authorization.block_access!(client_uri, resource_owner_uri)\n end", "def set_block &b\n @block = b\n end", "def call_async(&block)\n async_queue.push [:call, nil, block]\n end", "def call(*args)\n block.call(*args)\n end", "def future(&block)\n EM.next_tick { block.call }\n end", "def execute(&block)\n block.call\n puts \"End of block\"\nend", "def call(*args)\n block.call(*args) if block\n end", "def send_blocking_command(command, _timeout, &block); end", "def action_callback(options)\n # Wait until the calling thread goes to sleep.\n while options[:thread].status == \"run\"\n sleep 0.1\n end\n\n # Run the block.\n if options[:thread].status == \"sleep\"\n # Call the callback.\n options[:block].call\n end\n rescue => e\n Log.exception(e)\n ensure\n # Wake up the thread.\n if options[:thread].status == \"sleep\"\n options[:thread].run\n end\n end", "def thread_safe(&block)\n JSwing::SwingUtilities.invokeAndWait(block)\nend", "def run_stored_block\n self.run_in_context @stored_block if @stored_block\n end", "def receive_block(&block)\n block.call # same as yield\nend", "def synchronize\n lock(&block)\n end", "def execute_block(&block)\n @currently_executing_block = true\n output \"tell session id #{name}_tty\"\n self.instance_eval(&block)\n output \"end tell\"\n @currently_executing_block = false\n end", "def loop(&block)\n block ||= Proc.new { pending_requests.any? 
}\n session.loop{Rex::ThreadSafe.sleep(0.5); block.call }\n end", "def call_this_block\n yield \"tweet\"\nend", "def call_this_block\n yield \"tweet\"\nend", "def use(&block)\n @mutex.synchronize { yield @obj, @mutex }\n end", "def run(*args, &blk)\n raise \"No block was given.\" if !blk\n \n block = Tpool::Block.new(\n :args => args,\n :blk => blk,\n :tpool => self,\n :thread_starts => [Thread.current]\n )\n @queue << block\n \n begin\n Thread.stop\n rescue Exception\n #Its not possible to stop main thread (dead-lock-error - sleep it instead).\n sleep 0.1 while !block.done?\n end\n \n return block.res\n end", "def call_block\n puts \"start\"\n yield \"foobar\" if block_given?\n puts \"end\"\nend", "def synchronize(&block)\n @mutex.synchronize(&block)\n end", "def process(&block); end", "def run\n @ctx.call(self,&@blk) if @blk\n end", "def in_background(&block)\n @threads ||= []\n thread = Thread.new(&block)\n thread.abort_on_exception = true\n @threads << thread\n thread\n end", "def use_block(obj, &method)\n\tmethod.call\nend", "def execute(&block)\n # block\n block.call\nend", "def command_thread(&block)\n start_service\n Ricer::Thread.execute {\n begin\n yield\n rescue StandardError => e\n reply_exception(e)\n ensure\n stopped_service\n end\n }\n end", "def async_exec(&block)\n @swt_display.asyncExec do\n execs_in_progress << :async_exec\n begin\n result = block.call\n ensure\n execs_in_progress.pop\n end\n end\n end", "def call_block \n yield('hello', 99) \nend", "def call_block \n yield('hello', 99) \nend", "def run\n puts \"Asked to run w/o breakpoint\"\n Breakpoints.main_thread = ::Thread.current\n\n @thread ||= ::Thread.new do\n @block_to_run.call\n end\n end" ]
[ "0.70992064", "0.70992064", "0.70462257", "0.6973938", "0.6843728", "0.67711735", "0.6705492", "0.6687097", "0.6685909", "0.6606101", "0.660264", "0.6529717", "0.6526353", "0.65163046", "0.6489503", "0.64863855", "0.6451361", "0.6406846", "0.6395413", "0.6382068", "0.63734394", "0.6368307", "0.63666415", "0.6360789", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6316107", "0.6304053", "0.62638915", "0.6238016", "0.62134033", "0.6207019", "0.62015516", "0.61886525", "0.6178744", "0.6137566", "0.61306465", "0.61306465", "0.61306465", "0.61296165", "0.6127078", "0.61263", "0.61263", "0.6124215", "0.6124215", "0.6124215", "0.6124215", "0.6124215", "0.6124215", "0.6124215", "0.611848", "0.61068416", "0.6096326", "0.60924953", "0.60842323", "0.6076409", "0.6075828", "0.60596156", "0.60467744", "0.60418254", "0.6039269", "0.60371596", "0.6036983", "0.6031381", "0.6028831", "0.60265446", "0.60224813", "0.601394", "0.60002613", "0.60002613", "0.5985694", "0.5976652", "0.5964284", "0.59600824", "0.59453994", "0.5922382", "0.59211165", "0.5916848", "0.59161735", "0.5905517", "0.5873608", "0.586964", "0.586964", "0.5858841" ]
0.0
-1
Block calling thread until eventloop exits
def join()
  raise "Missing implementation 'join'"
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run_event_loop\n while true\n event = @event_queue.pop\n @worker_pool.dispatch event\n \n if @stopped\n Thread.stop;\n return\n end\n end\n end", "def wake_event_loop!\n super\n end", "def wake_event_loop!\n super\n end", "def join\n require \"thwait\"\n ThreadsWait.new(@event_loop).join\n end", "def go\n while(true)\n process_event(wait_for_event)\n end\n end", "def wait_while\n while yield\n wait\n end\n end", "def wait_for_shutdown\n @rufus.join\n end", "def wait\n loop do sleep 1 end\n end", "def wait_while\n while yield\n\twait\n end\n end", "def wait\n @thread.join\n end", "def wait; end", "def wait; end", "def wait; end", "def run_async\n stop = false\n\n puts \"Press Crtl+C to quit...\"\n trap('INT') do\n puts \"\\nFinish...\"\n stop = true\n end\n\n while !stop\n Orocos::Async.step\n sleep(0.01)\n end\n\nend", "def wait_while\n while yield\n wait\n end\n end", "def join\n @listen_thread.join if @listen_thread\n sleep 0.5\n raise @thread_error if @thread_error\n end", "def wait_end()\n begin\n loop do\n sleep(TICK/1000.0) while (self.connected?() rescue nil)\n break\n end\n rescue Exception => e\n end\n end", "def wait\n # Here we use a loop-sleep combination instead of using\n # ThreadPoolExecutor's `wait_for_termination`. See issue #21 for more\n # information.\n loop do\n break if @executor.shutdown?\n sleep 0.1\n end\n end", "def running?; !!@thread end", "def run_loop\n end", "def wait\n\tend", "def wait_async(time)\n EM::Synchrony.sleep(time)\nend", "def initiate_event_loop!\n @active = true\n\n @thread = Thread.new do\n while @active\n @session_mutex.synchronize do\n @session.process(@loop_wait)\n end\n Thread.pass\n end\n end\n end", "def initiate_event_loop!\n @active = true\n\n @thread = Thread.new do\n while @active\n @session_mutex.synchronize do\n @session.process(@loop_wait)\n end\n Thread.pass\n end\n end\n end", "def wait_for_event\n q = @vm.eventQueue()\n while(true)\n event_set = q.remove()\n it = event_set.iterator()\n while(it.hasNext)\n event = it.next\n $DEBUG and puts(\"Received an event: #{event.java_class}\")\n @frame = AndroidDebug::Jpda::Frame.new(event.thread.frame(0), event.location)\n @this = @frame.this\n return(AndroidDebug::Jpda::Event.new(event))\n end \n end\n end", "def listenForeverBlocking\r\n unless EventMachine::reactor_running?\r\n debug \"running SINGLE THREAD!\"\r\n EM::run {\r\n EM::PeriodicTimer.new(300, proc { debug 'listener 300s ping'}) # this is actually never cancelled (yet)\r\n listenForeverLoopingOnErrorNonBlocking\r\n }\r\n debug \"listener EM done--EM.run ended\"\r\n else\r\n debug \"aww still multiple threaded [but still EM]...!\"\r\n listenForeverLoopingOnErrorNonBlocking\r\n sleep\r\n end\r\n end", "def endless_loop?; end", "def execute_in_main_thread(&block)\n EventMachine.schedule(&block)\n end", "def initiate_event_loop!\n @active = true\n\n @thread = Thread.new do\n while @active\n @session_mutex.synchronize do\n @session.process(@loop_wait)\n end\n Thread.pass\n end\n end\n end", "def wait_until\n until yield\n wait\n end\n end", "def wait_until\n until yield\n\twait\n end\n end", "def waitUntil\n until yield\n sleep 0.5\n end\nend", "def exit\n @main_loop = false\n end", "def join\n return if not running?\n @thread.join\n end", "def wait_until(ev)\n if inside_control?\n raise ThreadMismatch, \"cannot use #wait_until in execution threads\"\n end\n\n Roby.condition_variable(true) do |cv, mt|\n caller_thread = Thread.current\n # Note: no need to add the caller thread in waiting_threads,\n # since the event will 
become unreachable if the execution\n # thread quits\n\n mt.synchronize do\n done = false\n once do\n ev.if_unreachable(true) do |reason, event|\n mt.synchronize do\n done = true\n caller_thread.raise UnreachableEvent.new(event, reason)\n end\n end\n ev.on do |ev|\n mt.synchronize do\n done = true\n cv.broadcast\n end\n end\n yield if block_given?\n end\n\n while !done\n cv.wait(mt)\n end\n end\n end\n end", "def wait\n true\n end", "def wait_done\n sleep 0.01 until done?\n end", "def wait\n self.listener.wait if self.listener\n end", "def event_loop\n Qwirk.logger.debug \"#{self}: Starting receive loop\"\n @start_worker_time = Time.now\n until @stopped || (config.stopped? && @impl.ready_to_stop?)\n Qwirk.logger.debug \"#{self}: Waiting for read\"\n @start_read_time = Time.now\n msg = @impl.receive_message\n if msg\n @start_processing_time = Time.now\n Qwirk.logger.debug {\"#{self}: Done waiting for read in #{@start_processing_time - @start_read_time} seconds\"}\n delta = config.timer.measure do\n @processing_mutex.synchronize do\n on_message(msg)\n @impl.acknowledge_message(msg)\n end\n end\n Qwirk.logger.info {\"#{self}::on_message (#{'%.1f' % delta}ms)\"} if self.config.log_times\n Qwirk.logger.flush if Qwirk.logger.respond_to?(:flush)\n end\n end\n Qwirk.logger.info \"#{self}: Exiting\"\n rescue Exception => e\n @status = \"Terminated: #{e.message}\"\n Qwirk.logger.error \"#{self}: Exception, thread terminating: #{e.message}\\n\\t#{e.backtrace.join(\"\\n\\t\")}\"\n ensure\n @status = 'Stopped'\n Qwirk.logger.flush if Qwirk.logger.respond_to?(:flush)\n config.worker_stopped(self)\n end", "def run\r\n catch :quit do\r\n loop do\r\n # Prepare events\r\n @event_queue.fetch_sdl_events\r\n @event_queue << @clock.tick\r\n\r\n # Handle events\r\n @event_queue.each do |event|\r\n yield event if block_given?\r\n # Stop this program if the user closes the window\r\n throw :quit if event.is_a? 
Rubygame::Events::QuitRequested\r\n end\r\n end\r\n end\r\n end", "def flush()\n return if @loop.nil?\n @waiting_thread = Thread.current\n begin\n @loop.wakeup()\n rescue\n # ignore\n end\n while busy?\n sleep(2.0)\n end\n @waiting_thread = nil\n end", "def wait_until_running\n until @running\n ControllableThread.sleep POLL_INTERVAL\n check_key\n end\n end", "def join_async_world\n ap4r_helper.wait_all_done\n end", "def wait_for_message\r\n Fiber.yield while $game_message.busy?\r\n end", "def wait_until_ready\n # this method may be left unimplemented if that is applicable\n end", "def wait_until_exit!\n trap_signals!\n\n until @outbound_pool.wait_for_termination(60)\n debug_log Ambo.random_beep_boops\n end\n end", "def wait_until\n until yield\n wait\n end\n end", "def start_main_loop_thread\n current_generation = @main_loop_generation\n @main_loop_thread = create_thread_and_abort_on_exception do\n main_loop\n end\n @main_loop_thread_lock.synchronize do\n while @main_loop_generation == current_generation\n @main_loop_thread_cond.wait(@main_loop_thread_lock)\n end\n end\n end", "def wait_on_access\n if @server.running?\n @running_threads -= 1\n #puts \"waiting\"\n @condvar.wait(@mutex)\n #puts \"woken\"\n @running_threads += 1\n end\n end", "def wait!\n sleep(@sleep)\n end", "def blocking_thread\n poller_thread\n end", "def dispatch event\n raise Exception.new(\"workers cannot dispatch while blocked\") if blocked?\n @event = event\n @is_running = true\n \n self.resume event\n end", "def waiting_for_thread(skip_event = false)\n events << [:waiting, current_cycle_count] unless skip_event\n wait\n end", "def eventLoop\n @executor.executeQueuedTasks\n end", "def wait_async\n @wait_async = true\n end", "def wait\n # it might be awaken by some other futures!\n mutex.synchronize{ condv.wait(mutex) until done? } unless done?\n end", "def run\n Thread.start do\n begin\n while true\n main_loop\n end\n ensure\n @protocol.close if @protocol\n end\n end\n end", "def infinite_loop?; end", "def wait_until_not_full; end", "def join\n @cond.wait if not finished?\n end", "def wait_running(blocking = false)\n true\n end", "def blocking_thread\n worker_thread\n end", "def wait_for_running\n while !self.running?\n sleep Profitbricks::Config.polling_interval\n end\n end", "def wait\n @timer_thread.join\n end", "def check_again\n if @thread.alive?\n wait 20\n else\n 'done'\n end\n end", "def wait_for_agent_infinite_tracer_thread_to_close\n timeout_cap(3.0) do\n while Thread.list.select{|t| t.status == \"run\"}.size > 1\n sleep(0.01)\n end\n sleep(0.01)\n end\n end", "def waiting; end", "def waiting; end", "def wait_until_available\n return unless @locked\n\n @mutex.lock\n @mutex.unlock\n end", "def wait_and_proceed\n Thread.new do\n sleep TIMEOUT\n proceed\n end\n end", "def wait_until_ready\n # this method may be left unimplemented if that is applicable log\n end", "def wait_until_without_failing(timeout=UI_UPDATE_DELAY)\n start = Time.now\n while ((Time.now - start) <= timeout)\n break if yield\n sleep(WaitHelpers::LOOP_DELAY)\n end\nend", "def await_instruction\n # Sleep this thread. 
The main thread will wake us up when there is an instruction\n # to perform.\n @waiting = true\n Thread.stop\n\n # Main thread has woken us up, so execute the current instruction.\n if @instruction\n @instruction.call\n @instruction = nil\n end\n\n # Continue execution of the thread until a socket callback fires, which will\n # trigger then method again and send us back to sleep.\n end", "def start_event_thread\n return false if @event_thread\n @event_thread = Thread.new(&method(:event_thread_body))\n end", "def run\n return unless @mutex.try_lock\n until @event_queue.empty? or not @running\n if @event_queue.length > 1\n log \"#{@event_queue} commands in queue\", Logger::Medium\n end\n handle_event(@event_queue.pop)\n end\n @mutex.unlock\n end", "def await\n @thread.join\n @object\n end", "def run\n loop do\n break unless app_loop\n end\n end", "def block\n # beware of deadlocks, we can't join our own thread\n ::EM.reactor_thread.join if ::EM.reactor_thread && !::EM::reactor_thread?\n end", "def wait\n @notifier.wait if @notifier\n end", "def wait\n @notifier.wait if @notifier\n end", "def process_events\n while Gtk.events_pending?\n Gtk.main_iteration\n end\nend", "def delayed_exit\n sleep 0.1\n exit\n end", "def process_events\n while (Gtk.events_pending?)\n Gtk.main_iteration\n end\n end", "def wait_until_available\n return unless locked?\n\n @mutex.synchronize {}\n end", "def thread; end", "def thread; end", "def thread; end", "def event_machine(&block)\n if EM.reactor_running?\n # puts \"Reactor is running!\"\n block.call\n else\n # puts \"Reactor is NOT running!\"\n Thread.new {EM.run}\n EM.next_tick(block)\n end\n end", "def wait\n sleep 0.0001\n end", "def yield\n wait\n callback\n end", "def waitQuit()\n @device.waitQuit() ;\n end", "def wait_for_callback\n turnstile.wait unless paused\n end", "def wait\n if defined? @result\n return @result\n else\n @waiters << Eventlet.current\n Eventlet.sleep\n end\n end", "def dispatch_events!\n loop do\n event = @event_queue.pop\n\n callbacks.each do |callback|\n Thread.new do\n callback.call event\n end\n end\n end\n end", "def wait(timeout=60)\n countdown = timeout.to_f\n\n while countdown > 0\n if @zmq_thread and @zmq_thread.alive?\n sleep 0.1\n countdown = countdown - 0.1\n else\n break\n end\n end\n\n super()\n end", "def wait\n 0\n end", "def wait_until\n poll do\n transition! if yield\n end\n end", "def start_thread\n @thread = @options[:sync] || Thread.new do\n begin\n loop do\n @status = :waiting_for_event\n process_event\n @event_queue.wait_for_new_event\n end\n rescue => e\n CFSM.logger.fatal \"#{e.class}: #{$!}\"\n Thread.main.raise e\n end\n end\n end", "def wait(arg0)\n end", "def endless_loop\n loop { yield }\n end", "def wait_for_ready\n sleep 0.1 until ready?\n end" ]
[ "0.68293977", "0.67887175", "0.67887175", "0.67329645", "0.66352224", "0.65671515", "0.65609515", "0.65282714", "0.6523593", "0.65003353", "0.647598", "0.647598", "0.647598", "0.64626145", "0.6436229", "0.6388149", "0.6352852", "0.63288015", "0.6325552", "0.63198155", "0.63035345", "0.6302157", "0.6299237", "0.6299237", "0.62890935", "0.62886953", "0.6263728", "0.62605524", "0.62311256", "0.6230082", "0.6213834", "0.6209649", "0.6203803", "0.62033415", "0.62020206", "0.6188834", "0.6184941", "0.61705387", "0.61532414", "0.6142469", "0.6135976", "0.6133723", "0.6128216", "0.6126875", "0.6123103", "0.61107004", "0.61106426", "0.6109066", "0.6108349", "0.61060834", "0.6104635", "0.609868", "0.60879827", "0.6087645", "0.60863525", "0.6072096", "0.60707515", "0.6050772", "0.60487586", "0.6032147", "0.60270685", "0.60188997", "0.6014193", "0.60114557", "0.60066545", "0.59989035", "0.599283", "0.599283", "0.598855", "0.59795094", "0.5973574", "0.59557205", "0.5950702", "0.5946893", "0.59286755", "0.5923991", "0.59132", "0.5906736", "0.59063274", "0.59063274", "0.59058315", "0.589605", "0.5895407", "0.5890115", "0.5851449", "0.5851449", "0.5851449", "0.5851014", "0.5839272", "0.5835498", "0.58309215", "0.58287644", "0.5818235", "0.5814564", "0.5809962", "0.58080107", "0.58024544", "0.5801062", "0.5794104", "0.5790262", "0.57882243" ]
0.0
-1
Calling 'block' before stopping eventloop
def on_stop(&block)
  @@on_stop_proc << block
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wake_event_loop!\n super\n end", "def wake_event_loop!\n super\n end", "def stop\n @event_loop.stop\n end", "def stop\n yield\n end", "def force_end\n stop\n end", "def stop(&block)\n yield(DONE)\n puts stop_message\n end", "def run_nonblock(&block)\n @timeout = 0\n run &block\n end", "def stop(&block)\n @stop = block\n end", "def endless_loop?; end", "def blocks() end", "def after_block_boundary?; end", "def block\n true\n end", "def block\n # beware of deadlocks, we can't join our own thread\n ::EM.reactor_thread.join if ::EM.reactor_thread && !::EM::reactor_thread?\n end", "def record_block\n @block = true\n end", "def stop\n @loop = nil\n end", "def stop\n stop_control_loop\n end", "def on_stop &block\n block_given? ? @on_stop = block : @on_stop\n end", "def stop\n EM.stop_event_loop\n end", "def run_block\n if @block\n _block = @block\n @block = nil\n instance_eval &_block\n true\n end\n end", "def stop; end", "def stop; end", "def stop; end", "def stop; end", "def stop; end", "def stop; end", "def stop; end", "def exit\n @main_loop = false\n end", "def runblock\r\n\t\t\[email protected]\r\n\t\tend", "def runblock\r\n\t\t\[email protected]\r\n\t\tend", "def will_run_block\n # when you use yield, you'll call the block\n yield\n puts 'End'\nend", "def endless_loop\n loop { yield }\n end", "def stop\n @stop_loop.call\n end", "def stop!; end", "def parentloop; end", "def run(&block); end", "def run_loop\n end", "def post_loop; end", "def stop &block\n @actions[:stop] = block\n end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def block; end", "def busy( &block )\n self.connection.status( :dnd, \"Working...\" )\n yield\n self.connection.status( :chat, \"JabberListener waiting for instructions\" )\n end", "def run(&block)\n end", "def run\r\n catch :quit do\r\n loop do\r\n # Prepare events\r\n @event_queue.fetch_sdl_events\r\n @event_queue << @clock.tick\r\n\r\n # Handle events\r\n @event_queue.each do |event|\r\n yield event if block_given?\r\n # Stop this program if the user closes the window\r\n throw :quit if event.is_a? 
Rubygame::Events::QuitRequested\r\n end\r\n end\r\n end\r\n end", "def delayed_exit\n sleep 0.1\n exit\n end", "def stop!\n synchronize do\n @thread.exit if @thread\n\n yield if block_given?\n end\n\n true\n end", "def onStopped(&block)\n\t\t\tif block_given?\n\t\t\t\t@on_stopped = block\n\t\t\t\tself\n\t\t\telse\n\t\t\t\t@on_stopped\n\t\t\tend\n\t\tend", "def stopping; end", "def stop\n\t\t# No-op\n\tend", "def stop\n true\n end", "def stop\n true\n end", "def stop\n true\n end", "def stop\n true\n end", "def stop_condition(&block)\n self.gracefully_stop_mark = block\n end", "def post_block\n end", "def post_block\n end", "def run(&blk)\n raise ArgumentError, \"must pass a block argument\" unless block_given?\n raise \"#{self} was killed\" if killed?\n consumer_thread\n top_loop(&blk)\n @killed = true\n end", "def stop(&blk)\n if blk\n Smith.stop(true, &blk)\n else\n Smith.stop(true)\n end\n end", "def stop()\n if @started\n @@count -= 1\n\n if @@count == 0 and Thread.current == Thread.main\n EM.stop_event_loop()\n @@thread.join\n @@thread = nil\n end\n end\n\n super()\n end", "def stop\n yield self if block_given?\n http.post('/__admin/shutdown', '')\n end", "def stop\n end", "def stop\n end", "def stop\n end", "def stop\n end", "def stop\n end", "def stop\n end", "def stop\n end", "def stop\n end", "def stop\n end", "def stop\n end", "def stop\n end", "def stop\n end", "def loop( &block )\n sanity_check\n @connection.loop(&block)\n end", "def sit\n\t\twhile connected?\n\t\t\tif block_given?\n\t\t\t\tyield\n\t\t\telse\n\t\t\t\tsleep(0.1)\n\t\t\tend\n\t\tend\n\tend", "def blocks; end", "def blocks; end", "def blocks; end", "def stopping &block\n if stopped?\n block.call\n else\n stopping_hooks << block\n end\n end", "def control_loop\n if stopping?\n unsubscribe!\n stopped!\n else\n attempt_recovery if paused?\n sleep(3)\n end\n end", "def stop\n # no-op\n end", "def run_block\n yield\nend", "def call_block\n puts 'Start'\n yield\n yield\n puts 'End'\nend", "def stop_thread; end" ]
[ "0.7116361", "0.7116361", "0.69921833", "0.6747109", "0.66986316", "0.6674539", "0.6607117", "0.6583129", "0.65828156", "0.65612423", "0.65526044", "0.6534999", "0.6524514", "0.65144956", "0.6511241", "0.64684826", "0.6467592", "0.64470565", "0.6444129", "0.6431461", "0.6431461", "0.6431461", "0.6431461", "0.6431461", "0.6431461", "0.6431461", "0.64123803", "0.64109993", "0.64109993", "0.6395163", "0.63868266", "0.63747835", "0.6364082", "0.6339815", "0.6332467", "0.63282603", "0.6311956", "0.6298366", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6293755", "0.6270318", "0.6267009", "0.62649494", "0.6244925", "0.6216243", "0.6215297", "0.619139", "0.61845875", "0.6171826", "0.6171826", "0.6171826", "0.6171826", "0.6167506", "0.61573267", "0.61573267", "0.6150541", "0.61176294", "0.6108321", "0.6106987", "0.6095054", "0.6095054", "0.6095054", "0.6095054", "0.6084882", "0.6084882", "0.6084882", "0.6084882", "0.6084882", "0.6084882", "0.6084882", "0.6084882", "0.60796124", "0.60723424", "0.6068283", "0.6068283", "0.6068283", "0.60657275", "0.60621756", "0.6035078", "0.6030801", "0.60301167", "0.60110855" ]
0.6135901
74
Calling 'block' when having trapped an INT signal
def on_int_signal(&block)
  # trap(:INT)
  warn "Missing implementation 'on_int_signal'"
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handle_interrupt; end", "def on_interrupt(&block)\n trap(\"INT\") { yield \"SIGINT\" }\n trap(\"QUIT\") { yield \"SIGQUIT\" }\n trap(\"TERM\") { yield \"SIGTERM\" }\n end", "def interrupt_handler\n signal_handler(2)\n end", "def run(&block)\n raise \"#{self} cannot run; it was permanently killed.\" if @dead\n \n super do |socket, revents|\n if socket == @int_sock_rep || socket == @int_sock_pull\n key, * = socket.recv_array\n kill = key == \"KILL\"\n blocking = socket == @int_sock_rep\n \n # Call the user block of #interrupt and store the return value\n unless kill\n result = @interruptions.pop.call\n @outerruptions.push result if blocking\n end\n \n # Call the user block of #run\n block.call nil, nil if block\n \n # Send a response if the interruption was blocking\n socket.send_array [\"OKAY\"] if blocking\n \n if kill\n @int_sock_rep.close\n @int_sock_pull.close\n @dead = true\n end\n else\n block.call socket, revents if block\n end\n end.tap do |hash|\n hash.delete @int_sock_rep\n hash.delete @int_sock_pull\n end\n end", "def with_repl_like_sigint\n orig_handler = trap(\"INT\") { raise Interrupt }\n yield\n rescue Interrupt\n puts(\"^C\")\n retry\n ensure\n trap(\"INT\", orig_handler)\n end", "def trap_sigint(metabolizer, &block)\n trap 'SIGINT', proc {\n block.call if block\n metabolizer.stop\n }\nend", "def interrupt; end", "def signal\n end", "def isolate_from_interrupts\n instance.signal_received = false\n instance.isolate_signals = true\n result = yield\n instance.isolate_signals = false\n result\n end", "def interrupt!\n @mutex.synchronize do\n case @blocked\n when NOT_YET, BLOCKED\n @result = INTERRUPTED\n @cond.broadcast\n else\n return\n end\n end\n end", "def interrupt!\n @mutex.synchronize do\n case @blocked\n when NOT_YET, BLOCKED\n @result = INTERRUPTED\n @cond.broadcast\n else\n return\n end\n end\n end", "def signal; end", "def signal; end", "def register_signals\n trap(:INT) { debug \"Recieved INT\"; exit!}\n end", "def interrupt?; end", "def signal_handle\n unless @context.ignore_sigint?\n print \"\\nabort!\\n\" if @context.verbose?\n exit\n end\n\n case @signal_status\n when :IN_INPUT\n print \"^C\\n\"\n raise RubyLex::TerminateLineInput\n when :IN_EVAL\n IRB.irb_abort(self)\n when :IN_LOAD\n IRB.irb_abort(self, LoadAbort)\n when :IN_IRB\n # ignore\n else\n # ignore other cases as well\n end\n end", "def isolate_from_interrupts; end", "def handler_for signal, &block\n interrupt_handler[signal] = block\n end", "def trap(signal, &block)\n if Signal.list.include?(signal)\n Kernel.trap(signal, &block) unless Merb.disabled?(:signals)\n end\n end", "def signal(signum = nil, callback = nil, &block)\n callback ||= block\n handle = Signal.new(@loop)\n handle.progress callback if callback\n handle.start(signum) if signum\n handle\n end", "def push_interrupt(e); end", "def detect_interruption\n trap('INT') do\n interrupted!\n puts\n puts 'Hold on, let me finish this file...'\n end\n end", "def execute_INT(operand)\n\t\tperform_interrupt_for operand.value\n\tend", "def signal_received; end", "def supervise\n HANDLED_SIGNALS.each { |signal| trap_signal(signal) }\n end", "def add_sigint_handler\n trap 'INT' do\n puts '\\nCancelled. 
Bye Bye!'\n exit!\n end\n end", "def reenable_on_interrupt; end", "def handle_interrupt\n case @interrupt\n when :signal\n Process.kill('SIGINT', Process.pid)\n when :exit\n exit(130)\n when Proc\n @interrupt.call\n when :noop\n return\n else\n raise InputInterrupt\n end\n end", "def interrupt\n user_interrupt or true\n end", "def pop_interrupt; end", "def set_signal_trap\n finalizer = Proc.new { finalize }\n Signal.trap(:INT, finalizer)\n end", "def expect_interrupt\n\tbegin\n\t\tyield\n\trescue Interrupt\n\t\tputs # to separate the typed `^C` from upcoming program output or shell prompt\n\tend\nend", "def reenable_on_interrupt=(_arg0); end", "def signal\n @cmd_result.signal\n end", "def interrupt\n current_context.interrupt\n end", "def add_irb_trap\n Merb.trap(\"INT\") do\n if @interrupted\n Merb.logger.warn! \"Interrupt received a second time, exiting!\\n\"\n exit\n end\n\n @interrupted = true\n Merb.logger.warn! \"Interrupt a second time to quit.\"\n Kernel.sleep 1.5\n ARGV.clear # Avoid passing args to IRB\n\n if @irb.nil?\n require \"irb\"\n IRB.setup(nil)\n @irb = IRB::Irb.new(nil)\n IRB.conf[:MAIN_CONTEXT] = @irb.context\n end\n\n Merb.trap(:INT) { @irb.signal_handle }\n catch(:IRB_EXIT) { @irb.eval_input }\n\n Merb.logger.warn! \"Exiting from IRB mode back into server mode.\"\n @interrupted = false\n add_irb_trap\n end\n end", "def initialize_sighandlers\n trap(:INT) do\n stop\n exit\n end\n end", "def trap\n\t\t\t\ttask = Task.current\n\t\t\t\ttask.annotate(\"waiting for signal #{@name}\")\n\t\t\t\t\n\t\t\t\tnotification = Notification.new\n\t\t\t\t@notifications << notification\n\t\t\t\t\n\t\t\t\twhile true\n\t\t\t\t\tnotification.wait\n\t\t\t\t\tyield\n\t\t\t\tend\n\t\t\tensure\n\t\t\t\tif notification\n\t\t\t\t\tnotification.close\n\t\t\t\t\[email protected](notification)\n\t\t\t\tend\n\t\t\tend", "def trap_signals\n %w(TERM INT).each do |signal|\n trap(signal) { stop }\n end\n end", "def trap(sig)\n ::Signal.trap(sig) do\n puts\n puts \"Interrupt! Signals from an interruption are not thread-safe.\"\n \n call_prompt = true\n 3.times do\n print \"Wait for thread-safe break? 
(y/n): \"\n\n case gets.strip\n when /^y(es)?$/i\n puts \"waiting for break...\"\n app.pq(self, [])\n call_prompt = false\n break\n\n when /^no?$/i\n break\n end\n end\n\n if call_prompt\n call([])\n end\n end\n end", "def handle_signal( signal )\n\t\tself.log.info \"Handling %p signal.\" % [ signal ]\n\t\tcase signal\n\t\twhen :INT, :TERM, :HUP\n\t\t\tself.stop\n\t\telse\n\t\t\tsuper\n\t\tend\n\tend", "def watch\n begin\n yield\n rescue Interrupt\n puts\n exit 0\n end\n end", "def signal\n @result.signal\n end", "def signal\n MultiProcessing.try_handle_interrupt(RuntimeError => :never) do\n begin\n @waiting_pout.read_nonblock 1\n @signal_pin.syswrite 1\n return true\n rescue Errno::EAGAIN\n return false\n end\n end\n end", "def initialize\n self.isolate_signals = false\n self.signal_received = false\n self.reenable_on_interrupt = false\n\n Signal.trap('INT') do\n if isolate_signals\n self.signal_received = true\n else\n if reenable_on_interrupt\n self.reenable_on_interrupt = false\n self.isolate_signals = true\n end\n\n raise Interrupt # Allow interrupt to propagate to code\n end\n end\n end", "def signal_received=(_arg0); end", "def user_interrupt\n write 'Terminating' # XXX get rid of this\n stoploop\n end", "def signal(signum = nil)\n handle = Signal.new(@reactor)\n handle.progress &Proc.new if block_given?\n handle.start(signum) if signum\n handle\n end", "def on_int(token)\n log \"INT: '#{token}'\"\n super(token)\n end", "def signal\r\n Ragweed::Wrap32::set_event(@h)\r\n end", "def signal_queue; end", "def test_ask_sigint\n start_process do\n begin\n Prompt.ask('q')\n rescue Interrupt\n @ret.write(Marshal.dump(:SIGINT))\n end\n end\n\n sleep(0.05)\n Process.kill('INT', @pid)\n\n assert_result(\"? q\\n> \", \"^C\\n\", :SIGINT)\n end", "def _interrupt\n\t\tbegin\n\t\t\tuser_want_abort?\n\t\trescue Interrupt\n\t\t\t# The user hit ctrl-c while we were handling a ctrl-c, send a\n\t\t\t# literal ctrl-c to the shell. XXX Doesn't actually work.\n\t\t\t#$stdout.puts(\"\\n[*] interrupted interrupt, sending literal ctrl-c\\n\")\n\t\t\t#$stdout.puts(run_cmd(\"\\x03\"))\n\t\tend\n\tend", "def trap_signals\n Signal.trap('INT') do\n say \"\\nQuitting...\", :red\n Kernel.exit\n end\n end", "def sigHandler\n Signal.trap(\"INT\") { stop }\n Signal.trap(\"TERM\") { stop }\n end", "def signal\n c = nil\n @mutex.synchronize do\n c = @count\n @count += 1\n if c < 0 \n t = @sleeping.shift\n t.wakeup if t\n end\n end\n end", "def handle_interrupt\n system \"SIGINT received, starting shutdown\"\n @shutdown = true\n end", "def within_preserved_state\n lock.synchronize do\n begin\n interactor.stop if interactor\n @result = yield\n rescue Interrupt\n # Bring back Pry when the block is halted with Ctrl-C\n end\n\n interactor.start if interactor && running\n end\n\n @result\n end", "def signal_status(status)\n return yield if @signal_status == :IN_LOAD\n\n signal_status_back = @signal_status\n @signal_status = status\n begin\n yield\n ensure\n @signal_status = signal_status_back\n end\n end", "def run_interrupted; end", "def handles_interrupts?\n handles_signal?(2)\n end", "def on_term_signal(&block)\n # trap(:TERM) {}\n warn \"Missing implementation 'on_term_signal'\"\n end", "def signal\n @condition.signal\n true\n end", "def signal\n @condition.signal\n true\n end", "def interrupt!\n @interrupted = true\n end", "def test_ask_free_form_sigint\n start_process do\n Prompt.ask('q')\n rescue Interrupt\n @ret.write(Marshal.dump(:SIGINT))\n end\n\n sleep(0.05)\n Process.kill('INT', @pid)\n\n assert_result(\"? 
q\\n> \", \"^C\\n\", :SIGINT)\n end", "def do_with_interrupt_handling\n yield if block_given?\n rescue StandardError => e\n warn HighLine.color(\"\\nAborting, fatal #{e.class}, #{e} at #{error_call_site(e)}\", :red)\n Kernel.exit(3)\n rescue Interrupt\n warn HighLine.color(\"\\nAborting, interrupt received\", :red)\n Kernel.exit(2)\n rescue RuntimeError => e\n warn HighLine.color(\"\\nAborting, fatal unhandled error, #{e} at #{error_call_site(e)}\", :red)\n Kernel.exit(1)\n end", "def trap_signal(signal)\n trap(signal) do\n notice_signal(signal)\n (@callbacks[signal] || []).each(&:call)\n end\n end", "def interrupt(ex)\n raise ex\n end", "def doge_control_signal\r\n end", "def trap_resume\n Curses.raw\n old_cont = trap 'CONT' do Curses.doupdate end\n\n yield\n\n ensure\n Curses.noraw\n trap 'CONT', old_cont\n end", "def interrupt\n\t\t\t@interrupted = true\n\t\t\t@selector&.wakeup\n\t\tend", "def trap_signals\n # kill\n trap('INT') do\n puts('INT received')\n notify_subscribers('INT received')\n stop\n end\n\n # kill\n trap('TERM') do\n puts('TERM received')\n notify_subscribers('TERM received')\n stop\n end\n\n # graceful\n trap('QUIT') do\n puts('QUIT received')\n notify_subscribers('QUIT received')\n stop_soft\n end\n\n # reset things\n trap('HUP') do\n puts('HUP received')\n notify_subscribers('HUP received')\n reset\n end\n end", "def trap_signals\n trap(\"TERM\") do\n\tself.fatal!(\"TERM received\",1)\n end\n\n trap(\"INT\") do\n\tself.fatal!(\"Interrupt signal received.\", 2)\n end\n end", "def trap_signals\n trap(\"TERM\") do\n\tself.fatal!(\"TERM received\",1)\n end\n\n trap(\"INT\") do\n\tself.fatal!(\"Interrupt signal received.\", 2)\n end\n end", "def on_connection_interruption(&block)\n self.redefine_callback(:after_connection_interruption, &block)\n end", "def interrupt\n\t\t\t@interrupted = true\n\t\t\[email protected]\n\t\tend", "def setup_signals\n @trapped_count ||= 0\n\n stopper = proc do\n @trapped_count += 1\n stop!\n\n # Reset count after 5 seconds\n EM.add_timer(5) { @trapped_count = 0 }\n end\n\n trap('INT') do\n stopper.call\n abort \"Multiple INT signals trapped; aborting!\" if @trapped_count > 1\n end\n\n trap('TERM') { stopper.call }\n\n unless !!RUBY_PLATFORM =~ /mswin|mingw/\n trap('QUIT') { stop! }\n trap('HUP') { restart }\n end\n end", "def setup_traps\n\t\tSignal.trap(\"TERM\") { yield }\n\t\tSignal.trap(\"INT\") { yield }\n\tend", "def dont_interupt\n @interuptable = false\n @enqueued = []\n # rubocop: disable Style/GuardClause\n if block_given?\n yield\n allow_interuptions\n end\n # rubocop: enable Style/GuardClause\n end", "def on_program_interrupt(&callback)\n @on_signal << callback\n self\n end", "def signalProcessCompletion()\n $incidentCount +=1;\n puts $incidentCount;\nend", "def notice_signal(signal)\n Thread.new do\n Karafka.monitor.instrument('process.notice_signal', caller: self, signal: signal)\n end\n end", "def signal\n @monitor.mon_check_owner\n @cond.signal\n end", "def interrupt\n SQLite::API.interrupt( @handle )\n end", "def without_instrumentation\n Appsignal::Transaction.current.pause! if Appsignal::Transaction.current\n yield\n ensure\n Appsignal::Transaction.current.resume! if Appsignal::Transaction.current\n end", "def without_instrumentation\n Appsignal::Transaction.current.pause! if Appsignal::Transaction.current\n yield\n ensure\n Appsignal::Transaction.current.resume! 
if Appsignal::Transaction.current\n end", "def setup_traps\n ['INT', 'TERM'].each do |sig|\n old = trap(sig) do\n un_register\n @amq.instance_variable_get('@connection').close do\n EM.stop\n old.call if old.is_a? Proc\n end\n end\n end\n true\n end", "def execute_with_rescue\n yield if block_given?\n rescue Interrupt\n rescue_interrupt\n rescue => error\n log_error(error)\n end", "def execute_INTO\n\t\t# Overflow interrupt is Type 4\n\t\tperform_interrupt_for 4\n\tend", "def shutdown\n @signal_squash.call\n end", "def signal_access\n @condvar.signal unless @running_threads >= @max_threads\n end", "def signaled?(*) end", "def handle_old_mode\n disable_trap if @mode == Modes::SIGNAL\n end", "def interrupt!\n @interrupted = true\n logger.info \"Interrupted, terminating...\"\n end", "def accept_nonblock\r\n end", "def handle_signal(sig)\n case sig\n when :TERM\n handle_term_signal\n when :INT\n handle_interrupt\n when :HUP\n handle_hangup\n when *HANDLED_SIGNALS\n handle_signal_forward(sig)\n else\n system \"unhandled signal #{sig}\"\n end\n end", "def no_interrupt_if_interactive\n trap(\"INT\") {} if interactive\n end", "def interruptible?; event(:stop).controlable? end", "def fake_sig(sig) # :nodoc:\n old_cb = trap(sig, \"IGNORE\")\n old_cb.call\n ensure\n trap(sig, old_cb)\n end" ]
[ "0.6885704", "0.68752533", "0.6866606", "0.6766158", "0.6764911", "0.674545", "0.6618631", "0.6530069", "0.65250605", "0.6524573", "0.6524573", "0.65173304", "0.65173304", "0.64479595", "0.6390202", "0.6321037", "0.6320309", "0.62833625", "0.62184215", "0.62168443", "0.61884224", "0.6168848", "0.6166608", "0.61598223", "0.61390823", "0.6098163", "0.60889184", "0.6083065", "0.6057544", "0.60395503", "0.60392797", "0.60124063", "0.6006157", "0.6004547", "0.5995423", "0.5992671", "0.59801364", "0.59774476", "0.59481376", "0.5944065", "0.59349096", "0.5934745", "0.59313995", "0.5913966", "0.5900594", "0.5896535", "0.58848184", "0.58558816", "0.58550423", "0.58508563", "0.58489496", "0.58270776", "0.5818262", "0.57995284", "0.57916373", "0.5776023", "0.576667", "0.5765671", "0.5756641", "0.575128", "0.5716072", "0.571487", "0.5680035", "0.5680035", "0.5643562", "0.5627027", "0.56262267", "0.56161946", "0.5605334", "0.5604824", "0.5600253", "0.5589469", "0.5563811", "0.55340916", "0.55340916", "0.55241686", "0.550039", "0.5496241", "0.54612076", "0.54593503", "0.5440534", "0.5415287", "0.54118013", "0.5400241", "0.5389037", "0.5388741", "0.5388741", "0.537913", "0.5361753", "0.5361071", "0.53538334", "0.5324796", "0.5313765", "0.531171", "0.53104955", "0.529885", "0.529608", "0.5292791", "0.5291491", "0.52904826" ]
0.8010634
0
Calling 'block' when having trapped a TERM signal
def on_term_signal(&block) # trap(:TERM) {} warn "Missing implementation 'on_term_signal'" end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stop(&block)\n yield(DONE)\n puts stop_message\n end", "def signal; end", "def signal; end", "def signal_received; end", "def trap_sigint(metabolizer, &block)\n trap 'SIGINT', proc {\n block.call if block\n metabolizer.stop\n }\nend", "def user_interrupt\n write 'Terminating' # XXX get rid of this\n stoploop\n end", "def on_interrupt(&block)\n trap(\"INT\") { yield \"SIGINT\" }\n trap(\"QUIT\") { yield \"SIGQUIT\" }\n trap(\"TERM\") { yield \"SIGTERM\" }\n end", "def stop\n sig :TERM\n end", "def interrupt?; end", "def signal_queue; end", "def signal\n end", "def shutdown\n @signal_squash.call\n end", "def signal_stop\n @stop_requested = true\n end", "def on_finish &block\n signal_connect \"finish\" do \n block.call()\n end\n end", "def trap_signals\n Signal.trap(\"TERM\") { stop! }\n Signal.trap(\"INT\") { manual_stop! }\n end", "def trap_signals\n Signal.trap('INT') do\n say \"\\nQuitting...\", :red\n Kernel.exit\n end\n end", "def interrupt; end", "def trap_signals\n [:INT, :QUIT, :TERM].each do |sig|\n trap(sig) do\n ProcessManager::Log.info \"#{description}: Received #{sig} - setting internal shutting down flag and possibly finishing last run\"\n stop_thread = Thread.new {stop}\n stop_thread.join\n end\n end\n # make sure we do not handle children like the master process\n trap(:CHLD, 'DEFAULT')\n end", "def handle_interrupt; end", "def trap(signal, &block)\n if Signal.list.include?(signal)\n Kernel.trap(signal, &block) unless Merb.disabled?(:signals)\n end\n end", "def do_later\n at_exit{ yield }\n end", "def at_exit(&block); end", "def trap_signals\n %w(TERM INT).each do |signal|\n trap(signal) { stop }\n end\n end", "def trap\n\t\t\t\ttask = Task.current\n\t\t\t\ttask.annotate(\"waiting for signal #{@name}\")\n\t\t\t\t\n\t\t\t\tnotification = Notification.new\n\t\t\t\t@notifications << notification\n\t\t\t\t\n\t\t\t\twhile true\n\t\t\t\t\tnotification.wait\n\t\t\t\t\tyield\n\t\t\t\tend\n\t\t\tensure\n\t\t\t\tif notification\n\t\t\t\t\tnotification.close\n\t\t\t\t\[email protected](notification)\n\t\t\t\tend\n\t\t\tend", "def on_bad_exit(&block)\n @bad_exit_block = block\n end", "def stop\n\t\tC.glyr_signal_exit(to_native)\n\tend", "def handle_term_signal\n system \"SIGTERM received, starting shutdown\"\n @shutdown = true\n end", "def trap(sig)\n ::Signal.trap(sig) do\n puts\n puts \"Interrupt! Signals from an interruption are not thread-safe.\"\n \n call_prompt = true\n 3.times do\n print \"Wait for thread-safe break? (y/n): \"\n\n case gets.strip\n when /^y(es)?$/i\n puts \"waiting for break...\"\n app.pq(self, [])\n call_prompt = false\n break\n\n when /^no?$/i\n break\n end\n end\n\n if call_prompt\n call([])\n end\n end\n end", "def force_end\n stop\n end", "def sigHandler\n Signal.trap(\"INT\") { stop }\n Signal.trap(\"TERM\") { stop }\n end", "def on_finish &block\n signal_connect \"finish\" do\n block.call\n end\n end", "def signal\n @cmd_result.signal\n end", "def stop_condition(&block)\n self.gracefully_stop_mark = block\n end", "def stop\n yield\n end", "def stop &block\n if block_given? then\n @stopped = true\n @suspended = false\n # Wait till something is put on the dead queue...\n # This stops us from acquiring the mutex until after @thread\n # has processed @stopped set to true.\n sig = @dead.deq\n # The cron thread should be dead now, or wrapping up (with the\n # acquired mutex)... 
\n @mutex.synchronize {\n while @thread.alive?\n sleep 0.2\n end\n block.call(self)\n }\n end\n end", "def watch\n begin\n yield\n rescue Interrupt\n puts\n exit 0\n end\n end", "def delayed_exit\n sleep 0.1\n exit\n end", "def signal_handle\n unless @context.ignore_sigint?\n print \"\\nabort!\\n\" if @context.verbose?\n exit\n end\n\n case @signal_status\n when :IN_INPUT\n print \"^C\\n\"\n raise RubyLex::TerminateLineInput\n when :IN_EVAL\n IRB.irb_abort(self)\n when :IN_LOAD\n IRB.irb_abort(self, LoadAbort)\n when :IN_IRB\n # ignore\n else\n # ignore other cases as well\n end\n end", "def on_termination_signal( signo )\n\t\tself.log.warn \"Terminated (%p)\" % [ signo ]\n\t\tself.shutdown\n\tend", "def interrupt_handler\n signal_handler(2)\n end", "def disable_until_finished_or_interrupted; end", "def interruptible?; event(:stop).controlable? end", "def terminate!() end", "def stop_safely\n if EM.reactor_running?\n ::EM.add_timer(0.2) { \n ::AMQP.stop { \n ::EM.stop\n puts \"\\n\"\n }\n }\n end\n end", "def isolate_from_interrupts\n instance.signal_received = false\n instance.isolate_signals = true\n result = yield\n instance.isolate_signals = false\n result\n end", "def terminates\n event :failed, command: true, terminal: true\n interruptible\n end", "def signal_received=(_arg0); end", "def trap_signals\n # kill\n trap('INT') do\n puts('INT received')\n notify_subscribers('INT received')\n stop\n end\n\n # kill\n trap('TERM') do\n puts('TERM received')\n notify_subscribers('TERM received')\n stop\n end\n\n # graceful\n trap('QUIT') do\n puts('QUIT received')\n notify_subscribers('QUIT received')\n stop_soft\n end\n\n # reset things\n trap('HUP') do\n puts('HUP received')\n notify_subscribers('HUP received')\n reset\n end\n end", "def terminate() end", "def capture_signals\n %w[ TERM INT QUIT HUP ].each do |signal|\n Signal.trap(signal) { stop }\n end\n end", "def register_signals\n trap(\"TERM\") { self.bunny.stop }\n trap(\"INT\") { self.bunny.stop }\n end", "def halt\n @executor.shutdown\n end", "def supervise\n HANDLED_SIGNALS.each { |signal| trap_signal(signal) }\n end", "def stop(&block)\n @stop = block\n end", "def after_exit(&block)\n end", "def trap_deferred(signal)\n trap(signal) do |sig_nr|\n if @waiting_for_reaper && [:INT, :TERM].include?(signal)\n log \"Recieved #{signal}: short circuiting QUIT waitpid\"\n raise QuitNowException\n end\n if sig_queue.size < SIG_QUEUE_MAX_SIZE\n sig_queue << signal\n awaken_master\n else\n log \"ignoring SIG#{signal}, queue=#{sig_queue.inspect}\"\n end\n end\n end", "def catch_halt\n catch :halt do\n yield\n end\n end", "def shutdown!\n _shutdown 'SIGKILL' unless dead?\n end", "def terminate\n @queue << \"terminate\"\n end", "def setup_traps\n ['INT', 'TERM'].each do |sig|\n old = trap(sig) do\n un_register\n @amq.instance_variable_get('@connection').close do\n EM.stop\n old.call if old.is_a? Proc\n end\n end\n end\n true\n end", "def run_nonblock(&block)\n @timeout = 0\n run &block\n end", "def halt; end", "def halt; end", "def pop_interrupt; end", "def trap_signals\n [\"TERM\", \"INT\"].each do |signal|\n trap(signal) do\n @thin_server.stop! 
if @thin_server\n EM.stop\n end\n end\n end", "def run_interrupted; end", "def run(&block)\n raise \"#{self} cannot run; it was permanently killed.\" if @dead\n \n super do |socket, revents|\n if socket == @int_sock_rep || socket == @int_sock_pull\n key, * = socket.recv_array\n kill = key == \"KILL\"\n blocking = socket == @int_sock_rep\n \n # Call the user block of #interrupt and store the return value\n unless kill\n result = @interruptions.pop.call\n @outerruptions.push result if blocking\n end\n \n # Call the user block of #run\n block.call nil, nil if block\n \n # Send a response if the interruption was blocking\n socket.send_array [\"OKAY\"] if blocking\n \n if kill\n @int_sock_rep.close\n @int_sock_pull.close\n @dead = true\n end\n else\n block.call socket, revents if block\n end\n end.tap do |hash|\n hash.delete @int_sock_rep\n hash.delete @int_sock_pull\n end\n end", "def handle_signal( signal )\n\t\tself.log.info \"Handling %p signal.\" % [ signal ]\n\t\tcase signal\n\t\twhen :INT, :TERM, :HUP\n\t\t\tself.stop\n\t\telse\n\t\t\tsuper\n\t\tend\n\tend", "def setup_traps\n\t\tSignal.trap(\"TERM\") { yield }\n\t\tSignal.trap(\"INT\") { yield }\n\tend", "def expect_interrupt\n\tbegin\n\t\tyield\n\trescue Interrupt\n\t\tputs # to separate the typed `^C` from upcoming program output or shell prompt\n\tend\nend", "def trap_resume\n Curses.raw\n old_cont = trap 'CONT' do Curses.doupdate end\n\n yield\n\n ensure\n Curses.noraw\n trap 'CONT', old_cont\n end", "def terminate\n wrap_chain(:stop) do\n if (EventMachine.reactor_running?)\n EventMachine.stop_event_loop\n end\n\n @state = :terminated\n end\n end", "def trap_signals\n trap(\"TERM\") do\n\tself.fatal!(\"TERM received\",1)\n end\n\n trap(\"INT\") do\n\tself.fatal!(\"Interrupt signal received.\", 2)\n end\n end", "def trap_signals\n trap(\"TERM\") do\n\tself.fatal!(\"TERM received\",1)\n end\n\n trap(\"INT\") do\n\tself.fatal!(\"Interrupt signal received.\", 2)\n end\n end", "def close\r\n # Apparently works as expected even without a mutex\r\n # (which can't be used inside a method invoked by Signal.trap).\r\n @done = true\r\n @thd.join unless @thd.nil?\r\n @serial.close unless @serial.nil?\r\n end", "def stop!; end", "def terminate\n @state = :terminating\n cancel_timer\n true\n end", "def stop\n return if done?\n _kill 15 # never negative!\n @stdout_handler.stop\n @stderr_handler.stop\n sleep 0.05\n @pid == waitpid\n close\n end", "def stop(&blk)\n if blk\n Smith.stop(true, &blk)\n else\n Smith.stop(true)\n end\n end", "def wakeup() end", "def trap_exit_signal(signal)\n trap(signal) {\n Serv::Initializer.stop!\n }\n end", "def unbind(reason)\n super\n @queue.each {|job| job.fiber.resume(:kill) }\n end", "def add_sigint_handler\n trap 'INT' do\n puts '\\nCancelled. 
Bye Bye!'\n exit!\n end\n end", "def signal_wait_until(pr, &block)\n #NOTE: busy waiting!!!\n while true do\n torrent = yield\n break if pr.call torrent\n end\n end", "def initialize_sighandlers\n trap(:INT) do\n stop\n exit\n end\n end", "def c_quit()\n \tputs \"closing mucs\"\n\tcb = proc { \n\t\tsleep 5\n\t\treturn 1\n\t}\n#\[email protected] {|chan,mucobj| }\n#\tEventMachine::defer (cb, proc {|r| on_quit(r)})\n\tend", "def handle_old_mode\n disable_trap if @mode == Modes::SIGNAL\n end", "def set_signal_trap\n finalizer = Proc.new { finalize }\n Signal.trap(:INT, finalizer)\n end", "def after_block_boundary?; end", "def stop; end", "def stop; end", "def stop; end", "def stop; end", "def stop; end", "def stop; end", "def stop; end", "def request_termination\n @sigterm_timer.cancel if @sigterm_timer\n ::Process.kill('TERM', @pid) rescue nil\n end", "def halt\n\t\tself.shutting_down = true\n\t\tself.consumer.channel.close\n\tend", "def run_async\n stop = false\n\n puts \"Press Crtl+C to quit...\"\n trap('INT') do\n puts \"\\nFinish...\"\n stop = true\n end\n\n while !stop\n Orocos::Async.step\n sleep(0.01)\n end\n\nend", "def signaled?(*) end" ]
[ "0.6559758", "0.65104586", "0.65104586", "0.63744813", "0.6368255", "0.63094765", "0.62805223", "0.6260285", "0.6246381", "0.6223103", "0.62135446", "0.6194549", "0.6158756", "0.61425453", "0.6135079", "0.6112899", "0.6101866", "0.60911554", "0.60883284", "0.60726094", "0.6055065", "0.6050916", "0.60384405", "0.6036926", "0.6027699", "0.6002289", "0.5988657", "0.5971712", "0.5970988", "0.5969589", "0.595791", "0.59519726", "0.59431595", "0.5935311", "0.59251964", "0.5921418", "0.5917164", "0.59136343", "0.5909944", "0.58865863", "0.58697194", "0.5853349", "0.58529615", "0.58455", "0.58304614", "0.5826905", "0.58086896", "0.5807721", "0.5801178", "0.5793666", "0.5760496", "0.5758592", "0.5748395", "0.57468075", "0.57384574", "0.5734326", "0.5734154", "0.5728948", "0.57258636", "0.57198185", "0.57108045", "0.57043904", "0.57043904", "0.5695739", "0.5690288", "0.5689486", "0.5687459", "0.56872416", "0.5686564", "0.568288", "0.5680315", "0.56801534", "0.56738156", "0.56738156", "0.5662728", "0.5649332", "0.5647447", "0.56422836", "0.5637825", "0.5634418", "0.56338066", "0.5633727", "0.5632966", "0.5632935", "0.56223476", "0.5615951", "0.561414", "0.5607922", "0.56043655", "0.5604005", "0.5604005", "0.5604005", "0.5604005", "0.5604005", "0.5604005", "0.5604005", "0.560329", "0.5595002", "0.55908227", "0.55839247" ]
0.71361977
0
Use callbacks to share common setup or constraints between actions.
def set_lessor @lessor = Lessor.find(params[:id]) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_required_actions\n # TODO: check what fields change to asign required fields\n end", "def action_hook; end", "def run_actions; end", "def define_action_hook; end", "def actions; end", "def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_eval do\n define_method(:valid?) do |*args|\n self.class.state_machines.fire_event_attributes(self, :save, false) { super(*args) }\n end\n end\n end\n end", "def add_actions; end", "def callbacks; end", "def callbacks; end", "def setup *actions, &proc\n (@setup_procs ||= []) << [proc, actions.size > 0 ? actions : [:*]]\n end", "def define_action_helpers; end", "def post_setup\n end", "def action_methods; end", "def action_methods; end", "def action_methods; end", "def before_setup; end", "def action_run\n end", "def execute(setup)\n @action.call(setup)\n end", "def define_action_helpers?; end", "def set_actions\n actions :all\n end", "def action_done(action)\n dispatch = { :migrate => :done_migrating, :map => :done_mapping, :reduce =>\n :done_reducing, :finalize => :done_finalizing } \n self.send dispatch[action[:action]], action\n end", "def dependencies action, &block\n @actions.each do |other|\n if action[:requires].include? other[:provide]\n block.call other\n end\n end\n end", "def setup!\n return unless @setup_procs\n http_actions = actions\n @setup_procs.each do |setup_proc|\n proc, actions = setup_proc\n @setup__actions = actions.map do |action|\n\n action.is_a?(Regexp) ?\n http_actions.select { |a| a.to_s =~ action } :\n action.is_a?(String) && action =~ /\\A\\./ ?\n http_actions.map { |a| a.to_s << action if format?(a).include?(action) }.compact :\n action\n\n end.flatten\n self.class_exec &proc\n @setup__actions = nil\n end\n @setup_procs = nil\n end", "def before_actions(*logic)\n self.before_actions = logic\n end", "def setup_handler\n end", "def set_action(opts)\n opts = check_params(opts,[:actions])\n super(opts)\n end", "def setup(action)\n @targets.clear\n unless action.item.target_filters.empty?\n @targets = SES::TargetManager.make_targets(action)\n else\n item = action.item\n if item.for_opponent?\n @targets = $game_troop.alive_members\n elsif item.for_dead_friend?\n @targets = $game_party.battle_members.select { |actor| actor.dead? }\n else\n $game_party.battle_members.select { |actor| actor.alive? 
}\n end\n end\n @item_max = @targets.size\n create_contents\n refresh\n show\n activate\n end", "def action; end", "def action; end", "def action; end", "def action; end", "def action; end", "def workflow\n end", "def revisable_shared_setup(args, block)\n class << self\n attr_accessor :revisable_options\n end\n options = args.extract_options!\n self.revisable_options = Options.new(options, &block)\n \n self.send(:include, Common)\n self.send(:extend, Validations) unless self.revisable_options.no_validation_scoping?\n self.send(:include, WithoutScope::QuotedColumnConditions)\n end", "def setup\n @action = SampleActionAndroid.new(os_name: 'android',\n app_name: APP_PATH)\n end", "def before(action)\n invoke_callbacks *self.class.send(action).before\n end", "def process_action(...)\n send_action(...)\n end", "def before_dispatch(env); end", "def after_actions(*logic)\n self.after_actions = logic\n end", "def setup\n # override and do something appropriate\n end", "def setup(client)\n return unless @setup\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n actions.each do |action|\n action.execute(client)\n end\n self\n end", "def setup(_context)\n end", "def setup(resources) ; end", "def validate_actions\n errors.add(:base, :should_give_at_least_one_action) if !manage? && !forecasting? && !read? && !api?\n end", "def setup\n @resource_config = {\n :callbacks => {\n :before_create => nil,\n :after_create => nil,\n :before_update => nil,\n :after_update => nil,\n :before_destroy => nil,\n :after_destroy => nil,\n },\n :child_assoc => nil,\n :model => nil,\n :parent => nil,\n :path => nil,\n :permission => {},\n :properties => {},\n :relation => {\n :create => nil,\n :delete => nil,\n },\n :roles => nil,\n }\n end", "def determine_valid_action\n\n end", "def process_shared\n handle_taxes\n handle_shippings\n create_adjustments_from_params\n handle_status\n handle_inventory_refunds\n handle_payment_transactions\n order.updater.update\n end", "def startcompany(action)\n @done = true\n action.setup\n end", "def init_actions\n am = action_manager()\n am.add_action(Action.new(\"&Disable selection\") { @selection_mode = :none; unbind_key(32); bind_key(32, :scroll_forward); } )\n am.add_action(Action.new(\"&Edit Toggle\") { @edit_toggle = !@edit_toggle; $status_message.value = \"Edit toggle is #{@edit_toggle}\" })\n end", "def event_callbacks(event, metadata={})\n case event\n when :reset, :review\n if confirmed\n update_attributes(confirmed: false)\n end\n when :confirm\n confirm\n # trigger :order for all applicable items\n # NOTE: :order event is common to both physical and digital items\n items.each do |i|\n if i.event_permitted(:order)\n user_id = last_transition.user_id\n i.trigger!(:order, { order_id: id, user_id: user_id })\n end\n end\n when :complete_work\n request = metadata[:request]\n work_complete_notification(request)\n when :close\n close\n end\n if event != :close && !open\n reopen\n end\n end", "def setup_action\n return unless PONY::ERRNO::check_sequence(current_act)\n new_sequence = @action_sequence[@sequence_index+1...@action_sequence.size]\n @sequence_index = 0\n new_sequence = DND::SkillSequence::ACTS[@acts[1]] + new_sequence\n execute_sequence\n end", "def define_tasks\n define_weave_task\n connect_common_tasks\n end", "def setup(&block)\n define_method(:setup, &block)\n end", "def setup\n transition_to(:setup)\n end", "def setup\n transition_to(:setup)\n end", "def action\n end", "def setup( *args 
)\n\t\t\tself.class.setupBlocks.each {|sblock|\n\t\t\t\tdebugMsg \"Calling setup block method #{sblock}\"\n\t\t\t\tself.send( sblock )\n\t\t\t}\n\t\t\tsuper( *args )\n\t\tend", "def config(action, *args); end", "def setup\n @setup_proc.call(self) if @setup_proc\n end", "def before_action \n end", "def setup_callbacks\n defined_callbacks.each do |meth|\n unless respond_to?(\"call_#{meth}_callbacks\".to_sym)\n self.class.module_eval <<-EOE\n def call_#{meth}_callbacks(*args)\n plugin_store.each {|a| a.call_#{meth}_callbacks(*args) } if respond_to?(:plugin_store) && plugin_store\n self.send :#{meth}, *args if respond_to?(:#{meth})\n end\n EOE\n end\n end\n end", "def action\n end", "def matt_custom_action_begin(label); end", "def setup\n # override this if needed\n end", "def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend", "def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend", "def action(options,&callback)\n new_action = Action===options ? options : Action.new(options,&callback)\n # replace any with (shared name/alias or both default) + same arity\n @actions.delete_if do |existing_action|\n ((existing_action.names & new_action.names).size > 0 ||\n existing_action.default? && new_action.default?) &&\n existing_action.required.size == new_action.required.size &&\n existing_action.optional.size <= new_action.optional.size\n end\n @actions = (@actions + [new_action]).sort\n new_action\n end", "def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action\n end", "def after(action)\n invoke_callbacks *options_for(action).after\n end", "def pre_task\n end", "def setup(server)\n server.on('beforeMethod', method(:before_method), 10)\n end", "def add_actions\n attribute = machine.attribute\n name = self.name\n \n owner_class.class_eval do\n define_method(name) {self.class.state_machines[attribute].events[name].fire(self)}\n define_method(\"#{name}!\") {self.class.state_machines[attribute].events[name].fire!(self)}\n define_method(\"can_#{name}?\") {self.class.state_machines[attribute].events[name].can_fire?(self)}\n end\n end", "def init_actions\n @select_action = SelectAction.new\n @endpoint_mouse_action = EndpointMouseAction.new\n @move_action = MoveAction.new\n end", "def setup_signals; end", "def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend", "def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend", "def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action.respond_to?('weak!') ? action.weak! 
: action\n end", "def initialize(*args)\n super\n @action = :set\nend", "def after_set_callback; end", "def setup\n #implement in subclass;\n end", "def lookup_action; end", "def setup &block\n if block_given?\n @setup = block\n else\n @setup.call\n end\n end", "def setup_action\n return TSBS.error(@acts[0], 1, @used_sequence) if @acts.size < 2\n actions = TSBS::AnimLoop[@acts[1]]\n if actions.nil?\n show_action_error(@acts[1])\n end\n @sequence_stack.push(@acts[1])\n @used_sequence = @acts[1]\n actions.each do |acts|\n @acts = acts\n execute_sequence\n break if @break_action\n end\n @sequence_stack.pop\n @used_sequence = @sequence_stack[-1]\n end", "def release_actions; end", "def around_hooks; end", "def save_action; end", "def setup(easy)\n super\n easy.customrequest = @verb\n end", "def action_target()\n \n end", "def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end", "def setup\n return unless @setup\n\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n run_actions_and_retry(actions)\n self\n end", "def before_setup\n # do nothing by default\n end", "def my_actions(options)\n @setup = false\n get_template_part(\"custom_used\",\"action_users\",true)\n end", "def default_action; end", "def setup(&blk)\n @setup_block = blk\n end", "def callback_phase\n super\n end", "def advice\n end", "def _handle_action_missing(*args); end", "def duas1(action)\n action.call\n action.call\nend", "def shared_action(name, &block)\n @controller.shared_actions[name] = block\n end", "def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end", "def setup_initial_state\n\n state_a = State.new(\"a\", 0)\n state_b = State.new(\"b\", 0)\n state_c = State.new(\"c\", 10)\n\n move_to_b = Action.new(\"move_to_b\", 1, state_b)\n\n move_to_c = Action.new(\"move_to_c\", 1, state_c)\n\n state_a.actions = [move_to_b, move_to_c]\n\n return state_a\n \nend" ]
[ "0.6163163", "0.6045976", "0.5946146", "0.591683", "0.5890051", "0.58349305", "0.5776858", "0.5703237", "0.5703237", "0.5652805", "0.5621621", "0.54210985", "0.5411113", "0.5411113", "0.5411113", "0.5391541", "0.53794575", "0.5357573", "0.53402257", "0.53394014", "0.53321576", "0.53124547", "0.529654", "0.5296262", "0.52952296", "0.52600986", "0.52442724", "0.52385926", "0.52385926", "0.52385926", "0.52385926", "0.52385926", "0.5232394", "0.523231", "0.5227454", "0.52226824", "0.52201617", "0.5212327", "0.52079266", "0.52050185", "0.51754695", "0.51726824", "0.51710224", "0.5166172", "0.5159343", "0.51578903", "0.51522785", "0.5152022", "0.51518047", "0.51456624", "0.51398855", "0.5133759", "0.5112076", "0.5111866", "0.5111866", "0.5110294", "0.5106169", "0.509231", "0.50873137", "0.5081088", "0.508059", "0.50677156", "0.50562143", "0.5050554", "0.50474834", "0.50474834", "0.5036181", "0.5026331", "0.5022976", "0.5015441", "0.50121695", "0.5000944", "0.5000019", "0.4996878", "0.4989888", "0.4989888", "0.49864885", "0.49797225", "0.49785787", "0.4976161", "0.49683493", "0.4965126", "0.4958034", "0.49559742", "0.4954353", "0.49535993", "0.4952725", "0.49467874", "0.49423352", "0.49325448", "0.49282882", "0.49269363", "0.49269104", "0.49252945", "0.4923091", "0.49194667", "0.49174926", "0.49173003", "0.49171105", "0.4915879", "0.49155936" ]
0.0
-1
Only allow a trusted parameter "white list" through.
def lessor_params params.require(:lessor).permit(:name, :street_address, :secondary_address, :city, :region, :postal_code) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def allowed_params\n ALLOWED_PARAMS\n end", "def expected_permitted_parameter_names; end", "def param_whitelist\n [:role, :title]\n end", "def default_param_whitelist\n [\"mode\"]\n end", "def permitir_parametros\n \t\tparams.permit!\n \tend", "def permitted_params\n []\n end", "def strong_params\n params.require(:user).permit(param_whitelist)\n end", "def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end", "def filtered_parameters; end", "def permitted_strong_parameters\n :all #or an array of parameters, example: [:name, :email]\n end", "def parameters_list_params\n params.require(:parameters_list).permit(:name, :description, :is_user_specific)\n end", "def parameter_params\n params.require(:parameter).permit(:name, :description, :param_code, :param_value, :active_from, :active_to)\n end", "def param_whitelist\n whitelist = [\n :description,\n :progress,\n :kpi_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:kpi_id)\n end\n \n whitelist\n end", "def param_whitelist\n whitelist = [\n :username, :name,\n :parent_id,\n :headline, :description, :video,\n :policy, :signup_mode, :category,\n :website, :facebook, :twitter, :linkedin,\n :founded_at,\n privacy: [\n :events,\n :resources\n ],\n permission: [\n :profile,\n :members,\n :children,\n :statistics,\n :posts,\n :listings,\n :resources,\n :events\n ],\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:parent_id)\n unless current_user.role_in(@community) === 'owner'\n whitelist.delete(:privacy)\n whitelist.delete(:permission)\n end\n end\n \n whitelist\n end", "def param_whitelist\n [:rating, :review]\n end", "def valid_params?; end", "def permitted_params\n declared(params, include_missing: false)\n end", "def permitted_params\n declared(params, include_missing: false)\n end", "def get_params\n\t\treturn ActionController::Parameters.new(self.attributes).permit(\"account_id\", \"title\", \"category\", \"introduction\", \"tags\", \"segment_type\", \"visible\", \"status\", \"main_image\")\n\tend", "def filter_parameters; end", "def filter_parameters; end", "def strong_params\n params.require(:team_member).permit(param_whitelist)\n end", "def strong_params\n params.require(:community).permit(param_whitelist)\n end", "def check_params; true; end", "def valid_params_request?; end", "def strong_params\n params.require(:experience).permit(param_whitelist)\n end", "def allowed_params\n params.require(:user).permit(:username, :email, :password, :password_confirmation)\n end", "def list_params\n params.permit(:name)\n end", "def check_params\n true\n end", "def grant_params\n @whitelisted = params.require(:grant).permit(:name, :description, :agency_id, :acronym)\n end", "def safe_params\n resurce_name = self.class.resource_name\n params_method_name = \"#{resurce_name}_params\".to_sym\n if params[resurce_name]\n if respond_to?(params_method_name) || private_methods.include?(params_method_name)\n send(params_method_name)\n else\n raise ActiveModel::ForbiddenAttributesError, \"Please, define the '#{params_method_name}' method in #{self.class.name}\"\n end\n end\n end", "def additional_permitted_params\n []\n end", "def strong_params\n params.require(:education).permit(param_whitelist)\n end", "def resource_params\n params[resource_singular_name].try(:permit, self.class.param_whitelist)\n end", "def allow_params_authentication!; end", "def param_whitelist\n [\n :title,\n :description,\n 
:organization,\n :team_id,\n :started_at,\n :finished_at,\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n end", "def param_whitelist\n if @user.present? && current_user != @user\n return [:followed]\n end\n \n whitelist = [\n :username, :email, :password,\n :first_name, :last_name,\n :birthday, :gender,\n :headline, :biography, :ask_about, :focus,\n :website, :facebook, :linkedin, :twitter, :github,\n roles: [],\n skills: [],\n interests: [],\n privacy: { contact: [] },\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:email)\n whitelist.delete(:password)\n end\n \n whitelist\n end", "def person_params\n # params whitelist does *not* include admin, sub, remember_token\n # TBD: share this whitelist with the list used by configuration_permitted_parameters\n # TBD: should current_password be on this list? -- for now, leaving off, since it seems to work without\n # NOTE: do not include 'admin' in this list!\n params.require(:person).permit(\n :name, \n :email, \n :description,\n :password, \n :password_confirmation\n )\n end", "def paramunold_params\n params.require(:paramunold).permit!\n end", "def param_params\n params.require(:param).permit(:param_category_id, :param_table_id, :name, :english_name, :weighting, :description)\n end", "def quote_params\n params.permit!\n end", "def list_params\n params.permit(:list_name)\n end", "def allowed_params(parameters)\n parameters.select do |name, values|\n values.location != \"path\"\n end\n end", "def all_params; end", "def permitted_resource_params\n params[resource.object_name].present? ? params.require(resource.object_name).permit! 
: ActionController::Parameters.new\n end", "def source_params\n params.require(:source).permit(all_allowed_params)\n end", "def user_params\n end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def params; end", "def get_allowed_parameters\n return _get_specific_action_config(:allowed_action_parameters, :allowed_parameters)&.map(&:to_s)\n end", "def permitted_params\n @wfd_edit_parameters\n end", "def user_params\r\n end", "def param_whitelist\n whitelist = [\n :comment,\n :old_progress, :new_progress,\n :metric_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:metric_id)\n end\n \n whitelist\n end", "def query_param\n\t\tparams.permit(:first_name, :last_name, :phone)\n\tend", "def whitelisted_user_params\n params.require(:user).\n permit( :first_name, :last_name, :email,:password,:password_confirmation,:birthday,:gender)\n end", "def filter_params\n\t\treturn params[:candidate].permit(:name_for_filter)\n\tend", "def user_params\n params.permit(:id, :email, :password, :nickname, :status, :avatar, :flat_picture, :flatsharing_id, :member,\n :user, :color, :solde)\n end", "def get_params\n\t\t\n\t\treturn ActionController::Parameters.new(self.attributes).permit(:first_name, :last_name, :email, :provider)\n\n\tend", "def devise_filter\r\n logger.debug(\"In devise_filter =>PARAMS: #{params.inspect}\")\r\n\r\n # White list for sign_up\r\n devise_parameter_sanitizer.for(:sign_up) { |u| u.permit(user_whitelist) }\r\n\r\n # White list for account update\r\n devise_parameter_sanitizer.for(:account_update) { |u| u.permit(user_whitelist, :current_password) }\r\n\r\n # White list for Invitation creation\r\n devise_parameter_sanitizer.for(:invite) { |u| u.permit(:account_type, :email, :invitation_token)}\r\n\r\n # White list for accept invitation\r\n devise_parameter_sanitizer.for(:accept_invitation) { |u| u.permit(user_whitelist, :invitation_token)}\r\n\r\n end", "def valid_params(params)\n params.permit(:user_id, :photo_id, :originX, :originY, :width, :height)\n end", "def valid_parameters\n sort_symbols(@interface.allowed_parameters)\n end", "def params_permit\n params.permit(:id)\n end", "def allowed_params\n params.require(:allowed).permit(:email)\n end", "def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end", "def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end", "def filter_params\n params.permit(*resource_filter_permitted_params)\n end", "def community_params\n params.permit(:profile_image, :name, :description, :privacy_type, :viewed_by, {tags: []}, {features: []}, {admins: []}, :members, :location, :beacon, :creator, :ambassadors, :current_events, :past_events, :feed, :category, :address, :allow_member_post_to_feed, :allow_member_post_to_events)\n end", "def specialty_params\n\t\tparams.require(:specialty).permit(*Specialty::DEFAULT_ACCESSIBLE_ATTRIBUTES)\n\tend", "def authorize_params\n super.tap do |params|\n %w[display scope auth_type].each do |v|\n if request.params[v]\n params[v.to_sym] = request.params[v]\n end\n end\n end\n end", "def feature_params_filter\n params.require(:feature).permit(:name, :cat, :lower, :upper, :opts, :category, :description, :company, 
:active, :unit, :icon)\n end", "def available_activity_params\n # params.require(:available_activity).permit(:type,:geometry,:properties)\n whitelisted = ActionController::Parameters.new({\n type: params.require(:available_activity)[:type],\n geometry: params.require(:available_activity)[:geometry].try(:permit!).to_h,\n properties: params.require(:available_activity)[:properties].try(:permit!).to_h\n }).try(:permit!)\n end", "def argument_params\n params.require(:argument).permit(:name)\n end", "def user_params_pub\n\t \tparams[:user].permit(:hruid)\n\t end", "def strong_params\n params.require(:success_metric).permit(param_whitelist)\n end", "def property_params\n params.permit(:name, :is_available, :is_approved, :owner_id)\n end", "def restricted_params\n #params.require(self.controller_name.classify.underscore.to_sym).permit([])\n raise(\"No strong params set, override restricted_params method in your controller. E.g. params.require(:model).permit(:attribute1, :attribute2)\")\n end", "def sponsor_params\n params.require(:sponsor).permit(WHITE_LIST)\n end", "def whitelist_person_params\n params.require(:person).permit(:family, :pre_title, :given_name, :dates, :post_title, :epithet, :dates_of_office, same_as: [], related_authority: [], altlabel: [], note: []) # Note - arrays need to go at the end or an error occurs!\n end", "def parameters\n nil\n end", "def user_params \n \tparams.require(:user).permit(:name, :email, :password, :password_confirmation)# preventing CSTR\n end", "def sequence_param_whitelist\n default_param_whitelist << \"show_index\"\n end", "def resource_filter_permitted_params\n raise(NotImplementedError, 'resource_filter_permitted_params method not implemented')\n end", "def normal_params\n reject{|param, val| param_definitions[param][:internal] }\n end", "def validate_search_inputs\n @whitelisted = params.fetch(:user, nil)\n if @whitelisted.blank?\n render_error(400, \"#{I18n.t('general_error.params_missing_key')}\": [I18n.t('general_error.params_missing_value', model: \"review\")])\n return\n else\n @whitelisted = @whitelisted.permit(:name, :uen, :description)\n end\n end", "def special_device_list_params\n params.require(:special_device_list).permit(:name)\n end", "def pull_request_params\n whitelist = [\n :url,\n :id,\n :html_url,\n :diff_url,\n :patch_url,\n :issue_url,\n :number,\n :state,\n :locked,\n :title\n ]\n params.require(:pull_request).permit(whitelist)\n end" ]
[ "0.7121987", "0.70541996", "0.69483954", "0.6902367", "0.6733912", "0.6717838", "0.6687021", "0.6676254", "0.66612333", "0.6555296", "0.6527056", "0.6456324", "0.6450841", "0.6450127", "0.6447226", "0.6434961", "0.64121825", "0.64121825", "0.63913447", "0.63804525", "0.63804525", "0.6373396", "0.6360051", "0.6355191", "0.62856233", "0.627813", "0.62451434", "0.6228103", "0.6224965", "0.6222941", "0.6210244", "0.62077755", "0.61762565", "0.61711127", "0.6168448", "0.6160164", "0.61446255", "0.6134175", "0.6120522", "0.6106709", "0.60981655", "0.6076113", "0.60534036", "0.60410434", "0.6034582", "0.6029977", "0.6019861", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.6019158", "0.60184896", "0.60157263", "0.6005857", "0.6003803", "0.60012573", "0.59955895", "0.5994598", "0.5993604", "0.5983824", "0.5983166", "0.5977431", "0.597591", "0.5968824", "0.5965953", "0.59647584", "0.59647584", "0.59566855", "0.59506303", "0.5950375", "0.59485626", "0.59440875", "0.5930872", "0.5930206", "0.5925668", "0.59235454", "0.5917905", "0.59164816", "0.5913821", "0.59128743", "0.5906617", "0.59053683", "0.59052664", "0.5901591", "0.58987755", "0.5897456", "0.58970183", "0.58942604" ]
0.0
-1
Return list of jobs.
def jobs @jobs || {} end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def jobs\n self.ListJobs.first.map { |j| map_job(j) }\n end", "def all_jobs\n\n find_jobs()\n end", "def jobs(opts = {})\n api(\n @client.list_jobs(\n @project_id,\n deep_symbolize_keys(opts)\n )\n )\n end", "def jobs\n command('.jobs').split(\"\\n\")\n end", "def jobs(opts = {})\n api(api_method: @bq.jobs.list,\n parameters: opts)\n end", "def get_jobs_list(status = :all, page = 1, reload = false)\n Bitmovin::Job.list(status, page, reload)\n end", "def as_jobs\n @jobs ||= find_latest_completed_jobs\n end", "def list_jobs(json_payload={})\n conn = @client.get do |req|\n req.url '/api/v2/job/list?'\n req.headers[\"Authorization\"] = @token\n req.params = json_payload\n end\n conn.body\n end", "def list_jobs(args)\n Morpheus::Cli::BackupJobsCommand.new.list(args)\n end", "def find_jobs(params:)\n response = HTTParty.get(\"#{@host}/api/jobs\", query: params)\n\n return response[\"jobs\"] \n end", "def jobs\n return nil unless @rufus\n return @saved_job_list if !running\n\n @rufus.jobs.collect do |job|\n {\n :job => job,\n :job_spec_id => job.handler.job_spec_id,\n :job_spec_name => job.handler.job_spec_name,\n :launched_job => job.handler.launched_job,\n :running => job.running?,\n :last_time => job.last_time,\n :next_time => job.next_time,\n :opts => job.opts,\n :scheduled_at => job.scheduled_at,\n :unscheduled_at => job.unscheduled_at,\n :id => job.id,\n :tags => job.tags,\n :last_work_time => job.last_work_time,\n :mean_work_time => job.mean_work_time\n }\n end\n end", "def listjobs(project=self.project)\n get('listjobs.json', project: project).reject{|k,v| k=='status'}\n end", "def list_jobs\n jobs = if unsafe_params[:editable]\n Job.editable_by(@context).accessible_by_private\n else\n Job.accessible_by(@context)\n end\n\n if unsafe_params[:scopes].present?\n check_scope!\n jobs = jobs.where(scope: unsafe_params[:scopes])\n end\n\n if unsafe_params[:space_uid].present?\n jobs = jobs.terminal\n end\n\n result = jobs.eager_load(user: :org).order(id: :desc).map do |job|\n describe_for_api(job, unsafe_params[:describe])\n end\n\n render json: result\n end", "def getCurrentJobs\n getJobs('0/')\n end", "def jobs\n\t\t# ...\n\tend", "def index\n @jobs = Job.all\n end", "def jobs\r\n @parser.jobs\r\n end", "def jobs\n doc = Nokogiri::XML open(@url)\n\n doc.search('//job').map { |node|\n Job.new(attributes_from(node))\n }\n end", "def jobs\n @jobs ||= Beaneater::Jobs.new(self)\n end", "def list_jobs\n @glue_client.list_jobs\n rescue Aws::Glue::Errors::GlueException => e\n @logger.error(\"Glue could not list jobs: \\n#{e.message}\")\n raise\n end", "def jobs_without_template\n []\n end", "def jobs\r\n end", "def print_jobs_list(opts = {})\n data, _status_code, _headers = print_jobs_list_with_http_info(opts)\n data\n end", "def jobs\n JobPolicy::Scope.new(self, Job).resolve\n end", "def index\n jobs_index(Job.all)\n end", "def jobs\n raise NotImplementedError\n end", "def active_jobs\n result = Array.new\n self.jobs.each do |j|\n if j.private?\n result << j\n elsif j.active?\n result << j\n end\n end\n result\n end", "def index\n @jobs = Job.with_hires(nil).all\n end", "def job_items\n job_arguments(1)\n end", "def get_monitors\n return @jobs\n end", "def jobs(&block)\n @jobs_array = (@jobs_array || []) << block\n end", "def all\n Array(@@job_scheduler_instance)\n end", "def get_jobs_by_client_id(client_id)\n jobs = []\n for job in get_data(\"jobs\")\n if client_id == job[\"client_id\"]\n jobs.push(job)\n end\n end\n\n return jobs\n end", "def list_jobs(project_id, opts = {})\n data, 
_status_code, _headers = list_jobs_with_http_info(project_id, opts)\n return data\n end", "def index\n @jobs = Job.all\n end", "def index\n @jobs = Job.all\n end", "def index\n @jobs = Job.all\n end", "def index\n @jobs = Job.all\n end", "def index\n @jobs = Job.all\n end", "def index\n @jobs = Job.all\n end", "def list\n puts \"\\n\\n#{Job.count} job postings found\\n\\n\"\n Job.list.map do |job|\n puts \"#{job.id}. #{job.firstline}\"\n end\n puts menu\n end", "def index\n @jobs = PeriodicJob.list params[:page], current_user.row_limit\n end", "def search(*args)\n\t\t\targs = args[0] if args.is_a?(Array) && args.count == 1\n\n\t \tcb_response = self.api_get(Cb.configuration.uri_job_search, :query => args)\n\t \tjson_hash = JSON.parse(cb_response.response.body)\n\n\t \tpopulate_from json_hash, \"ResponseJobSearch\"\n\n\t \tjobs = []\n\t \tjson_hash[\"ResponseJobSearch\"][\"Results\"][\"JobSearchResult\"].each do |cur_job|\n\t \t\tjobs << CbJob.new(cur_job)\n\t \tend\n\n\t \treturn jobs\n\t\tend", "def jobs\n ApplicationJob.descendants\nend", "def jobs(params={})\n @radius = params[:radius] if params[:radius]\n @zipcode = params[:zipcode] if params[:zipcode]\n return @jobs if (@jobs && !params[:force])\n page = params[:page] || 1\n per_page = params[:per_page] || 100\n @jobs = get_jobs(page,per_page)\n end", "def public_api_get_jobs(modified, api_key, opts = {})\n data, _status_code, _headers = public_api_get_jobs_with_http_info(modified, api_key, opts)\n return data\n end", "def get_jobs(url)\n result = JSON.parse(get_data(url))\n job_list = []\n result[\"jobs\"].each do |job|\n job = JenkinsJob.new job[\"name\"], job[\"color\"], job[\"url\"]\n job_list << job\n end\n job_list\nend", "def index\n @job_requests = JobRequest.all\n end", "def scheduler_agent_jobs\n jobs(tag: SCHEDULER_AGENT_TAG)\n end", "def index\n @job_managers = JobManager.where(job_id: @job.id)\n end", "def current_cron_jobs\n client.get_cron_jobs(label_selector)\n end", "def list_jobs all: nil, token: nil, max: nil, filter: nil, min_created_at: nil, max_created_at: nil,\n parent_job_id: nil\n # The list operation is considered idempotent\n min_creation_time = Convert.time_to_millis min_created_at\n max_creation_time = Convert.time_to_millis max_created_at\n execute backoff: true do\n service.list_jobs @project, all_users: all, max_results: max,\n page_token: token, projection: \"full\", state_filter: filter,\n min_creation_time: min_creation_time, max_creation_time: max_creation_time,\n parent_job_id: parent_job_id\n end\n end", "def list(status = :all, page = 1, reload = false)\n var_name = :\"@#{status}_list#{ page }\"\n\n val = instance_variable_get var_name\n\n return val if val && !reload\n\n get = Net::HTTP::Get.new \"/api/jobs/#{ page }/#{ status }\", initheaders = headers\n\n response = Bitmovin.http.request get\n\n json = prepare_response_json(response.body)\n \n value_to_set = json[:jobs].map { |p| Bitmovin::Job.new(p) }\n\n instance_variable_set var_name, value_to_set\n end", "def index\n @jobs = Job.all\n # @jobs = ScriptedClient::Job.all\n end", "def job(job_name)\n jobs job_name\n end", "def possible_jobs\n \n\n job = params[:job].titleize\n location = params[:location].titleize\n\n results = Cb.job.search({ location: location,\n keywords: job })\n jobs = results.model.jobs\n @job_data = {\n job: job,\n location: location,\n job_results: jobs\n }\n end", "def rocket_job_active_workers(server_name = nil)\n return [] if !running? 
|| (server_name && !worker_on_server?(server_name))\n [ActiveWorker.new(worker_name, started_at, self)]\n end", "def jobs\n\t\t@jobs = Job.where(user_id: @company.user_id)\n\t\trespond_to do |format|\n\t\t\tformat.js\n\t\tend\n\tend", "def index\n @db_jobs = DbJob.all\n end", "def job_history\n history = []\n record = nil\n\n command('llist jobs').split(\"\\n\").each do |line|\n next unless line.index ': '\n key, value = line.split(': ', 2)\n key.strip!\n value.chomp!\n\n if key == 'JobId'\n record = { key => value }\n history << record\n elsif record\n record[key] = value\n end\n end\n\n history\n end", "def job(id, options = {})\n objectify get(\"/job/#{id}\", options)['joblist']['job']\n end", "def index\n @scrapper_jobs = ScrapperJob.all\n end", "def index\n @job_applications = @job_applications_active\n end", "def available_jobs\n active_jobs + eligible_jobs\n end", "def index\n @job_applications = JobApplication.all\n end", "def index\n @job_schedulers = JobScheduler.all\n end", "def get_all_scheduled_jobs(opts = {})\n data, _status_code, _headers = get_all_scheduled_jobs_with_http_info(opts)\n data\n end", "def index\n @kheer_jobs = ::KheerJob.all\n end", "def list_jobs(username, password, uuid = nil)\n jobs = get_json('jobs.json', username, password)\n puts \"\"\n jobs[\"jobs\"].each do |job|\n next if uuid && job['uuid'] != uuid\n if job['jobURL']\n job.merge!(get_json(job['jobURL'], username, password, ''))\n end\n puts summarise_job(job, 2)\n puts \"\"\n end\n del = jobs['delivered']\n puts \"#{del['jobCount']} jobs, #{del['activityCount']} activities delivered since #{del['since']}\"\nend", "def index\n @act_jobs = ActJob.all\n end", "def find_all_space_webhook_jobs(params={}, headers=default_headers)\n # Make the initial request of pages jobs\n response = find_space_webhook_jobs(params, headers)\n # Build Submissions Array\n jobs = response.content[\"webhookJobs\"]\n # if a next page token exists, keep retrieving jobs and add them to the results\n while (!response.content[\"nextPageToken\"].nil?)\n params['pageToken'] = response.content[\"nextPageToken\"]\n response = find_space_webhook_jobs(params, headers)\n # concat the jobs\n jobs.concat(response.content[\"webhookJobs\"] || [])\n end\n final_content = {\"webhookJobs\" => jobs, \"nextPageToken\" => nil }\n # Return the results\n response.content=final_content\n response.content_string=final_content.to_json\n response\n end", "def index\n @jobs = Job.page(params[:page])\n end", "def list options = {}, &block\n options.to_options!\n Bj.transaction(options) do\n options.delete :rails_env\n table.job.all(options)\n end\n end", "def index\n @pending_jobs = PendingJob.all\n end", "def index\n @jobs = Job.all\n @paginated_jobs = @jobs.paginate(:page => params[:page], :per_page => Settings.Pagination.NoOfEntriesPerPage)\n end", "def find_jobs (tag=nil)\n\n jobs = @cron_jobs.values + @non_cron_jobs.values\n jobs = jobs.select { |job| job.has_tag?(tag) } if tag\n jobs\n end", "def all(filters = self.filters)\n self.filters = filters\n data = service.list_jobs(vault.id, self.filters).body['JobList']\n load(data)\n end", "def active_job_candidate_list\n @job_candidates = JobCandidate.active_job_candidate_list(current_candidate)\n end", "def available_jobs\n @jobs = Job.all.select { |j| !j.agents.include?(current_user) && j.start_date >= Date.today }\n end", "def get_jobs\n coll = @db.collection(@coll_scheduled_jobs)\n event_job_map = {}\n jobs = coll.find.map do |entry|\n event_job_map[entry[\"eventId\"]] = entry\n entry\n end\n return 
jobs, event_job_map\n end", "def index\n @job_results = JobResult.all\n end", "def application_jobs\n rails_eager_load\n all_jobs = []\n all_jobs << sidekiq_jobs\n all_jobs << active_jobs if defined?(::ActiveJob)\n all_jobs = all_jobs.flatten\n all_jobs.delete_if { |klass| IGNORED_CLASSES.include?(klass.to_s) }\n end", "def job_executions(id, options = {})\n r = get(\"/job/#{id}/executions\", options)['result']['executions']\n objectify r\n end", "def get_task_list_by_job_id(job_id)\n list_tasks = []\n for task in get_data(\"tasks\")\n if job_id == task[\"job_id\"]\n list_tasks.push(task)\n end\n end\n\n return list_tasks\n end", "def index\n \t@jobs = Job.all\n \t# if i wanted to find all of my jobs\n \t# @jobs = current_user.jobs.all\n end", "def getDeadJobs\n getJobs('1/')\n end", "def workflow_run_jobs(repo, run_id, options = {})\n paginate \"#{Repository.path repo}/actions/runs/#{run_id}/jobs\", options do |data, last_response|\n data.jobs.concat last_response.data.jobs\n end\n end", "def index\n # Fetches space jobs.\n if params[:space_id]\n jobs = []\n if find_user_space\n jobs = @space.jobs.\n eager_load(:app, user: :org, analysis: :workflow).\n includes(:taggings).\n search_by_tags(params.dig(:filters, :tags)).\n order(order_from_params).page(page_from_params).per(PAGE_SIZE)\n jobs.each { |job| job.current_user = @context.user }\n\n jobs = JobService::JobsFilter.call(jobs, params[:filters])\n\n sync_jobs(jobs)\n end\n\n page_dict = pagination_dict(jobs)\n\n render json: jobs, root: \"jobs\", meta: count(page_dict[:total_count]).\n merge({ pagination: page_dict }), adapter: :json\n else\n # Fetches all user 'private' jobs.\n jobs = Job.\n editable_by(@context).\n accessible_by_private.\n eager_load(:app, user: :org, analysis: :workflow).\n includes(:taggings).\n search_by_tags(params.dig(:filters, :tags)).\n order(ORDER_GROUP_FIELDS)\n jobs = JobService::JobsFilter.call(jobs, params[:filters])\n\n render_jobs_list(jobs)\n end\n end", "def index\n @create_jobs = CreateJob.all\n end", "def render_jobs_list(jobs)\n workflow_with_jobs = []\n workflow_batch = {}\n\n jobs.each do |job|\n analysis = job&.analysis\n job.current_user = current_user\n workflow = analysis&.workflow\n slot = workflow_with_jobs.last\n\n if slot.nil? || slot[:analysis_dxid] != analysis&.dxid ||\n slot[:workflow]&.dxid != workflow&.dxid\n workflow_with_jobs << { analysis_dxid: analysis&.dxid,\n batch_id: analysis&.batch_id,\n workflow: workflow, jobs: [job] }\n fill_batch_with_workflows(workflow_batch, workflow, analysis)\n else\n slot[:jobs] << job\n end\n end\n\n workflow_with_jobs.map! do |slot|\n if slot[:workflow].nil?\n slot[:jobs].map do |job|\n job_serialized = JobSerializer.new(job)\n job_serialized.launched_on = job.analysis&.created_at || job.created_at\n job_serialized\n end\n else\n slot[:workflow].current_user = current_user\n\n workflow_serialized = WorkflowSerializer.new(slot[:workflow])\n number_workflows_in_batch(workflow_batch[slot[:batch_id]],\n workflow_serialized, slot[:analysis_dxid])\n workflow_serialized.jobs = slot[:jobs].map do |job|\n job_serialized = JobSerializer.new(job)\n\n launched_on = job.analysis&.created_at || job.created_at\n job_serialized.launched_on = launched_on\n if workflow_serialized.launched_on.nil? 
||\n launched_on < workflow_serialized.launched_on\n workflow_serialized.launched_on = launched_on\n end\n\n job_serialized\n end\n\n workflow_serialized.launched_on ||= Time.current\n workflow_serialized\n end\n end.flatten!\n\n page_array = paginate_array(sort_array_by_fields(workflow_with_jobs))\n page_meta = pagination_meta(workflow_with_jobs.count)\n page_meta[:count] = page_meta.dig(:pagination, :total_count)\n\n render json: { jobs: page_array, meta: page_meta }, adapter: :json\n end", "def multiget(*jids)\n results = JSON.parse(@client.call('multiget', *jids))\n results.map do |data|\n Job.new(@client, data)\n end\n end", "def index\n @findjobs = Findjob.all\n end", "def index\n @import_jobs = ImportJob.all\n end", "def standalone(jobs)\n jobs.map! { |job| { result: job } }\n end", "def save_job_list\n @saved_job_list = jobs\n end", "def index\n @worker_jobs = WorkerJob.all.page(params[:page])\n end", "def index\n @job_groups = JobGroup.all\n end", "def get_jobs(from, to)\n\n job_info = get_job_info(from)\n total_page = job_info[:x_total_pages].to_i\n new_to = (to == nil || to < total_page) ? to : total_page\n puts \">> total page : \" + total_page.to_s\n\n jobs = []\n (from..new_to).each do |page|\n job_api = \"#{BASE_URL}/v4/projects/#{PROJECT_ID}/jobs?page=#{page}&per_page=#{PER_PAGE}\"\n puts \">>start:page:\" + page.to_s\n\n begin\n response = RestClient::Request.new(\n :method => :get,\n :url => job_api,\n :verify_ssl => false,\n :headers => {\"PRIVATE-TOKEN\" => API_TOKEN}\n ).execute\n\n if response != nil && response.code == 200\n res = JSON.parse(response.to_str)\n jobs += res\n end\n\n rescue RestClient::ExceptionWithResponse => err\n puts \"jobs error: #{err.response}\"\n end\n end\n\n jobs\nend", "def index\n @jobapplications = Jobapplication.all\n end", "def queue\n workers = %x[ps axe | grep delayed_job].split(\"delayed_job\").length\n j = { :workers => workers, :jobs => Delayed::Job.all }\n render :json=>j\n end" ]
[ "0.8126788", "0.8117135", "0.80994934", "0.8092008", "0.80488855", "0.78076774", "0.7758362", "0.7710676", "0.76651984", "0.7636592", "0.76280385", "0.7369261", "0.7353377", "0.7346987", "0.7333097", "0.72216576", "0.72106916", "0.7198069", "0.71886104", "0.71610457", "0.7153168", "0.71259713", "0.7091921", "0.7032792", "0.7022679", "0.699884", "0.6992888", "0.69166744", "0.68842995", "0.6881684", "0.6839471", "0.67855465", "0.677041", "0.6767466", "0.6766093", "0.6766093", "0.6766093", "0.6766093", "0.6766093", "0.6766093", "0.6762317", "0.67575324", "0.671674", "0.6710227", "0.6700473", "0.6694814", "0.66915286", "0.66869485", "0.66561466", "0.6644754", "0.66343904", "0.66244924", "0.66184694", "0.6618306", "0.65941447", "0.65817887", "0.6564617", "0.65585184", "0.65256333", "0.6525319", "0.652527", "0.651153", "0.6469595", "0.6467061", "0.64488685", "0.6442617", "0.64058757", "0.63953495", "0.63949144", "0.6394426", "0.6380954", "0.6367941", "0.63585985", "0.6347992", "0.6338273", "0.6336413", "0.6295305", "0.6293285", "0.6290221", "0.62790596", "0.6271462", "0.62667525", "0.62481403", "0.6225302", "0.6223414", "0.6220807", "0.6207899", "0.6195752", "0.6185296", "0.6174249", "0.6172654", "0.6169892", "0.616525", "0.61625874", "0.6159562", "0.6158012", "0.61568576", "0.61342305", "0.61282223", "0.6125244" ]
0.7759051
6
Enqueue a job into a particular queue.
def enqueue(job) @jobs ||= {} @jobs[job.key] = {} self.save end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def enqueue_to(queue, job_class, *args)\n with_error_handling(\"Unable to enqueue #{job_class} job\", args: args) do\n Ladon.q.enqueue_to(queue, job_class, *args)\n end\n end", "def enqueue(job)\n enqueue_at(job, nil)\n end", "def enqueue(job)\n sqs.send_message(\n queue_url: get_queue_url(job),\n message_body: MultiJson.dump(job.serialize)\n )\n end", "def enqueue(job_id)\n create_job_record(job_id)\n BEANSTALK_QUEUE.yput(:job_id => job_id)\n @last_queried[job_id] = Time.now\n end", "def enqueue(*args)\n enqueue_job(new_job(*args))\n end", "def enqueue(job)\n build_worker(job).schedule\n end", "def enqueue(queueKey, work_unit)\n queues[queueKey].send_message(work_unit)\n end", "def enqueue(queueKey, workUnit)\n self.queues[queueKey].send_message(workUnit)\n end", "def queue(job_id)\n redis do |conn|\n conn.lpush(key.queued, job_id)\n end\n end", "def queue(queue, klass, **options)\n check_exists\n SideJob.queue(queue, klass, options.merge({parent: self, by: \"job:#{id}\"}))\n end", "def enqueue(active_job)\n enqueue_at(active_job, nil)\n end", "def enqueue_at(job, timestamp)\n queue = queues.fetch(job.queue_name) do\n raise Error, \"Unknown queue #{job.queue_name} — did you forget to add it to the adapter whitelist?\"\n end\n serialized_job = self.class.encode_job(job)\n attributes = timestamp && { \"timestamp\" => Integer(timestamp) }\n message = queue.topic.publish(serialized_job, attributes)\n # NOTE: This is not strictly necessary, but it feels\n # consistent with how decode_message works.\n job.provider_job_id = message.message_id\n end", "def enqueue_at(time, job_class, *args)\n with_error_handling(\"Unable to enqueue #{job_class} job\", args: args) do\n Ladon.q.enqueue_at(time, job_class, *args)\n end\n end", "def enqueue_job(job, event)\n delayed_job = job.queue\n\n logger.info \"Queued #{event.event_description} as job #{delayed_job.id}\"\n\n {\n 'status' => 'ok',\n 'job_id' => delayed_job.id,\n 'queue' => delayed_job.queue,\n 'priority' => delayed_job.priority,\n 'created_at' => delayed_job.created_at,\n }\n end", "def enqueue_at(_job, _timestamp)\n raise NotImplementedError, 'This queueing backend does not support scheduling jobs.'\n end", "def enqueue_job!(job, wait_until)\n job.run_callbacks :enqueue do\n job.enqueue!\n job.save! do\n Burstflow::Worker.set(wait_until: wait_until).perform_later(workflow.id, job.id)\n end\n end\n end", "def enqueue_at(job, timestamp)\n delay = timestamp.to_i - Time.current.to_i\n sqs.send_message(\n queue_url: get_queue_url(job),\n message_body: MultiJson.dump(job.serialize),\n delay_seconds: delay,\n )\n end", "def submit(job)\n if @queue.length >= QUEUE_SIZE\n job[:callback].call(:error, 'System busy; ask again later.')\n return\n end\n\n @queue.push(job)\n end", "def enqueue_in(delay, job_class, *args)\n with_error_handling(\"Unable to enqueue #{job_class} job\", args: args) do\n Ladon.q.enqueue_in(delay, job_class, *args)\n end\n end", "def add_to_queue\n if !queue_id.blank? && cancelling?\n # cancelling items already have a valid job payload on the queue - do not add again\n return\n end\n\n payload = AnalysisJobsItem.create_action_payload(analysis_job, audio_recording)\n\n result = nil\n error = nil\n\n begin\n result = BawWorkers::Jobs::Analysis::Job.action_enqueue(payload)\n\n # the assumption here is that result is a unique identifier that we can later use to interrogate the message queue\n self.queue_id = result\n rescue StandardError => e\n # NOTE: exception used to be swallowed. 
We might need better error handling here later on.\n Rails.logger.error \"An error occurred when enqueuing an analysis job item: #{e}\"\n raise\n end\n\n @enqueue_results = { result: result, error: error }\n end", "def enqueue\n # We need to save before passing to perform_later b/c perform_later will need our ID.\n # For this reason, the job_id col can't have a null constraint.\n save! unless persisted?\n job = job_class.constantize.perform_later(self, **job_params)\n update!(job_id: job.job_id, provider_job_id: job.provider_job_id)\n end", "def enqueue_at_with_queue(queue, timestamp, klass, *args)\n return false unless plugin.run_before_schedule_hooks(klass, *args)\n\n if Resque.inline? || timestamp.to_i < Time.now.to_i\n # Just create the job and let resque perform it right away with\n # inline. If the class is a custom job class, call self#scheduled\n # on it. This allows you to do things like\n # Resque.enqueue_at(timestamp, CustomJobClass, :opt1 => val1).\n # Otherwise, pass off to Resque.\n if klass.respond_to?(:scheduled)\n klass.scheduled(queue, klass.to_s, *args)\n else\n Resque::Job.create(queue, klass, *args)\n end\n else\n delayed_push(timestamp, job_to_hash_with_queue(queue, klass, args))\n end\n\n plugin.run_after_schedule_hooks(klass, *args)\n end", "def enqueue!\n return nil unless persisted?\n \"#{job_type.camelize}Job\".constantize.perform_later(self)\n end", "def enqueue(job:, method:, args:, delay: nil)\n meta = { \"enqueued_at\" => Time.now.to_i }\n task = Task.new(self, generate_task_id, job, method, args, meta)\n client_push(task)\n end", "def enqueue_to(queue, klass, *args); end", "def enqueue(worker)\n jid = SecureRandom.hex(16)\n\n args = worker.class.xque_attributes.each_with_object({}) do |name, hash|\n hash[name] = worker.send(:\"#{name}\")\n end\n\n job = JSON.generate(\n jid: jid,\n class: worker.class.name,\n args: args,\n expiry: Integer(worker.class.xque_options[:expiry]),\n created_at: Time.now.utc.iso8601\n )\n\n @enqueue_script ||= <<~SCRIPT\n local queue_name, jid, job = ARGV[1], ARGV[2], ARGV[3]\n\n redis.call('hset', 'xque:jobs', jid, job)\n redis.call('lpush', 'xque:queue:' .. queue_name, jid)\n SCRIPT\n\n @redis.eval(@enqueue_script, argv: [@queue_name, jid, job])\n\n jid\n end", "def add(job)\n if @workers.empty?\n @work_queue.insert(0, job)\n else\n worker = @workers.pop()\n ask_worker(worker, job)\n end\n end", "def queue(\n queue_name,\n url: nil,\n method: nil,\n reservation_sid: nil,\n post_work_activity_sid: nil,\n **keyword_args\n )\n append(Queue.new(\n queue_name,\n url: url,\n method: method,\n reservation_sid: reservation_sid,\n post_work_activity_sid: post_work_activity_sid,\n **keyword_args\n ))\n end", "def enqueue(item)\n\t\t@queue << item\n\tend", "def enqueue(queue_name, message)\n Fiber.new do\n Henchman.enqueue(queue_name, message)\n end.resume\n end", "def enqueue\n Karafka.logger.info(\"Enqueuing #{self.class} - #{params}\")\n Karafka::Worker.perform_async(params)\n end", "def requeue\n Sidekiq.redis { |conn| conn.rpush(QueueName.expand(queue_name), job) }\n end", "def enqueue( task )\n raise Closed if ! open?\n @logger.debug { \"Enqueuing #{task}\" }\n cleanup\n spawn\n @queue.push( task )\n @logger.info \"Enqueued #{task}\"\n end", "def enqueue(element)\n @queue.insert(@queue.length, element)\n @head = @queue.first\n @tail = @queue.last\n end", "def enqueue_at(active_job, timestamp)\n good_job = GoodJob::Job.enqueue(\n active_job,\n scheduled_at: timestamp ? 
Time.zone.at(timestamp) : nil,\n create_with_advisory_lock: execute_inline?\n )\n\n if execute_inline?\n begin\n good_job.perform\n ensure\n good_job.advisory_unlock\n end\n end\n\n executed_locally = execute_async? && @scheduler.create_thread(queue_name: good_job.queue_name)\n Notifier.notify(queue_name: good_job.queue_name) unless executed_locally\n\n good_job\n end", "def enqueue! data\n @q << data\n self\n end", "def queue_job(priority = nil, delay = default_delay)\n args = { }\n args[:wait] = delay if delay\n args[:priority] = priority if priority\n\n enqueue(args)\n end", "def enqueue(el)\n @queue.push(el)\n end", "def enqueue_in_with_queue(queue, number_of_seconds_from_now,\n klass, *args)\n enqueue_at_with_queue(queue, Time.now + number_of_seconds_from_now,\n klass, *args)\n end", "def enqueue(node)\n #the double arrow means to PUSH the node into our q list\n @q << node\n end", "def enqueue(payload)\n @queue.publish(JSON.generate(payload), :routing_key => @queue.name)\n end", "def enqueue_task(task)\n default_queue << task\n end", "def push(*jobs)\n jobs.each do |job|\n raise unless job\n @queue.push job\n end\n end", "def enqueue(jobs, args={}, o={})\n opts = [\n o[:pri] || default_pri,\n o[:delay] || default_delay,\n o[:ttr] || default_ttr\n ]\n\n jobs = [jobs.to_s] unless jobs.is_a?(Array)\n jobs.compact!\n raise ArgumentError, \"you need at least 1 job\" if jobs.empty?\n job = jobs.first\n\n beanstalk.use(job)\n beanstalk.yput({ :args => args, :next_jobs => jobs[1..-1]}, *opts)\n rescue Beanstalk::NotConnected => e\n raise e if defined?(r)\n r = true\n error exception_message(e, \"You have a problem with beanstalkd.\\nIs it running?\")\n @@beanstalk = new_beanstalk\n retry\n end", "def enqueue(\n name,\n action: nil,\n method: nil,\n wait_url: nil,\n wait_url_method: nil,\n workflow_sid: nil,\n **keyword_args\n )\n\n enqueue = Enqueue.new(\n name,\n action: action,\n method: method,\n wait_url: wait_url,\n wait_url_method: wait_url_method,\n workflow_sid: workflow_sid,\n **keyword_args\n )\n\n yield(enqueue) if block_given?\n append(enqueue)\n end", "def enqueue(override = false)\n if self.job_id.nil? && !(self.last_fetch_at.present? && (Time.now - self.last_fetch_at) < UPDATE_TIME) && !override\n Delayed::Job.enqueue(ArtistWorker.new(self.id), 0, Time.now)\n self.job_id = Delayed::Job.last.id\n save\n end \n end", "def enqueue(args = [], at: nil, queue: nil, **options)\n options = disc_options.merge(options).tap do |opt|\n opt[:delay] = at.to_time.to_i - DateTime.now.to_time.to_i unless at.nil?\n end\n\n disque.push(\n queue || self.queue,\n Disc.serialize({\n class: self.name,\n arguments: Array(args)\n }),\n Disc.disque_timeout,\n options\n )\n end", "def enqueue(el)\n @queue << el\n el\n end", "def enqueue(element)\n # if queue is full or the element you're trying to add = nil, item cannot be added to the queue\n if full? 
or element.nil?\n nil\n else\n # 1 subtracted from @tail because the queue is going to be one item longer now\n @tail = @tail.pred\n # last element in the queue is set equal to the element you passed in as an argument\n @store[@tail] = element\n # returns the Queue\n self\n end\n end", "def enqueue_job\n COMMANDS.each do |command, job|\n next unless (matches = text&.match(/>\\s+(?<command>#{command})(\\s+(?<options>.*?))?\\s*$/))\n\n return job.enqueue(event_id: id, options: matches['options'])\n end\n nil\n end", "def queue_job; end", "def push_to_restriction_queue(job, location=:back)\n tracking_key = tracking_key(*job.args)\n\n case location\n when :back then Resque.redis.rpush(restriction_queue_key(tracking_key, job.queue), encode(job))\n when :front then Resque.redis.lpush(restriction_queue_key(tracking_key, job.queue), encode(job))\n else raise \"Invalid location to ConcurrentRestriction.push_to_restriction_queue\"\n end\n\n increment_queue_count(job.queue)\n update_queues_available(tracking_key, job.queue, :add)\n mark_runnable(tracking_key, false)\n end", "def addJob( job )\n\t\t\t\tThread.exclusive {\n\t\t\t\t\t@jobs << job\n\t\t\t\t\twake()\n\t\t\t\t}\n\t\t\tend", "def enqueue(element)\n if element.class.name == \"RealQueue\"\n require 'pry'\n binding.pry\n end\n @store.unshift(element)\n self\n end", "def add_thing(thing)\n enqueue(thing)\n end", "def enqueue(item)\n @todo.push(item) unless @enqueued.include?(item)\n @enqueued.add(item)\n end", "def enqueue(cmd)\n songs = select_songs_with cmd\n if songs.empty?\n failure(\"No songs found with this criteria. Sorry, nothing was enqueued.\")\n else\n playlists[:queue] << songs\n msg = \"These songs were enqueued:\\n\"\n songs.each {|s,idx| msg << \" #{s.to_s}\\n\"}\n\n success(message: msg, enqueued: songs)\n end\n end", "def enqueue(commit_id)\n job = CommitJob.new(repo_name, commit_id)\n enqueue_job(job)\n end", "def enqueue(metric)\n if @queue.length < @max_queue_size\n @queue << metric\n ensure_worker_running\n\n true\n else\n logger.warn(\n 'Queue is full, dropping events. 
The :max_queue_size ' \\\n 'configuration parameter can be increased to prevent this from ' \\\n 'happening.'\n )\n false\n end\n end", "def queue(&b)\n @queue << b\n end", "def enqueue(*args); end", "def enqueue(item)\n end", "def enque!(time = Time.now.utc)\n @last_enqueue_timestamp = time.to_i\n @next_enqueue_timestamp = calculate_next_enqueue_time(time).to_i\n\n save_enqueue_time_options\n\n klass_const =\n begin\n Sidekiq::Cron::Support.constantize(@klass.to_s)\n rescue NameError\n nil\n end\n\n jid =\n if klass_const\n if defined?(ActiveJob::Base) && klass_const < ActiveJob::Base\n job = enqueue_active_job(klass_const)\n job.try(:job_id)\n else\n enqueue_sidekiq_worker(klass_const)\n end\n else\n if @active_job\n Sidekiq::Client.push(active_job_message)\n else\n Sidekiq::Client.push(sidekiq_worker_message)\n end\n end\n\n save_job_history(jid)\n\n logger.debug { \"CRON JOB: enqueued #{@name}: #{@message}\" }\n end", "def enqueue_job\n BgJob.enqueue(\n WebhookJob::RecordEvent,\n {\n client_id: @user_kyc_detail.client_id,\n event_source: GlobalConstant::Event.web_source,\n event_name: GlobalConstant::Event.update_ethereum_address_name,\n event_data: {\n user_kyc_detail: @user_kyc_detail.get_hash,\n admin: @admin.get_hash\n },\n event_timestamp: Time.now.to_i\n }\n )\n\n end", "def enqueue(payload)\n end", "def add_job(job)\n @stream.add_message(job.to_message)\n job\n end", "def enqueue(data)\n @in.push(data)\n end", "def queue_job(priority = default_priority, time = default_delay.from_now, allow_duplicate = allow_duplicate_jobs?)\n if allow_duplicate || !exists?\n Delayed::Job.enqueue(self, priority: priority, queue: queue_name, run_at: time)\n end\n end", "def schedule(job)\n \n # If we can't get a lock on the @workforce then the Coordinator is most likely shutting down.\n # We want to skip creating new workers in this case.\n if @job_queue.num_waiting == 0 && @workforce.size < QueueToTheFuture.maximum_workers && @workforce.mu_try_lock\n @workforce.push Thread.new() { while job = @job_queue.shift; job.__execute__; end }\n @workforce.mu_unlock\n end\n \n @job_queue.push(job)\n \n nil\n end", "def queue\n pending_duplicate_job || Delayed::Job.enqueue(self)\n end", "def queue_job(job, ds=nil)\n return nil unless get_job(job, ds).nil?\n ds_name = (ds.nil? ? 'miga-project' : ds.name)\n say 'Queueing %s:%s' % [ds_name, job]\n vars = {\n 'PROJECT' => project.path,\n 'RUNTYPE' => runopts(:type),\n 'CORES' => ppn,\n 'MIGA' => MiGA::MiGA.root_path\n }\n vars['DATASET'] = ds.name unless ds.nil?\n log_dir = File.expand_path(\"daemon/#{job}\", project.path)\n Dir.mkdir(log_dir) unless Dir.exist? 
log_dir\n task_name = \"#{project.metadata[:name][0..9]}:#{job}:#{ds_name}\"\n to_run = {ds: ds, ds_name: ds_name, job: job, task_name: task_name,\n cmd: sprintf(runopts(:cmd),\n # 1: script\n MiGA::MiGA.script_path(job, miga:vars['MIGA'], project:project),\n # 2: vars\n vars.keys.map { |k| sprintf(runopts(:var), k, vars[k]) }.\n join(runopts(:varsep)),\n # 3: CPUs\n ppn,\n # 4: log file\n File.expand_path(\"#{ds_name}.log\", log_dir),\n # 5: task name\n task_name)}\n @jobs_to_run << to_run\n end", "def enqueue_at(job, precise_timestamp)\n build_worker(job).schedule(time_at: Time.at(precise_timestamp))\n end", "def enqueue(el)\n @queue.push(el)\n true\n end", "def enqueue(element)\n raise 'Queue is full' if self.full?\n\n @queue[@tail] = element\n\n # Check to see if the queue was first initialized\n if @head.nil?\n @head = 0\n end\n\n # Check to see if tail is the first element\n if @tail == @length - 1\n @tail = 0\n else\n @tail += 1\n end\n end", "def job(body)\n @queue.push(@config.get('DISQUE_QUEUE'), body, 1000)\n end", "def publish(queue_name,job)\n queue = queue(queue_name)\n queue.send_message(message_body: encode_job(job))\n end", "def enqueue(element)\n @items.push(element)\n nil\n end", "def enqueue(element)\n # check if queue is full \n raise ArgumentError, \"Queue is full\" if (@back + 1) % MAX_BUFFER == @front \n # check if queue is empty\n # empty queue\n if @front < 0\n @front = @back = 0\n # overflow to wrap around\n elsif @back == MAX_BUFFER - 1 # raise block will check for a full queue\n @back = 0\n @store[@back] = element\n # okay to just add\n else \n @back += 1\n end \n @store[@back] = element\n end", "def enqueue_job\n\n BgJob.enqueue(\n PlanEconomyJob,\n {\n client_token_id: @client_token_id,\n is_first_time_set: @is_first_time_set\n }\n )\n\n end", "def save\n # if job is invalid return false\n return false unless valid?\n\n # update next_enqueue_timestamp\n @next_enqueue_timestamp = calculate_next_enqueue_time(Time.now.utc).to_i\n\n Sidekiq.redis do |conn|\n # add to set of all jobs\n conn.sadd(self.class.jobs_key, redis_key)\n\n # add informations for this job!\n conn.hmset(redis_key, *hash_to_redis(to_hash))\n end\n\n logger.info { \"CRON JOB: add job with name: #{@name}\" }\n end", "def reinsert_job(job = nil)\r\n throw ArgumentError unless job\r\n throw ArgumentError unless job.own?\r\n lock\r\n # TODO: handle other job types later\r\n if internal_job_exists?('run', job.name)\r\n # Move to que\r\n job.disown\r\n FileUtils.mv(@dir + '/run/' + job.name, @dir + '/que/' + job.name)\r\n job.set_status(@dir + '/que/' + job.name, ST_QUEUED)\r\n else\r\n log('attemped to reinsert job that could not be found: ' + job.name)\r\n job = nil\r\n end\r\n unlock\r\n return job\r\n end", "def enqueue(data); end", "def enqueue(record)\n @queue << record.representation\n end", "def putQueue( queue_name, data)\n params = Hash.new\n params['queue_name'] = queue_name\n params['data'] = data\n return doCurl(\"put\",\"/queue\",params)\n end", "def work_queue(queue)\n if job = Riaque.reserve(queue)\n if klass = self.qualified_const_get(job.klass)\n klass.send(:perform, *job.payload)\n end\n\n job.dequeue\n end\n end", "def enqueue(queue_name, message_payload, ttl = 604800)\r\n payload = \"<?xml version=\\\"1.0\\\" encoding=\\\"utf-8\\\"?><QueueMessage><MessageText>#{message_payload}</MessageText></QueueMessage>\"\r\n execute(:post, \"#{queue_name}/messages\", { :messagettl => ttl }, { 'Content-Type' => 'application/xml', :x_ms_version => \"2009-09-19\"}, payload)\r\n end", "def 
enqueue_job\n BgJob.enqueue(\n NewUserRegisterJob,\n {\n client_id: @client_id,\n user_id: @user.id,\n geoip_country: nil,\n event: {\n client_id: @user.client_id,\n event_source: GlobalConstant::Event.api_source,\n event_name: GlobalConstant::Event.user_register_name,\n event_data: {\n user: @user.get_hash\n },\n event_timestamp: Time.now.to_i\n }\n }\n )\n end", "def enqueue_received_message(message,queue=received_message_queue)\n enqueue(message,queue)\n end", "def enqueue(chunk)\n raise NotImplementedError, \"Implement this method in child class\"\n end", "def enqueue(element)\n @in.push(element)\n end", "def enqueue_message(obj)\n output_queue << obj\n end", "def dequeue(job)\n @jobs = @jobs.delete(job.key)\n\n self.save\n end", "def enqueue(payload)\n @queue.publish(payload.encode, :persistent => true)\n end", "def queue(queue_name, &block)\n q = create_queue(queue_name)\n block.call(q) if block\n q\n end", "def enqueue(action)\n # add our request id for tracing purposes\n action[:messageId] = uid\n unless queue_full = @queue.length >= @max_queue_size\n ensure_worker_running\n @queue << action\n end\n !queue_full\n end", "def acknowledge_job(job)\n @redis.multi do\n @redis.hdel(key_queue_running, @worker_id)\n @redis.sadd(key_queue_processed, job)\n end\n end", "def enqueue( * )\n\t\t# No-op\n\tend", "def enqueue_outgoing_jobs(job)\n job.outgoing.each do |job_id|\n out = workflow.job(job_id)\n\n enqueue_job!(out, Time.now) if out.ready_to_start?\n end\n end", "def add_to_queue(result)\n @last_twid = @last_twid > result.id ? @last_twid : result.id\n App.log.info(\"pushing job to queue #{@queue.name}:\\n#{result.text}\\n#{\"-\"*80}\")\n @queue.push(ConversionJob.new({\n \"queue_id\" => @queue.id,\n \"twid\" => result.id,\n \"from_user_id\" => result.from_user_id,\n \"to_user_id\" => result.to_user_id,\n \"from_user\" => result.from_user,\n \"to_user\" => result.to_user,\n \"profile_image_url\" => result.profile_image_url,\n \"iso_language_code\" => result.iso_language_code,\n \"text\" => result.text,\n \"created_at\" => result.created_at\n }).to_json)\n end", "def push_pending_job (job)\n\n old = @pending_jobs.find { |j| j.job_id == job.job_id }\n @pending_jobs.delete(old) if old\n #\n # override previous job with same id\n\n if @pending_jobs.length < 1 or job.at >= @pending_jobs.last.at\n @pending_jobs << job\n return\n end\n\n for i in 0...@pending_jobs.length\n if job.at <= @pending_jobs[i].at\n @pending_jobs[i, 0] = job\n return # right place found\n end\n end\n end" ]
[ "0.8014748", "0.7826505", "0.7725549", "0.7592699", "0.7414797", "0.7373411", "0.7295768", "0.72914034", "0.7290652", "0.72016895", "0.71104693", "0.71096313", "0.7105313", "0.7100409", "0.7095674", "0.7084536", "0.7083227", "0.7054562", "0.6986887", "0.69255364", "0.6885057", "0.6833964", "0.68032205", "0.6736558", "0.66819525", "0.6654305", "0.6462708", "0.6434912", "0.64047015", "0.6385175", "0.6381089", "0.635459", "0.63456714", "0.6303535", "0.62990886", "0.62907255", "0.6269686", "0.6240563", "0.62208223", "0.62000656", "0.61979866", "0.61782295", "0.61552", "0.6142638", "0.6138444", "0.61233205", "0.6121819", "0.60999626", "0.6073419", "0.6070262", "0.60677385", "0.60605156", "0.6059163", "0.60498434", "0.6044149", "0.60416365", "0.6031856", "0.60309243", "0.60058194", "0.6004108", "0.5998466", "0.59970134", "0.5983391", "0.5982158", "0.59662944", "0.5965297", "0.595235", "0.59408593", "0.59400266", "0.5922675", "0.59164083", "0.5901653", "0.58981234", "0.58929765", "0.58902365", "0.58813304", "0.5877137", "0.58749014", "0.58748585", "0.5855886", "0.58441263", "0.5840317", "0.58392704", "0.583608", "0.58305764", "0.5830505", "0.5828942", "0.5809058", "0.58006394", "0.57911783", "0.5784321", "0.577683", "0.5771608", "0.5770983", "0.57635164", "0.5757365", "0.5753724", "0.5744482", "0.573742", "0.571749" ]
0.68385386
21
Dequeue a job from a particular queue.
def dequeue(job) @jobs = @jobs.delete(job.key) self.save end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dequeue; @queue.pop end", "def dequeue\n self.job.destroy if self.job.present?\n self.job_id = nil\n end", "def deq\n @queued = false\n nil\n end", "def dequeue\n @queue.shift\n end", "def delete_queue(queue_name)\r\n execute(:delete, queue_name, {}, {:x_ms_version => '2009-09-19'})\r\n end", "def pop\n @lock.synchronize do\n @queue.pop\n end\n end", "def delete_queue(queue_name, options = {})\n query = {}\n query[\"timeout\"] = options[:timeout].to_s if options[:timeout]\n\n uri = queue_uri(queue_name, query)\n\n call(:delete, uri, nil, {}, options)\n nil\n end", "def pop\n if @q.empty?\n raise QueueEmptyError, \"tried to pop empty queue in #{@component.inspect}\"\n end\n obj = @q.shift\n @component.dec_queue_ready_count if @q.empty?\n obj\n end", "def deq(&proc)\n loop do\n batch = @out_queue.deq\n if batch == CLOSE_BATCH\n break\n end\n proc.call(batch)\n end\n end", "def dequeue\n\t\[email protected]\n\tend", "def on_dequeue(queue_name, &block)\n raise ArgumentError, \"The provided block must accept at least one argument - #{block.inspect} accepts no arguments\" if block.arity.zero?\n @queues[q_name(queue_name)] = block\n end", "def pop_from_queue(queue_class, user_id:, namespace:)\n queue_class\n .new(user_id: user_id, namespace: namespace)\n .pop\n end", "def pop_from_queue(queue)\n prioritized?(queue) ? z_pop_from_queue(queue) : super\n end", "def dequeue\n @q.shift\n end", "def work_queue(queue)\n if job = Riaque.reserve(queue)\n if klass = self.qualified_const_get(job.klass)\n klass.send(:perform, *job.payload)\n end\n\n job.dequeue\n end\n end", "def deleteQueue( queue_id)\n params = Hash.new\n params['queue_id'] = queue_id\n return doCurl(\"delete\",\"/queue\",params)\n end", "def delete( queue_name )\n req = KJess::Request::Delete.new( :queue_name => queue_name )\n resp = send_recv( req )\n return KJess::Response::Deleted === resp\n end", "def queue(queue_name)\n @queues[queue_name]\n end", "def pop()\n @queue.shift\n end", "def pull_queue_pull\n return nil if self.pull_queue_names.length == 0\n \n puts \"pulling from queues #{self.pull_queue_names.join ', '}\"\n \n elt = nil\n elt = @redis.brpop(self.pull_queue_names, self.queue_wait_seconds) while elt == nil\n \n key = elt[0]\n val = elt[1]\n self.debug_out \"got data from pull queue #{key}\"\n Marshal.load val\n end", "def queue(queue = 'default')\n get_queue(queue)\n end", "def remove_from_queue\n Resque.remove_delayed(SchedulerJob, :id => self.id)\n end", "def pop(queue)\n job = Resque.reserve(queue)\n [job.payload_class, job.args] if job \n end", "def pop(queue)\n job = Resque.reserve(queue)\n [job.payload_class, job.args] if job \n end", "def pull_job(job_name = nil)\r\n lock\r\n job,status = internal_find_job(job_name)\r\n if job && status == ST_QUEUED\r\n # Move to run, notice use of job.name rather than job_name\r\n # .. 
if we are pulling a new job, it could be nil\r\n FileUtils.mv(@dir + '/que/' + job.name, @dir + '/run/' + job.name)\r\n job.set_as_active\r\n elsif job\r\n # We cannot pull a job that isn't queued\r\n log(\"cannot pull job that isn't queued: \" + job_name)\r\n job = nil\r\n end\r\n unlock\r\n return job\r\n end", "def delete_queue(queue_url)\n request({\n 'Action' => 'DeleteQueue',\n :parser => Fog::Parsers::AWS::SQS::Basic.new,\n :path => path_from_queue_url(queue_url),\n })\n end", "def remove\n @queue.pop\n end", "def queue_receive(queue, options = {})\n raise NotImplementedError\n end", "def message_from_queue\n if msg = @connection.pop\n message = msg.body.delete(\"\\n\")\n msg.finish\n message\n end\n end", "def requeue\n Sidekiq.redis { |conn| conn.rpush(QueueName.expand(queue_name), job) }\n end", "def clear_queue(queue_name)\n if queue_name.kind_of?(Class)\n queue_name = queue_name.instance_variable_get(\"@queue\")\n end\n if queue_name == :delayed\n Resque.reset_delayed_queue\n else\n Resque.redis.del(\"queue:#{queue_name}\")\n Resque.redis.del(\"resque:queue:#{queue_name}\")\n end\n end", "def pop_perform_unlocked(queue)\n item = pop(queue) or return\n\n job = Resque::Job.new(queue, item)\n if job.uniqueness.perform_locked?\n push(queue, item)\n nil\n else\n job\n end\n end", "def remove\n @queue.shift\n end", "def receive(id)\n @queues[id].pop\n end", "def remove\n @queue.shift\n end", "def dequeue\n if !empty?\n @queue.delete_at(@queue.length-1)\n end\n @tail = @queue.last\n @head = @queue.first\n end", "def remove_from_queue\n if in_queue?\n decrement_queue_positions_on_lower_items\n update_attribute queue_position_column, nil\n end\n end", "def unsubscribe(queue_name = nil)\n client.unsubscribe(\"/queue/#{queue_name || queue}\")\n end", "def remove\n @queue.shift\n end", "def dequeue\n\t\t\tself.pop\n\t\tend", "def unlock_queueing_for_queue(queue)\n Resque.data_store.everything_in_queue(queue).uniq.each do |string|\n item = Resque.decode(string)\n\n unlock_queueing(queue, item).tap { RecoveringQueue.remove(queue, item) }\n end\n end", "def pop_from_restriction_queue(tracking_key, queue)\n queue_key = restriction_queue_key(tracking_key, queue)\n str = Resque.redis.lpop(queue_key)\n post_pop_size = Resque.redis.llen(queue_key)\n\n if post_pop_size == 0\n update_queues_available(tracking_key, queue, :remove)\n clear_runnable(tracking_key, queue)\n end\n\n decrement_queue_count(queue)\n\n # increment by one to indicate that we are running\n increment_running_count(tracking_key) if str\n\n decode(str)\n end", "def pop(options = {})\n @api_adapter.pop_from_queue(self.name, options)\n end", "def queue(queue, klass, **options)\n check_exists\n SideJob.queue(queue, klass, options.merge({parent: self, by: \"job:#{id}\"}))\n end", "def dequeue\n last_el = self.queue_var[-1]\n self.queue_var = self.queue_var[0...-1]\n last_el \n end", "def remove\n if empty?\n raise \"Can't remove if queue is empty\"\n else\n @info.shift\n end\n end", "def remove\n if empty?\n raise \"Can't remove if queue is empty\"\n else\n @info.shift\n end\n end", "def remove\n if empty?\n raise \"Can't remove if queue is empty\"\n else\n @info.shift\n end\n end", "def peek( queue_name )\n get( queue_name, :peek => true )\n end", "def pop\n @queue.pop(true)\n rescue ThreadError\n if closed?\n raise ClosedQueue if @raise_exception.true?\n return nil\n else\n sleep\n retry\n end\n end", "def pop\n\t\treturn nil if @q1.empty?\n\t\[email protected] # Dequeue element from queue and return the same \n\tend", "def 
decrement_queue_position\n return unless in_queue?\n update_attribute queue_position_column, self.send(queue_position_column).to_i - 1\n end", "def abort( queue_name )\n get( queue_name, :abort => true )\n end", "def pop\n entry = queue.pop[:payload]\n if (entry != :queue_empty)\n Marshal.load(entry)\n else\n nil\n end\n end", "def delete_game_id_in_queue game_id, queue = nil\n current_queue = queue || craft_firebase_command(\"minesweeper/queue.json\")\n\n # getting new queue to update\n new_queue = current_queue&.reject { |queue_game_id|\n # reject chosen game\n game_id == queue_game_id\n }\n\n # update queue on server\n update_current_queue(new_queue)\nend", "def pop\n @mutex.synchronize do\n loop do\n if @queue.empty?\n @waiting.push Thread.current\n @mutex.sleep\n else\n return @queue.shift\n end\n end\n end\n end", "def getQueueBy_id( queue_id)\n params = Hash.new\n params['queue_id'] = queue_id\n return doCurl(\"get\",\"/queue/by_id\",params)\n end", "def requeue(queue, opts = {})\n queue_name = case queue\n when String, Symbol then queue\n else queue.name\n end\n\n note_state_change :requeue do\n @client.call('requeue', @client.worker_name, queue_name, @jid, @klass_name,\n JSON.dump(opts.fetch(:data, @data)),\n opts.fetch(:delay, 0),\n 'priority', opts.fetch(:priority, @priority),\n 'tags', JSON.dump(opts.fetch(:tags, @tags)),\n 'retries', opts.fetch(:retries, @original_retries),\n 'depends', JSON.dump(opts.fetch(:depends, @dependencies))\n )\n end\n end", "def z_remove_from_queue(queue, data)\n raise PipelineNotSupported if @redis.client.is_a?(Redis::Pipeline)\n\n priority = extract_priority(data)\n z_item = @redis.zrevrange(redis_key_for_queue(queue), 0, -1).find do |item|\n (priority && item.include?(data)) || \\\n (item.include?(data[/\"class\":\"[^ \"]+/]) && item.include?(data[/\"args\":.+/]))\n end or return\n @redis.zrem(redis_key_for_queue(queue), z_item)\n end", "def after_dequeue_batch(id, *args)\n remove_batched_job(id, *args)\n end", "def complete_order\n @queue.dequeue\n end", "def _put_back_on_queue(message)\n future = nil\n _redis.multi do\n _redis.rpush(@queue, message)\n future = _redis.lrem(@in_progress_queue, 1, message)\n end\n removed = future.value\n if removed !=1\n RailsPipeline.logger.error \"ERROR: Didn't remove message from in_progress queue?!!!\"\n end\n end", "def dequeue\n # if queue is empty, cannot remove anything\n if empty?\n nil\n else\n # +1 added to @tail because the queue is going to be one item shorter now\n @tail = @tail.succ\n # dequeued is set equal to the item at the front of the queue\n dequeued = @store[@head]\n # adds a nil to the \"front\" of @store (but this is actually the back of the queue)\n @store.unshift(nil)\n # removes the last item from @store (first item in the queue)\n @store.pop\n # returns the item removed from the queue\n dequeued\n end\n end", "def remove_from_queue(queue, data)\n prioritized?(queue) ? 
z_remove_from_queue(queue, data) : super\n end", "def pop()\n @size -= 1 \n @queue.shift\n end", "def dequeue!(user=nil)\n update_attribute(:queued, false)\n end", "def destroy\n profile = calc_profile\n cmd = \"jms-queue #{profile} remove --queue-address=#{@resource[:name]}\"\n bring_down 'JMS Queue', cmd\n end", "def dequeue\n loop do\n return nil if @stop\n message = receive_message\n if message\n if message.valid?\n return message\n else\n delete_message(message)\n end\n end\n end\n end", "def pop()\n res = @pop_queue.shift()\n return res\n end", "def peek; @queue[-1] end", "def delete\n @queue << \"delete\"\n end", "def queue(job_id)\n redis do |conn|\n conn.lpush(key.queued, job_id)\n end\n end", "def deq\r\n @mutex.synchronize {\r\n while @tasks.empty? and not @done\r\n @resource.wait(@mutex)\r\n end\r\n @tasks.pop\r\n }\r\n end", "def unschedule (job_id)\n\n @unschedule_queue << job_id\n end", "def dequeue\n # check if queue is empty \n raise ArgumentError, \"Queue is empty\" if self.empty? \n \n element = @store[@front]\n \n # check last element removed\n if (@front == @back)\n @front = @back = -1 \n elsif @front + 1 == MAX_BUFFER\n @front = 0\n # nominal case\n else\n @front += 1\n end\n\n return element\n end", "def wait_for_queue_url(queue_name)\n queue_url = nil\n until queue_url\n queue_url = @sqs.queue_url_by_name(queue_name)\n unless queue_url\n print '-'\n STDOUT.flush\n sleep 1\n end\n end\n queue_url\n end", "def remove(pid)\n @dead_queues.unshift(@pids[pid]) # keep track of queues that pid was running, put it at front of list\n @pids.delete(pid)\n procline\n end", "def process_node_queue(jid)\n if @queues[jid].empty?\n @queues.delete(jid)\n else\n @queues[jid].pop do |pair|\n Fiber.new do\n process_node(*pair)\n process_node_queue(jid)\n end.resume\n end\n end\n end", "def dequeue\n @store.pop\n end", "def dequeue\n end", "def destroy\n if is_runasdomain\n profile = \"--profile=#{@resource[:profile]}\"\n else\n profile = ''\n end\n cmd = \"jms-queue #{profile} remove --queue-address=#{@resource[:name]}\"\n bringDown \"JMS Queue\", cmd\n end", "def pop\n if @queue.empty?\n nil\n else\n tempArr = []\n while @queue.size > 1\n tempArr.push(@queue.remove)\n end\n tempItem = @queue.remove\n while !tempArr.empty?\n @queue.insert(tempArr.pop)\n end\n tempItem\n end\n end", "def [](worker)\n @queues[worker]||=Queue.new\n @queues[worker].pop(true) rescue nil\n end", "def de_queue()\n if is_empty() == true\n return false\n end\n\n @front = (@front + 1) % (@q.length)\n @length -= 1\n return true\n end", "def pop opts = {}, &blk\n @on_msg = blk\n @ack = opts[:no_ack] === false\n popper = Proc.new do\n @mq.get_queues.push(self)\n @mq.callback{\n @mq.send Protocol::Basic::Get.new({ :queue => name,\n :consumer_tag => name,\n :no_ack => true,\n :nowait => true }.merge(opts))\n }\n end\n if delay = opts.delete(:delay)\n EM.add_timer(delay, popper)\n else\n popper.call\n end\n self\n end", "def queue(queue_name, &block)\n q = create_queue(queue_name)\n block.call(q) if block\n q\n end", "def dequeue\n add(queued_requests.shift) unless queued_requests.empty?\n end", "def dequeue\n raise 'Queue is empty' if self.empty?\n\n el = @queue[@head]\n\n if @head == @length - 1\n @head = 0\n else\n @head += 1\n end\n\n if @head == @tail\n self.reset\n end\n\n return el\n end", "def pop()\n if @queue_out.size == 0 \n if @queue_in.size == 0 \n return \n end\n \n while @queue_in.size > 0 \n @queue_out.push(@queue_in.pop)\n end\n end\n \n @size -= 1\n return @queue_out.pop\n end", "def dequeue\n # raise 
NotImplementedError, \"Not yet implemented\"\n #check if front is empty, if yes, return nil\n if @front == -1\n return nil\n #otherwise if front IS the rear, then the queue is full\n elsif @front == @rear\n raise Error, \"Full up here\"\n #not empty\n else \n #do same thing as the enqueue\n new_front = (@front + 1) % queue_size\n temp = @store[@front]\n @store[@front] = nil\n @front = new_front\n end\n return temp\n end", "def pop()\n new_queue = Queue.new\n until @queue.size == 1\n new_queue << @queue.pop\n end\n last = @queue.pop\n @queue = new_queue\n last\n end", "def delete_pipeline_queue_item(organization, pipeline, queue, opts = {})\n delete_pipeline_queue_item_with_http_info(organization, pipeline, queue, opts)\n nil\n end", "def pop()\n if @queue_out.size == 0\n if @queue_in.size == 0\n return\n end\n\n while @queue_in.size > 0\n @queue_out.push(@queue_in.pop)\n end\n end\n\n @size -= 1\n return @queue_out.pop\n end", "def get_queue\n return @queue unless @queue.nil?\n\n Domo::Queue::Redis::JobQueue.active_queue(@redis_client, @dataset_id, @stream_id, pipeline_id)\n end", "def drain_queue(topic, &message_processor)\n QueueListener.drain(aws_client, config, topic, &message_processor)\n end", "def remove!(item)\n @queue.delete(item)\n end", "def dequeue\n @mutex.synchronize do\n # Check if the queue is empty.\n if @head == @tail # not using empty? to avoid extra synchronize block\n return nil\n end\n\n # Remove next object.\n object = @queue[@head]\n @queue[@head] = nil\n\n @head = (@head + 1) & @max_mask\n return object\n end\n end", "def pop(non_block=false)\n @mutex.synchronize{\n while true\n if @que.empty?\n raise ThreadError, \"queue empty\" if non_block\n @waiting.push Thread.current\n @mutex.sleep\n else\n return @que.pop[0]\n end\n end\n }\n end", "def de_queue\n unless is_empty\n @q[@tail] = nil\n @tail = (@tail + 1) % @size\n @openings += 1\n true\n else\n false\n end\n end", "def queue\n @queue ||= channel.queue(queue_name)\n end" ]
[ "0.69838077", "0.6737052", "0.6486255", "0.6462884", "0.6448187", "0.6438257", "0.642777", "0.64235634", "0.63131315", "0.6302172", "0.6301203", "0.62997967", "0.62590736", "0.6258003", "0.6223807", "0.6210933", "0.618593", "0.61715263", "0.6166351", "0.6150891", "0.61361855", "0.6130306", "0.6113979", "0.6113979", "0.6098155", "0.6083978", "0.6082603", "0.6071586", "0.6066751", "0.6058898", "0.6024746", "0.597893", "0.5966862", "0.5960763", "0.59469134", "0.5915387", "0.58784497", "0.58746046", "0.5845886", "0.5826386", "0.58177596", "0.58100975", "0.5806925", "0.58026266", "0.5800272", "0.5787535", "0.5787535", "0.5787535", "0.5786871", "0.578641", "0.57858324", "0.5777871", "0.5771735", "0.5769993", "0.5743573", "0.5738009", "0.5737097", "0.5736949", "0.573567", "0.5729958", "0.57286865", "0.5722566", "0.57186913", "0.56971765", "0.5692591", "0.5691689", "0.5662441", "0.5658749", "0.56533647", "0.5652501", "0.5633064", "0.56313497", "0.56219214", "0.5589897", "0.55802256", "0.5576111", "0.5575376", "0.5571964", "0.55630726", "0.5562989", "0.55549735", "0.55480134", "0.55359715", "0.5532491", "0.5512817", "0.55014974", "0.55009323", "0.5499417", "0.5498803", "0.54899216", "0.54834163", "0.54689157", "0.5465878", "0.5462416", "0.5459904", "0.54527825", "0.54520965", "0.54505694", "0.5449766", "0.54322547" ]
0.65016425
2
Returns the status of a job existing in a queue.
def include?(job) jobs.keys.include?(job.key) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_job_status(job_id)\n @dbh[:progress_bars].filter(:id => job_id).first\n end", "def status\n job = delayed_job\n return 'queued' if job_queued?(job)\n return 'working' if job_working?(job)\n return 'completed' if job_completed?(job)\n return 'failed' if job_failed?(job)\n return 'unknown'\n end", "def get_job_status(job)\n status = nil\n if job.present?\n if job.is_a?(Hash)\n job = job.stringify_keys\n actor = @registered_jobs[job['id']]\n status = actor.status\n else\n actor = @registered_jobs[job.to_i]\n status = actor.status\n end\n end\n status\n end", "def key_queue_status\n key(\"queue\", \"status\")\n end", "def job_exists? name\n\t\t\tjob = get_job name\n\t\t\treturn job[:success]\n\t\tend", "def status\n check_exists\n SideJob.redis.get \"#{redis_key}:status\"\n end", "def get_job(jobid, cluster)\n begin\n data = cluster.job_adapter.info(jobid)\n\n raise OodCore::JobAdapterError if data.native.nil?\n ActiveJobs::Jobstatusdata.new(data, cluster, true)\n\n rescue OodCore::JobAdapterError\n OpenStruct.new(name: jobid, error: \"No job details because job has already left the queue.\" , status: status_label(\"completed\") )\n rescue => e\n Rails.logger.info(\"#{e}:#{e.message}\")\n Rails.logger.info(e.backtrace.join(\"\\n\"))\n OpenStruct.new(name: jobid, error: \"No job details available.\\n\" + e.backtrace.to_s, status: status_label(\"\") )\n end\n end", "def get_status(job)\n (job.status == 'done' and !job.successful) ? 'Failed' : job.status.capitalize\n end", "def query_status(job_id)\n job = get_job_status(job_id)\n if job && (job[:job_finish] || job[:last_updated] > @last_queried[job_id])\n @last_queried[job_id] = Time.now\n job\n else\n nil\n end\n end", "def job_status(organization, jobid)\n uri = server_uri(\"/organizations/#{organization}/jobs/#{jobid}/status\")\n res_data = api_get(uri)\n @logger.debug res_data\n\n return res_data\n end", "def job_exists?(job_uuid)\n res = false\n EtcdUtils.pool.with do |conn|\n key = \"#{@workflow_base}/jobs/#{job_uuid}/\"\n res = conn.exists?(key)\n end\n res\n end", "def status\n id ? 
Resque::Plugins::Status::Hash.get(id) : nil\n end", "def job?(cmd)\n @job = api.query_job(cmd)\n job.wait_until_done!\n fld = job.failed?\n puts(job.error['message']) if fld\n fld\n end", "def has_job?(job_name)\n jobs.exists?(job_name)\n end", "def find_pending(job, batch)\n sql = \"Select * from #{@schema_name}.job_runs where job_id = '#{job.id}' and batch = '#{batch.to_json}' and (status = 'queued' or status = 'running' );\"\n job_run_query(sql)\n end", "def get_job_status(job_id)\n raise ArgumentError, \"job_id is required\" unless job_id.present?\n\n request_body = ''\n xml = Builder::XmlMarkup.new(:target => request_body, :indent => 1)\n\n xml.instruct!\n xml.Envelope do\n xml.Body do\n xml.GetJobStatus do\n xml.JOB_ID job_id\n end\n end\n end\n\n doc = send_xml_api_request(request_body)\n status = result_dom(doc)['JOB_STATUS'] rescue nil\n desc = result_dom(doc)['JOB_DESCRIPTION'] rescue nil\n [status, desc]\n end", "def simple_queue\n make_json_request('qstatus')\n end", "def has_pending?(job, batch)\n sql = \"Select count(*) from #{@schema_name}.job_runs where job_id = '#{job.id}' and batch = '#{batch.to_json}' and ( status = 'queued' or status = 'running' );\"\n r = exec_sql(sql)\n count = r.first[\"count\"].to_i\n count > 0\n end", "def has_job_progress?\n !job_progress_queue.empty?\n end", "def queued?\n job_queued?\n end", "def running(queue = nil)\n scope = where(state: [:queued, :received, :started]).order('jobs.id')\n scope = scope.where(queue: queue) if queue\n scope\n end", "def status(job_id:)\n login_result = login\n return login_result unless login_result.success?\n\n result = SdrClient::BackgroundJobResults.show(url: Settings.sdr_api.url, job_id: job_id)\n if result[:status] != 'complete'\n nil\n elsif result[:output][:errors].present?\n error = result[:output][:errors].first\n error_msg = error[:title]\n error_msg += \": #{error[:message]}\" if error[:message]\n Dry::Monads::Failure(error_msg)\n else\n Dry::Monads::Success(result[:output][:druid])\n end\n end", "def pending_status\n 'pending' if archive_retrieval_job_id\n end", "def exists?\n SideJob.redis.sismember 'jobs', id\n end", "def all_jobs_hve_been_processed?(queue)\n (job_count(queue) == 0) && (working_job_count(queue) <= 1)\n end", "def status\n MsgQ.status(@qid)\n end", "def queueable(queue = nil)\n scope = where(state: :created).order('jobs.id')\n scope = scope.where(queue: queue) if queue\n scope\n end", "def exists?\n queue_exists?(@name)\n end", "def get_queue_id_status(queue_id)\n response = @api.GetQueueIDStatus({\n :QueueID => queue_id,\n :LicenseKey => @license_key })\n result = response.getQueueIDStatusResult\n\n {\t:response_code \t\t\t=> result.responseCode,\n :response_text\t\t\t=> result.responseText,\n :call_answered\t\t\t=> result.callAnswered,\n :queue_id\t\t\t\t\t\t=> result.queueID,\n :try_count\t\t\t\t\t=> result.tryCount,\n :demo\t\t\t\t\t\t\t\t=> result.demo,\n #-- TODO\n # break this SOAP::Mapping::Object down\n #:digits_pressed\t\t\t=> resp.digitsPressed,\n #++\n :machine_detection\t=> result.machineDetection,\n :duration\t\t\t\t\t\t=> result.duration,\n :start_time\t\t\t\t\t=> result.startTime,\n :end_time\t\t\t\t\t\t=> result.endTime,\n :minute_rate\t\t\t\t=> result.minuteRate,\n :call_complete\t\t\t=> result.callComplete }\n end", "def get_status(do_poll = false)\n if do_poll\n stat = issue_erequest({ :command => 'status' })\n else\n stat = self.persisted_status\n end\n jobid = stat.to_i\n stat = jobid unless jobid == 0\n return stat\n end", "def queued?\n status.queued?\n end", 
"def federated_queue_status\n `curl -s -i -L \\\n -u #{Conn[:creds]} \\\n -H 'content-type:application/json' \\\n #{[Conn[:host_api], 'federation-links', Conn[:vhost]].join('/')} | grep -o \"running\" | wc -l`\n end", "def status\n if not(exist?)\n return :init\n end\n\n if @ppg_filename.nil?\n return :unset\n end\n\n if Global.job_queue.active?(@id)\n return :processing\n end\n\n return :processable\n end", "def get_status_job(gear, component)\n args = build_base_gear_args(gear)\n job = RemoteJob.new(component.cartridge_name, 'status', args)\n job\n end", "def check_queue\n # The interesting options hash for our new work query\n check = {\n :deliver_at => {'$lte' => Time.now.utc},\n :result => {'$exists' => false},\n :locked => {'$exists' => false}\n }\n Candygram.queue.find(check).to_a\n end", "def status\n if @pbsid.nil?\n OSC::Machete::Status.not_submitted\n else\n @torque.qstat @pbsid, host: @host\n end\n end", "def wait(jobid)\n resp = command %{wait jobid=\"#{jobid}\"}\n expr = /^JobStatus=.*\\((.)\\)$/\n if resp =~ expr\n $1\n else\n raise \"Command error:\" + \\\n \" expected #{expr.inspect},\" + \\\n \" got #{resp.inspect}\"\n end\n end", "def get_job_status id\n response = get \"http://#{@host}/loadbalancers/tenant/#{@tenant}/jobs/#{id}\"\n raise LBModelException.new \"Expected HTTP 200 but got #{response.code} instead\" unless response.code == 200\n\n JSON.parse(response)\n end", "def test_get_submission_queue_status\n skip if @smoke_test # relies on assets that don't exist in staging Rawls\n status = @fire_cloud_client.get_submission_queue_status\n assert status.any?, 'Did not receive queue status object'\n assert status['workflowCountsByStatus'].any?, 'Did not receive queue count status'\n end", "def status()\n JobRunStatus.label_from_id(self.job_run_status_id)\n end", "def exist?\n jobinfo.exist?\n end", "def status\n (@remote_job || fetch_job).merge(attributes)\n end", "def batch_job_status_url\n return if job_description.blank?\n\n job_hash = JSON.parse(job_description)\n if job_hash && job_hash['jobId']\n AwsUtil.get_batch_job_url(job_hash['jobQueue'], job_hash['jobId'])\n end\n end", "def exists?\n $data = nil\n cmd = compilecmd \"/subsystem=messaging/hornetq-server=default/jms-queue=#{@resource[:name]}:read-resource()\"\n res = executeAndGet cmd\n\n if not res[:result]\n Puppet.debug \"JMS Queue do not exists\"\n return false\n end\n $data = res[:data]\n return true\n end", "def wait_for_job\n\t\tloop {\n\t\t\tputs \":: #{self.feedback['job_desc']}: #{self.job_state}\" if ENV['LKP_VERBOSE']\n\n\t\t\tbreak if self.job_successfull_finished? or\n\t\t\t\t self.job_failed? or\n\t\t\t\t self.job_cancelled?\n\n\t\t\t# otherwise, the job is still in the queue or running; let's wait\n\t\t\tsleep 60\n\t\t}\n\n\t\treturn self.feedback['scheduled_time'] || self.feedback[\"#{self.job_state}_time\"]\n\tend", "def has_pending?\n self.class.job_run_class.has_pending?(@job, @batch)\n end", "def queued?\n attributes['status'] == 3\n end", "def pull_job(job_name = nil)\r\n lock\r\n job,status = internal_find_job(job_name)\r\n if job && status == ST_QUEUED\r\n # Move to run, notice use of job.name rather than job_name\r\n # .. 
if we are pulling a new job, it could be nil\r\n FileUtils.mv(@dir + '/que/' + job.name, @dir + '/run/' + job.name)\r\n job.set_as_active\r\n elsif job\r\n # We cannot pull a job that isn't queued\r\n log(\"cannot pull job that isn't queued: \" + job_name)\r\n job = nil\r\n end\r\n unlock\r\n return job\r\n end", "def rocket_job_singleton_active?\n self.class.where(:state.in => [:running, :queued], :id.ne => id).exists?\n end", "def exists?\n result = loaddata\n unless result[:result]\n Puppet.debug 'JMS Queue do not exists'\n return false\n end\n true\n end", "def send_job_status(jobname)\n\t\tputs \"* Checking status of: #{@jenkins_url}\"\n\t\tresponse = http_req(@jenkins_url)\n\t\t\n\t\tjobs = JSON.parse(response.body)[\"jobs\"]\n\t\t\n\t\tjobs.each { |j|\n\t\t\tname = j[\"name\"]\n\n\t\t\tif name.casecmp(jobname) == 0\n\t\t \twrite_serial_status(j)\n \t\t\tend\n\t\t}\n\tend", "def request(job)\n if @queue.size > @queue_max_size\n # server is busy\n update_status(job.id, name: \"BUSY\")\n else\n # cancel if there exists the session's old request already\n cancel(job) if active?(job.id)\n\n # register the request\n @jobs.add(job.id)\n @queue.push(job)\n\n # send an \"ACCEPTED\" message\n update_status(job.id, name: \"ACCEPTED\")\n end\n end", "def checkJobStatus (msg)\n\n @sqs = Aws::Sqs.new(AMAZON_ACCESS_KEY_ID, AMAZON_SECRET_ACCESS_KEY)\n\n @queue_name = PRESCHEDULING_QUEUE\n\n @queue = @sqs.queue(@queue_name, false)\n @msg = msg\n\n puts 'I just received the message:'\n puts @msg\n\n @parts = @msg.to_s.split(';')\n # toca también por punto y coma porque las urls van separadas por :\n @parts2 = @msg.to_s.split(';')\n\n if(@parts[0] == INSTALLING_APP_MSG)\n\n # ahora encuentro el job que me dicen que esta en estado instalando\n @job = Job.find(@parts[1])\n @job.status = JOBS_STATUS[:INSTALLING]\n @job.save\n @msg.delete\n\n @event = Event.new(:code => 4, :description => @msg, :event_date => DateTime.now )\n @event.execution = @job.execution\n @event.save\n\n end\n\n if(@parts[0] == RUNNING_APP_MSG)\n\n # ahora encuentro el job que me dicen que esta en estado instalando\n @job = Job.find(@parts[1])\n @job.status = JOBS_STATUS[:RUNNING]\n @job.save\n\n @hostname = @parts[2]\n @execution = @job.execution\n @execution.running_jobs = @execution.running_jobs+1\n @execution.save\n @virtual_machines = @execution.cluster.virtual_machines\n puts @hostname\n puts 'is busy'\n\n @busy_vm\n\n @virtual_machines.each do |vm|\n\n if(vm.hostname.split('.').first == @hostname)\n @busy_vm = vm\n end\n\n end\n\n if(@busy_vm != nil)\n @busy_vm.is_busy = true\n @busy_vm.save\n end\n puts 'vm saved'\n\n @msg.delete\n\n @event = Event.new(:code => 5, :description => @msg, :event_date => DateTime.now)\n @event.execution = @execution\n @event.save\n end\n\n if(@parts[0] == UPLOADING_OUTPUTS_MSG)\n\n # ahora encuentro el job que me dicen que esta en estado instalando\n @job = Job.find(@parts[1])\n @job.status = JOBS_STATUS[:UPLOADING_OUTPUTS]\n @job.save\n @msg.delete\n\n @event = Event.new(:code => 6, :description => [email protected]_s, :event_date => DateTime.now)\n @event.execution = @job.execution\n @event.save\n end\n\n if(@parts2[0] == REGISTER_FILE_MSG)\n\n puts 'registering file'\n\n # ahora encuentro el job que me dicen que toca registrarle el output\n @job = Job.find(@parts2[1])\n # creo un nuevo cloud file para representar el archivo\n @cloud_file = CloudFile.new\n @file_url =@parts2[2]\n puts @file_url\n\n @directories = Directory.where('name=?',@job.id.to_s)\n\n # es que ya existe\n 
if(@directories.size == 1)\n @directory = @directories[0]\n else\n @parent_directory = @job.directory\n #primero creo un directorio donde guardar la salida, el id es el nombre del job\n @directory = Directory.new\n @directory.user = @job.user\n @directory.name = @job.id.to_s\n @directory.parent_id = @parent_directory.id\n @directory.save\n end\n\n\n\n\n\n @file_url_parts = @file_url.split('/')\n @file_name = @file_url_parts.last\n\n @cloud_file.name = @file_name\n @cloud_file.directory = @directory\n @cloud_file.user = @job.user\n\n @url = @file_url_parts[4] + '/'+ @file_url_parts[5]+ '/' + @file_url_parts[6]\n @cloud_file.url = @url\n @cloud_file.avatar = @file_name\n\n #TODO Esto se debe cambiar OJO.\n @cloud_file.size = get_object_size_s3 (@file_url_parts[3]+\"/\"+@cloud_file.url)\n @cloud_file.save\n\n @msg.delete\n\n @event = Event.new(:code => 7, :description => REGISTER_FILE+@cloud_file.name, :event_date => DateTime.now)\n @event.execution = @job.execution\n @event.save\n end\n\n if ( @parts[0]== FINISHED_JOB_MSG)\n\n # ahora encuentro el job que me dicen que esta en estado Finalizado\n @job = Job.find(@parts[1])\n @job.status = JOBS_STATUS[:FINISHED]\n @job.save\n\n puts @job.to_s\n\n @hostname = @parts[2]\n @execution = @job.execution\n @execution.running_jobs = @execution.running_jobs-1\n @execution.finished_jobs = @execution.finished_jobs+1\n # si el número de jobs es 0 quiere decir que no hay jobs corriendo en este momento\n # esto quiere decir que la ejecución se ha terminado\n if @execution.running_jobs == 0\n @execution.ended = true\n puts \"Finished\" + @job.to_s\n end\n\n @execution.save\n @virtual_machines = @execution.cluster.virtual_machines\n puts @hostname\n puts 'is busy'\n\n @busy_vm\n\n @virtual_machines.each do |vm|\n\n if(vm.hostname.split('.').first == @hostname)\n @busy_vm = vm\n end\n\n end\n\n if(@busy_vm != nil)\n @busy_vm.is_busy = false\n @busy_vm.save\n end\n puts 'vm saved'\n\n @msg.delete\n\n #si la ejecución terminó\n if @execution.ended?\n\n @execution_total_cost = 0\n\n #si la ejecución terminó, apago todas las máquinas virtuales\n @virtual_machines.each do |vm|\n\n stop_one_vm(vm, @execution.cluster.user)\n\n #@execution_total_cost += vm.execution_hours* VM_PRICING[vm.execution.vm_type]\n @execution_total_cost += (vm.execution_hours * VM_PRICING[@execution.vm_type])\n\n end\n\n #le pongo que la fecha de finalización es ahora\n @end_date = DateTime.now\n @execution.end_date = @end_date\n\n #OJO!!!! TODO\n @execution.total_cost = @execution_total_cost\n @execution.save\n\n\n @event = Event.new(:code => 10, :description => [email protected]_s, :event_date => @end_date)\n @event.execution = @execution\n @event.save\n\n end\n\n\n end\n\n if ( @parts[0]== SWITCHED_TO_QUEUE)\n\n @event = Event.new(:code => 3, :description => @msg, :event_date => DateTime.now )\n @queue = @parts[1].to_s.split('-')\n @exec = Execution.find(@queue[1])\n @event.execution = @exec\n @event.save\n\n end\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nend", "def persistent_job\n job_id = Thread.current[:persistent_job_id]\n job_id ? BdrbJobQueue.find_by_id(job_id) : nil\n end", "def check_for_job_completion\n self.job.check_for_completion if complete?\n end", "def get_status\n fetch_status unless status == :completed\n status\n end", "def get_status\n fetch_status unless status == :completed\n status\n end", "def run(job)\n resp = command %{run job=\"#{job}\" yes}\n expr = /^Job queued. 
JobId=(\\d+)$/\n if resp =~ expr\n $1.to_i\n else\n raise \"Command error:\" + \\\n \" expected #{expr.inspect},\" + \\\n \" got #{resp.inspect}\"\n end\n end", "def check_job_status(report)\n job = load_job(report[:job_id])\n if (job && ((job.processed?) && !(job.complete?)))\n job.send_pack_request # Send a job complete message to the workers, and have one of them download and zip up the results files\n end\n end", "def job\n job_uuid && Jobber::Job::Base.find_job(job_uuid)\n end", "def complete?\n case @status\n when 'new', 'voting', 'running'\n false\n when 'complete'\n true\n else\n fail Exceptions::PushJobError, @job\n end\n end", "def incomplete?\r\n job_status != JobStatus::COMPLETED\r\n end", "def queue\n @queue.pending\n end", "def enqueue_job(job, event)\n delayed_job = job.queue\n\n logger.info \"Queued #{event.event_description} as job #{delayed_job.id}\"\n\n {\n 'status' => 'ok',\n 'job_id' => delayed_job.id,\n 'queue' => delayed_job.queue,\n 'priority' => delayed_job.priority,\n 'created_at' => delayed_job.created_at,\n }\n end", "def job(name)\n @manifest[\"jobs\"].find { |job| job[\"name\"] == name }\n end", "def background_job\n values.find { | job | job.is_background? } \n end", "def push_pending_job (job)\n\n old = @pending_jobs.find { |j| j.job_id == job.job_id }\n @pending_jobs.delete(old) if old\n #\n # override previous job with same id\n\n if @pending_jobs.length < 1 or job.at >= @pending_jobs.last.at\n @pending_jobs << job\n return\n end\n\n for i in 0...@pending_jobs.length\n if job.at <= @pending_jobs[i].at\n @pending_jobs[i, 0] = job\n return # right place found\n end\n end\n end", "def seen_job_with_id?(job_id)\n @processed_job_ids.include?(job_id)\n end", "def queue(job_id)\n redis do |conn|\n conn.lpush(key.queued, job_id)\n end\n end", "def current_job\n return @job_current\n end", "def search_job_get_status(searchString)\n search_and_get_value_table('requisition-view-table-body',4, searchString, 10)\n end", "def find_job(job_id)\n response = HTTParty.get(\"#{@host}/api/jobs/#{job_id}\")\n\n return response['job']\n end", "def status(*job_id)\n #take default job_id if not specified\n if job_id.empty?\n job_id = @job_id\n else\n job_id = job_id[0]\n end\n\n \n url=\"#{@base_url}/#{@tool}/status/#{URI.encode(job_id)}\"\n uri = URI.parse(url)\n\n resp = Net::HTTP.get_response(uri)\n #puts resp.body\n\n #params = XmlSimple.xml_in(resp.body)\n\n return resp.body\n\n\n end", "def job_class\n SingleInstanceQueueJob\n end", "def job_ended?\n FINAL_JOB_TASK_STATUSES.include?(@task_status)\n end", "def job\n\t\tjid = @db.hget('sgt-unit:'+@id, 'job')\n\t\treturn nil if jid == nil\n\t\tgetJob(@db, jid)\n\tend", "def queue_status(queue_names, &block)\n return false unless connected?\n @queues.each do |q|\n if queue_names.include?(q.name)\n begin\n q.status { |messages, consumers| block.call(q.name, messages, consumers) if block }\n rescue StandardError => e\n logger.exception(\"Failed checking status of queue #{q.name} on broker #{@alias}\", e, :trace)\n @exception_stats.track(\"queue_status\", e)\n block.call(q.name, nil, nil) if block\n end\n end\n end\n true\n end", "def wait_on_status(jobid)\r\n uri = URI(\"http://api.idolondemand.com/1/job/status/\" + jobid)\r\n uri.query = URI.encode_www_form(:apikey => $api_key)\r\n res = Net::HTTP.get_response(uri, p_addr = $proxy_host, p_port = $proxy_port)\r\n obj = JSON.parse(res.body)\r\n\r\n if obj['status'] == 'queued'\r\n puts \"job [#{jobid}] #{obj['status']}, waiting #{$status_wait} seconds\"\r\n sleep($status_wait)\r\n 
wait_on_status(jobid)\r\n end\r\nend", "def get_queue\n return @queue unless @queue.nil?\n\n Domo::Queue::Redis::JobQueue.active_queue(@redis_client, @dataset_id, @stream_id, pipeline_id)\n end", "def job_position(job)\n jobs.index(job)\n end", "def job_position(job)\n jobs.index(job)\n end", "def queue_exists?(name, credentials)\n command = \"rabbitmqadmin #{credentials} list queues | grep #{name}\"\n command = Mixlib::ShellOut.new(command)\n command.run_command\n begin\n command.error!\n true\n rescue\n false\n end\nend", "def job_status_expired?\n if self.expired?\n return true\n else\n return false\n end\n end", "def success\n jobs_index(Job.success)\n end", "def reserve\n queues.each do |queue|\n log_with_severity :debug, \"Checking #{queue}\"\n if job = Resque.reserve(queue)\n log_with_severity :debug, \"Found job on #{queue}\"\n\n if job.payload['enqueue_ts']\n delay_ts = Time.now.to_i - job.payload['enqueue_ts'].to_i\n max_delay = Resque.redis.get(\"fifo-stats-max-delay\") || 0\n Resque.redis.incrby(\"fifo-stats-accumulated-delay\", delay_ts)\n Resque.redis.incr(\"fifo-stats-accumulated-count\")\n if (delay_ts > max_delay.to_i)\n Resque.redis.set(\"fifo-stats-max-delay\", max_delay)\n end\n end\n return job\n end\n end\n\n nil\n rescue Exception => e\n log_with_severity :error, \"Error reserving job: #{e.inspect}\"\n log_with_severity :error, e.backtrace.join(\"\\n\")\n raise e\n end", "def batch_job_status_reason\n return if job_description.blank?\n\n job_hash = JSON.parse(job_description)\n if job_hash\n job_hash['statusReason']\n end\n end", "def job_status(folder,hash)\n \n error_files = []\n \n status = 'LOCAL'\n \n # if there is a sh file, it is a queue job\n sh_files = Dir.glob(File.join(folder,'*.sh'))\n \n #there are sh files\n if !sh_files.empty?\n #is queue job\n status = 'UNKNOWN' \n \n if File.exists?(File.join(folder,'QUEUED'))\n status = 'QUEUED'\t\n \n elsif File.exists?(File.join(folder,'RUNNING'))\n status = 'RUNNING'\n else\n error_files = []\n error_files |= Dir.glob(File.join(folder,'*.sh.e*'))\n error_files.push(File.join(folder,'ERRORS'))\n \n errors=''\n \n \n #TODO - add more files for errors\n error_files.each do |ename|\n if File.exists?(ename)\n errors += File.read(ename)\n end\n end\n \n errors.gsub!(/\\n/,'<br>')\n errors.gsub!(/'/,'`')\n\n \n \n if errors != ''\n status = 'ERRORS'\n hash['errors']=errors\n else\n status = 'DONE'\n end\n \n end\n \n \n else\n # normal job\n \n end\n \n hash['job_status'] = status\n \nend", "def queue(queue_name)\n @queues[queue_name]\n end", "def job\n operation_ids = operations.map(&:id)\n ja_ids = JobAssociation.where(operation_id: operation_ids).map(&:job_id).uniq\n jobs = Job.find(ja_ids).select(&:active?)\n raise ProtocolError, 'Cannot resolve the current Job' if jobs.length > 1\n\n jobs.last\n end", "def action(queue_name)\n `curl -s -i -L \\\n -u #{Conn[:creds]} \\\n -H 'content-type:application/json' \\\n -f #{[Conn[:host_api], 'queues', Conn[:vhost], queue_name].join('/')} | jq '.messages_ready'`\n end", "def next_runnable_job(queue)\n tracking_key = get_next_runnable(queue)\n return nil unless tracking_key\n\n job = nil\n lock_key = lock_key(tracking_key)\n\n run_atomically(lock_key) do\n \n # since we don't have a lock when we get the runnable,\n # we need to check it again\n still_runnable = runnable?(tracking_key, queue)\n if still_runnable\n klazz = tracking_class(tracking_key)\n job = klazz.pop_from_restriction_queue(tracking_key, queue)\n end\n\n end\n\n return job\n \n end", "def status()\n return 
[true, [], [], []] if not self.jobids.any?\n \n statuses = GridVid.query(@_keys.merge({:jobids => self.jobids}))\n passed = []\n running = [] \n failed = [] \n \n statuses.each_pair {|jobid, val|\n case val['status']\n when \"PASS\"\n passed << val['jobid'] \n when \"FAILED\" \n failed << val['jobid'] \n else\n running << val['jobid'] \n end \n }\n \n return [running.length == 0, passed, failed, running] \n end", "def job_status(report, message, status)\n job = load_job(report[:job_id])\n if job\n job.status = status\n job.save!\n end\n message.delete\n end", "def get_queue_url(job)\n sqs.create_queue(queue_name: job.queue_name)[:queue_url]\n end", "def get_job(job, ds = nil)\n (jobs_to_run + jobs_running).find do |j|\n if ds.nil?\n j[:ds].nil? and j[:job] == job\n else\n (! j[:ds].nil?) and j[:ds].name == ds.name and j[:job] == job\n end\n end\n end", "def job_completed(job)\n report_success(job) if job['on_success']\n if job['period'] && job['period'].to_i > 0\n job['status'] = 'queued'\n job['make_after'] = job['period']\n job['args'].delete(:job_itself)\n storage.save(job) { |job| schedule(job) }\n else\n if JR.config[:remove_done_jobs]\n storage.destroy(job)\n else\n job['status'] = 'complete'\n storage.save(job)\n end\n end\n end", "def wait_for_job(job_id, timeout_seconds)\n states = ['FINISHED', 'CANCELED', 'FAILED']\n wait_interval = 2 # seconds\n total_taken = 0\n while total_taken < timeout_seconds\n sleep wait_interval\n total_taken += wait_interval\n status = @cp.get_job(job_id)\n if states.include? status['state']\n return\n end\n end\n end", "def jenkins_job_exists?(name)\n job_url = \"http://#{Pkg::Config.jenkins_build_host}/job/#{name}/config.xml\"\n form_args = [\"--silent\", \"--fail\"]\n output, retval = Pkg::Util::Net.curl_form_data(job_url, form_args, :quiet => true)\n return output if retval.nil?\n return Pkg::Util::Execution.success?(retval)\n end", "def status\n return :completed if completed?\n return :failed if failed?\n :pending\n end", "def job_priority\n case params[:queue]\n when 'high'\n 0\n when 'medium'\n 1\n when 'low'\n 2\n end\n end" ]
[ "0.68900853", "0.68393534", "0.6745003", "0.67309284", "0.6697998", "0.66489804", "0.6630047", "0.65943176", "0.64722687", "0.6439859", "0.6397403", "0.63956904", "0.6361938", "0.6348004", "0.6336583", "0.6308881", "0.6304712", "0.6273187", "0.62699956", "0.62672824", "0.62298757", "0.621764", "0.6209671", "0.6196376", "0.6194057", "0.61702186", "0.6142087", "0.614184", "0.6132724", "0.6078473", "0.60705537", "0.6046598", "0.602348", "0.6002609", "0.60007113", "0.5979571", "0.59682685", "0.59524655", "0.5950345", "0.58891445", "0.58884794", "0.5886859", "0.58857566", "0.5847416", "0.58415294", "0.5827852", "0.5825591", "0.5817298", "0.58088034", "0.5799513", "0.57727754", "0.57658327", "0.57642347", "0.5734309", "0.5729993", "0.5727547", "0.5727547", "0.5726608", "0.57218724", "0.5719456", "0.5707966", "0.57062054", "0.5696587", "0.56811386", "0.56764966", "0.5670088", "0.5663496", "0.5659494", "0.5637792", "0.56243914", "0.56222886", "0.56209177", "0.5615908", "0.561542", "0.56117", "0.56104267", "0.56053233", "0.56002647", "0.55905503", "0.5584517", "0.5584517", "0.5572406", "0.5571483", "0.55646926", "0.5547341", "0.55428725", "0.5542225", "0.55400115", "0.5528669", "0.5520965", "0.55162096", "0.5503225", "0.5487393", "0.54710007", "0.54697", "0.5468707", "0.5467402", "0.54634255", "0.5455613", "0.5442608" ]
0.5509822
91
Find the next available job id to work.
def next_availability if job = jobs.detect { |k, v| v['reserved_at'].nil? } job.first end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def next_job_id\n new_id = rand(10*5)\n get_job_status(new_id).nil? ? new_id : next_job_id\n end", "def next_available_id\n last_id = all_ids.map do |key|\n key.sub(\"#{self.name}_\", \"\").to_i\n end.max.to_i\n\n last_id + 1\n end", "def getNextJob()\n\t\t\t\tjob = nil\n\t\t\t\t\n\t\t\t\tThread.exclusive {\n\t\t\t\t\ti = @jobs.index { |j| j.canStart() }\n\t\t\t\t\tjob = @jobs.delete_at( i ) if ( i != nil )\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\treturn job\n\t\t\tend", "def get_next_task\n if worker_key && !worker_key.empty?\n BdrbJobQueue.find_next(worker_name.to_s,worker_key.to_s)\n else\n BdrbJobQueue.find_next(worker_name.to_s)\n end\n end", "def get_job_id\n # check if the job ID has already been set (the job name will not change\n # during execution, so it is safe to assume that the job ID won't change)\n if not defined? @job_id or @job_id == '' or @job_id == nil\n result = veeamconfig('job', 'list').lines\n if result.length > 1\n result.each_with_index do |line, index|\n # skip the first line of output, since it is just the table setup\n next if index == 0\n\n # split line into array by space\n bits = line.split(' ')\n # pull out the repository name\n job_name = bits[0]\n\n # parse and return the job ID\n @job_id = bits[1].tr('{}', '') if job_name == @resource[:job_name]\n return @job_id\n end\n\n # return false if the job doesn't exist\n false\n else\n # return false if there are no jobs that exist\n false\n end\n else\n # return the job ID if it's already set\n @job_id\n end\n end", "def next\n active_job_id = nil\n job_query.perform_with_advisory_lock(parsed_queues: parsed_queues, queue_select_limit: GoodJob.configuration.queue_select_limit) do |execution|\n active_job_id = execution.active_job_id\n performing_active_job_ids << active_job_id\n end\n ensure\n performing_active_job_ids.delete(active_job_id)\n end", "def next_task_id\n\n # If the non_completed_task_ids array is empty, we're done.\n if non_completed_task_ids.empty?\n :lesson_finished\n\n # Else, if the user is completing the last lesson, return them to the first lesson they skipped and didn't complete\n elsif current_task_id == lesson.tasks.last.id\n non_completed_task_ids.min\n\n # Otherwise, just go on to the next lesson\n else\n current_task_id + 1\n end\n\n end", "def next_available_number\n request :get_next_available_number\n end", "def next_available_number\n request :get_next_available_number\n end", "def next_id\n (all.map(&:id).max || 0) + 1\n end", "def read_next_id\n id = nil\n list = current_list\n\n if @co_index <= list.size\n id = list[@co_index - 1][:id]\n end\n\n @co_index += 1\n\n id\n end", "def job_id\n async && async.job_id\n end", "def next_id\n @next_id = ((start...(start + size)).to_a - leases.map { |l| l.id }).first\n end", "def get_next\n r = nil\n iterator_lock do\n if @iterator <= @last_id\n r = get(@iterator)\n @iterator += 1\n @iterator_file.write(\"#{@iterator.to_s(36)}\\n\")\n r\n else\n nil\n end\n end\n end", "def next_id\n self.latest_id += 1\n end", "def next_id\n @imutex.synchronize do\n @mid += 1\n end\n end", "def job\n operation_ids = operations.map(&:id)\n ja_ids = JobAssociation.where(operation_id: operation_ids).map(&:job_id).uniq\n jobs = Job.find(ja_ids).select(&:active?)\n raise ProtocolError, 'Cannot resolve the current Job' if jobs.length > 1\n\n jobs.last\n end", "def next_id\n id = nil\n MinterState.transaction do\n state = read\n minter = ::Noid::Minter.new(state)\n id = minter.mint\n write!(minter)\n end # transaction\n id\n end", "def next_journal_id\r\n id = 
journal_ids.max { |p,q| p <=> q }\r\n id.nil? ? 1 : id + 1\r\n end", "def job_key; Thread.current[:job_key]; end", "def next_id\n self[:next_id]\n end", "def next_team_id\r\n id = team_ids.max { |p,q| p <=> q }\r\n id.nil? ? 1 : id + 1\r\n end", "def job_position(job)\n jobs.index(job)\n end", "def job_position(job)\n jobs.index(job)\n end", "def next_runnable_job(queue)\n tracking_key = get_next_runnable(queue)\n return nil unless tracking_key\n\n job = nil\n lock_key = lock_key(tracking_key)\n\n run_atomically(lock_key) do\n \n # since we don't have a lock when we get the runnable,\n # we need to check it again\n still_runnable = runnable?(tracking_key, queue)\n if still_runnable\n klazz = tracking_class(tracking_key)\n job = klazz.pop_from_restriction_queue(tracking_key, queue)\n end\n\n end\n\n return job\n \n end", "def get_next_id\r\n id = java.lang.System.nanoTime.to_s\r\n $log.info(\"*** get_next_id: \" + id)\r\n return id\r\n end", "def get_next_sub_id\n\n env = get_root_environment\n\n c = nil\n\n c = env.variables[:next_sub_id]\n n = if c\n c + 1\n else\n c = 0\n 1\n end\n env.variables[:next_sub_id] = n\n env.store_itself\n\n c\n end", "def find_job(job_id)\n find_users.each do |user|\n user.find_jobs.each do |job|\n return job if job.id == job_id\n end\n end\n\n return nil\n end", "def next_id\n @id ||= 0\n @id += 1\n end", "def get_next_id\n id = java.lang.System.nanoTime.to_s\n $log.info(\"*** get_next_id: \" + id)\n return id\n end", "def next_free_id\n # Index 0 is not valid, so start at 1.\n found = ([email protected]).find do |i|\n @items[i].nil?\n end\n if found\n # There's an empty slot.\n found\n else\n # No empty slots, next ID is at the end.\n @items.length\n end\n end", "def job_id\n raise NotImplementedError\n end", "def next\r\n\r\n BgWorker.first(:order => 'bg_workers.id', :conditions => [\"bg_workers.id > ?\", self.id])\r\n end", "def next_request_id\n @next_request_mutex.synchronize do\n request_id = @next_request_id\n @next_request_id += 1\n return request_id\n end\n end", "def next_sequence_id\n last_sequence_id + 1\n end", "def next_id(options = {})\n raise \"Needs implementation :-)\"\n end", "def next_match_number\n # This returns a PGresult object\n # [http://rubydoc.info/github/ged/ruby-pg/master/PGresult]\n result = Match.connection.execute(\"SELECT nextval('match_number_seq')\")\n result[0]['nextval']\n end", "def next_intermediate_job\n if @next_intermediate_job < @map_jobs.to_a.size\n job = @map_jobs.to_a[@next_intermediate_job]\n @next_intermediate_job += 1\n file_pattern = job[0]\n intermediate_job = job[1][:intermediate_job]\n return file_pattern, intermediate_job\n end\n end", "def last_job_number\n @jobs.length\n end", "def get_job_id(error = true)\n job_id = get_env_var('SLURM_JOB_ID', error_ = false)\n job_id = get_env_var('SLURM_JOBID', error_ = error) unless job_id\n job_id.to_i\nend", "def job_id\n @gapi.job_reference.job_id\n end", "def next\n # try to set next job to running (already running is ok)\n until ready_jobs.empty? 
|| ready_jobs.first.running?\n unless set_to_running(ready_jobs.first)\n locked_jobs << ready_jobs.shift # some other executor running this\n end\n end\n if ready_jobs.empty?\n return nil\n else\n return ready_jobs.first.next_cucumber_test_case\n end\n end", "def next_id\n COUNTER_LOCK.synchronize do\n @@id_counter += 1\n end\n end", "def next_instance_to_start\n active = active_instances.map {|inst| inst.num.to_i}\n if active.size >= max_instances\n raise NoMoreAvailableInstances, \n _('Laboratory %s (%d) has reached its maximum number of instances (%d)') %\n [name, id, max_instances]\n end\n\n # Assign the lowest available number\n 1.upto(max_instances) {|num| return num unless active.include?(num)}\n\n raise NoMoreAvailableInstances, _('Cannot assign a new instance number')\n end", "def next_available_index\n for index in 0..32 do\n break index unless self[index].exists?\n end\n end", "def next_reduce_job\n if @next_reduce_job < @red_jobs.to_a.size\n job = @red_jobs.to_a[@next_reduce_job]\n @next_reduce_job += 1\n output_result = job[0]\n proc = job[1]\n return output_result, proc\n end\n end", "def get_next_available_counter\n if @last_filename_counter\n @last_filename_counter + 1\n else\n last_used_counter + 1\n end\n end", "def next_id(items)\n max_id = items.map { |item| item[:id] }.max || 0\n max_id + 1\nend", "def getNext\n tasks = @db[:tasks]\n query_statuses = [\n Task.getStatusValue(:not_started),\n Task.getStatusValue(:restarted),\n Task.getStatusValue(:waiting),\n ]\n db_task = tasks.where(:status => query_statuses).order(:wake).order(:created).first\n if !db_task.nil?\n if self.lock db_task[:id]\n begin\n require db_task[:path]\n task = Marshal::load(db_task[:data])\n task.id = db_task[:id]\n status = Task.getStatusValue :running\n tasks.where(:id => task.id).update :status => status\n return task\n rescue\n self.unlock db_task[:id]\n end\n else\n @logger.warn \"lock contention for task #{db_task[:id]}\"\n return nil\n end\n end\n end", "def mint\n Mutex.new.synchronize do\n loop do\n pid = next_id\n return pid unless identifier_in_use?(pid)\n end\n end\n end", "def next_map_job\n if @next_map_job < @map_jobs.to_a.size\n job = @map_jobs.to_a[@next_map_job]\n @next_map_job += 1\n file_pattern = job[0]\n proc = job[1][:proc]\n return file_pattern, proc\n end\n end", "def get_id\n @task_count ||= 0\n @task_count += 1\n end", "def next_id\n next_id = \"sdc:\" + (current_id.to_i + 1).to_s\n next_id\n end", "def next_available_number\n session.request Debtor.soap_action(:get_next_available_number)\n end", "def next_id\n (@curr_id +=1).to_s\n end", "def find_job(job_id)\n response = HTTParty.get(\"#{@host}/api/jobs/#{job_id}\")\n\n return response['job']\n end", "def find(id)\n res = transmit(\"peek #{id}\")\n Job.new(client, res)\n rescue Beaneater::NotFoundError\n nil\n end", "def get_next_game_id\n log_everything(\"Get next game id\")\n # get current registered game id\n retrieved_game_id_number = craft_firebase_command(\"minesweeper/game_id.json\").to_i\n game_id = retrieved_game_id_number + 1\n\n # update game id (increment by 1)\n craft_firebase_command(\"minesweeper/game_id.json\", \"PUT\", game_id)\n\n # return current game id\n game_id\nend", "def next_group_id\n if @groups.empty?\n # Start each time from 1 to make sure groups get the same id's for the\n # same input data\n 1\n else\n id = @groups.last.id\n loop do\n id += 1\n break id if @groups.find { |g| g.id == id }.nil?\n end\n end\n end", "def next_temp_id\n @id ||= ASSET_GROUP_TEMPORARY_ID.to_i\n @id -= 1\nend", 
"def next_list_id(lists)\n max = lists.map { |list| list[:id] }.max || 0\n max + 1\nend", "def getJobId()\n return @helper.getJobId()\n end", "def getJobId()\n return @helper.getJobId()\n end", "def getJobId()\n return @helper.getJobId()\n end", "def getJobId()\n return @helper.getJobId()\n end", "def getJobId()\n return @helper.getJobId()\n end", "def getJobId()\n return @helper.getJobId()\n end", "def getJobId()\n return @helper.getJobId()\n end", "def run_next_job(proc_node)\n job = nil\n get_next_job = true\n self.transaction do\n while get_next_job\n begin\n job = jobs.first\n job.run(proc_node) if job\n get_next_job = false\n rescue => ex\n # Just name sure we go on\n logger.info \"Exception getting next job from queue: #{ex}\"\n end\n end\n end\n job\n end", "def get_next_id\n id = 0\n contacts = read_contacts\n contacts.each do |contact|\n if id < contact[:id]\n id = contact[:id]\n end\n end\n id + 1\nend", "def next_available_task\n\t\tnat_candidate = next_available_immediate_task\n\t\tif nat_candidate && nat_candidate.has_subtasks?\n\t\t\tnat_candidate.next_available_task\n\t\telse\n\t\t\tnat_candidate\n\t\tend\n\tend", "def job\n job_uuid && Jobber::Job::Base.find_job(job_uuid)\n end", "def _next_id\n @@id -= 1\n @@id\n end", "def next_id\n next_chapter = Chapter.where(\"book_id=? and section_id=? and sequence>=? and id<>?\",self.book_id,self.section_id,self.sequence,self.id).order(\"sequence asc\").limit(1)\n if next_chapter.count==0\n next_chapter = Chapter.where(\"book_id=? and section_id>? \",self.book_id,self.section_id).order(\"sequence asc\").limit(1)\n end\n \n if next_chapter.count==1\n next_chapter.first.id\n else\n 0\n end\n \n end", "def run_next_job(proc_node)\n job = nil\n get_next_job = true\n self.transaction do\n while get_next_job\n begin\n job = jobs.first\n job.run(proc_node) if job\n get_next_job = false\n rescue => ex\n # Just name sure we go on\n logger.info \"Exception getting next job from queue: #{ex}\"\n get_next_job = false\n reload\n end\n end\n end\n job\n end", "def next_id\n self.class.where(\"id > ? and quiz_id = ?\", self.id, self.quiz_id).pluck(:id).first\n end", "def get_next_channel_id; end", "def next_build\n builds_dataset.order(:id.desc).first(:state => \"awaiting_deploy\")\n end", "def prime(job_id)\n redis do |conn|\n conn.lpush(key.primed, job_id)\n end\n end", "def find_job\n @job = Job.find(params[:job_id])\n end", "def sync_job\n return sync_jobs.last unless sync_jobs.empty?\n end", "def next_available_immediate_task\n\t\timmediate_tasks.select{ |t| !t.completed? }.sort_by(&:rank).first\n\tend", "def next_serial_number\n size + 1\n end", "def next_work\n name,fasta,qual,comments=@@fastq_file.next_seq\n\n if !name.nil?\n return name,fasta,qual,comments\n else\n return nil\n end\n\n end", "def current_position\n return 1 if Task.all.empty?\n Task.maximum('position') + 1\n end", "def next_messages_id\n messages.max{|a,b| a[:id] <=> b[:id]}[:id] + 1\nend", "def next_todo_id(todos)\n max = todos.map { |todo| todo[:id] }.max || 0\n max + 1\nend", "def gid_next\n gid_last = execute(\"dscl . -list /Groups PrimaryGroupID | sort -k 2 -g | tail -1 | awk '{print $2}'\")\n gid_last.to_i + 1\n end", "def gid_next\n gid_last = execute(\"dscl . 
-list /Users PrimaryGroupID | sort -k 2 -g | tail -1 | awk '{print $2}'\")\n gid_last.to_i + 1\n end", "def next_identifier\n if @identifier >= @max_identifier\n @identifier = 1\n else\n @identifier += 1\n end\n end", "def job_spec_id\n self.job_spec.id\n end", "def find_next\n PcpItem.where( pcp_subject_id: pcp_subject_id ).where( 'id > ?', id ).first\n end", "def next_sequence_number\n last_sequence_number ? last_sequence_number.next : 0\n end", "def get_next_serial_number(dt_wo)\n dt_wo.total_serial_nums += 1\n raise \"invalid serial number\" if dt_wo.total_serial_nums > 65525\n end_num = dt_wo.total_serial_nums.to_s\n serial_number = dt_wo.starting_serial_num\n serial_number = serial_number.ljust(10, \"0\")\n serial_number[serial_number.size - end_num.size, serial_number.size]= end_num\n dt_wo.current_serial_num = serial_number\n dt_wo.save!\n return serial_number\n end", "def next\n\t\tTask.order(:position).where(\"position > ?\", position).first\n\tend", "def get_next_number\n if project && number.blank?\n last_invoice_number = Invoice.where(:project_id => project.id).max(:number)\n self.number = last_invoice_number ? ((last_invoice_number + 1).to_i) : 1\n end\n end", "def next_question\n # If we have no info, assume user is on the first question\n # TODO \"1\" may not be a valid question id, we may need to change this\n return 1 if !session[:answered_questions]\n # TODO more validation that this is an integer, this is UGC!\n question_candidate = session[:answered_questions].keys.map(&:to_i).max + 1\n result = Question.exists?(question_candidate) ? question_candidate : nil\n return result\n end", "def next_sequence_value(sequence_name)\n select_one(\"select #{sequence_name}.nextval id from dual\")['id']\n end", "def next_check_number\n\t\tmaximum = cash_bank_check_offereds.maximum(\"number\")\n\t\t(maximum ? maximum+1 : initial_check_number).to_s\n\tend", "def get_next_childid\n\t\tStudySubject.maximum(:childid).to_i + 1\n\tend" ]
[ "0.79036564", "0.7319212", "0.68572474", "0.6805253", "0.655543", "0.64407593", "0.63851684", "0.6381658", "0.6381658", "0.6325248", "0.6299104", "0.62964135", "0.6267012", "0.62111694", "0.62026817", "0.6197503", "0.6156408", "0.6155267", "0.61458766", "0.61289924", "0.6128735", "0.61262137", "0.61064297", "0.61064297", "0.6090304", "0.608105", "0.6080506", "0.6056847", "0.60542285", "0.6049258", "0.6038878", "0.6036393", "0.6035878", "0.6031945", "0.59988165", "0.5995317", "0.59889096", "0.5987948", "0.5974584", "0.594725", "0.5947209", "0.5943174", "0.5931859", "0.59298867", "0.5913822", "0.589749", "0.58947355", "0.5847018", "0.58239317", "0.58166647", "0.5813905", "0.5813821", "0.5791983", "0.5783158", "0.57806695", "0.5770193", "0.57684815", "0.5741329", "0.5727215", "0.5719709", "0.57101375", "0.5699055", "0.5699055", "0.5699055", "0.5699055", "0.5699055", "0.5699055", "0.5699055", "0.5695371", "0.56927395", "0.56914806", "0.5679692", "0.56586653", "0.56476283", "0.5647052", "0.5638362", "0.56273115", "0.5621529", "0.5613087", "0.5608838", "0.5607817", "0.5606926", "0.5606018", "0.5604951", "0.5601798", "0.5598153", "0.5588819", "0.5584544", "0.5583504", "0.55822766", "0.55816746", "0.557867", "0.55621463", "0.55548257", "0.5549306", "0.5540691", "0.5536726", "0.5533941", "0.55282897", "0.5521009" ]
0.6686222
4
Mark the job as reserved.
def make_reservation(job_id) self.jobs[job_id]['reserved_at'] = Time.now.to_i self.save end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reserve_with_restriction\n @job_in_progress = reserve_without_restriction\n return @job_in_progress\n end", "def reserve\n db.transaction do\n if job = waiting.order(:id).last # asc\n job[:started_working_at] = Time.now\n update_job!(job)\n payload = job[:payload].nil? ? nil : Marshal.load(job[:payload])\n EQ::Job.new(job[:id], job[:queue], payload)\n end\n end\n rescue ::Sequel::DatabaseError => e\n retry if on_error e\n end", "def reserve\n queue_names = params[:queue_names].split(\",\")\n reservation_details = params[:reservation_details]\n \n Worker.identify( reservation_details, queue_names )\n \n job = JobCommands::JobReservation.new(queue_names: queue_names, reservation_details: reservation_details).perform\n if job.present?\n render_job_as_message(job)\n else\n head status: :not_found\n end\n end", "def unavailable!(reason = nil)\n if persisted? && reserved?\n raise Error, \"Cannot make a reserved identifier unavailable.\"\n end\n if unavailable? and reason.nil?\n return\n end\n value = Status::UNAVAILABLE\n if reason\n value += \" | #{reason}\"\n end\n self.status = value\n end", "def reserve_and_run_one_job; end", "def reject\n log_debug { \"Rejecting job with jid: #{item[JID_KEY]} already running\" }\n send_to_deadset\n end", "def assign_job(job_id, job_time)\n @busy = true\n @remaining_job_time = job_time - 1\n @job_id = job_id\n end", "def reserve_job! to=10\n begin qjob = job_queue.reserve(to)\n rescue Beanstalk::TimedOut => e ; Log.info e.to_s ; sleep 0.4 ; return ;\n rescue StandardError => e ; Log.warn e.to_s ; sleep 1 ; return ; end\n qjob\n end", "def set_reserved\n @reserved = Reserved.find(params[:id])\n end", "def mark_reserved(reservation)\n blocks = @venues[reservation.venue][reservation.day_id]\n\n for block_index in reservation.start_block...reservation.end_block\n blocks[block_index] = true\n end\n\n nil\n end", "def create\n if current_user.check_reservations\n return redirect_back(fallback_location: root_url,alert: \"You already have #{Variable.find_by_name(\"MAX_RESERVATIONS\").value} reservations in progress. 
Please deliver one of these first to make more reservations.\")\n else\n @reservation = Reservation.new(reservation_params)\n ending_time = (@reservation.job.duration / Variable.find_by_name(\"RESERVATION_FACTOR\").value.to_f)*24*60*60\n @reservation.ending_time = Time.now + ending_time\n @reservation.status = \"LIVE\"\n job = @reservation.job\n @reservation.amount = @reservation.job.amount\n respond_to do |format|\n if @reservation.save\n @reservation.job.update(status: \"RESERVED\")\n TimerJob.set(wait_until: @reservation.ending_time).perform_later(@reservation,\"LIVE\")\n format.html { redirect_to @reservation, notice: \"Job reserved successfully.\" }\n format.json { render :show, status: :created, location: @reservation }\n else\n format.html { render :new }\n format.json { render json: @reservation.errors, status: :unprocessable_entity }\n end\n end\n end\n end", "def set_reserved_room\n @reserved_room = ReservedRoom.find(params[:id])\n end", "def reserved\n @book = Book.find(params[:book_id])\n @reservation = Reservation.new(\n user_id: current_user.id,\n book_id: @book.id,\n price: @book.price\n )\n @reservation.save!\n end", "def reserve_for(desired_showdate, processor, new_comments='')\n if reserved?\n errors.add :base,\"This ticket is already holding a reservation for #{reserved_date}.\" and return nil\n raise ReservationError\n end\n redemption = valid_voucher_adjusted_for processor,desired_showdate\n if processor.is_boxoffice || redemption.max_sales_for_this_patron > 0\n reserve!(desired_showdate, new_comments)\n else\n errors.add :base,redemption.explanation\n raise ReservationError\n end\n end", "def leave_unoccupied\n vehicle = self.vehicle\n if mark_free(-1)\n if vehicle.present?\n vehicle.update_attributes!(slot_id: nil)\n end\n end\n end", "def reserve_inventory!(order_quantity)\n self.quantity -= order_quantity\n save!\n end", "def vacate_room(room)\n room.occupied = 0\n end", "def reserved; end", "def block_room\n\t\tself.room.set_occupied\n\tend", "def reserve_inventory\n begin\n @product_reserved_stock = @product.reserved_stock_availabilty_in_distribution_center(\n @distribution_center.id, inventory_stock_params[:items_count]).first\n unless @product_reserved_stock.present?\n @product_reserved_stock = @product.stock_inventories.create!(available: false,\n items_count: 0,\n distribution_center_id:\n @distribution_center.id)\n end\n if @product_available_stock.update_attribute(:items_count,\n @product_available_stock.items_count -\n inventory_stock_params[:items_count]) &&\n @product_reserved_stock.update_attribute(:items_count,\n @product_reserved_stock.items_count +\n inventory_stock_params[:items_count])\n\n response = {message: 'Inventory has been reserved of particular product stock in specified distribution center.'}\n status_code = 200\n else\n response = {errors:\n [{detail: \"We can't apply this operation at this time, please try later.\"}]}\n status_code = 403\n end\n rescue => ex\n response = {errors: [{detail: ex.message}]}\n status_code = 403\n end\n render json: response, status: status_code\n end", "def crear_reserva\n if (horainicio - Time.now) < 14_400\n ReservationsJob.set(wait: (self.horainicio - Time.now).seconds).perform_later(self.space_id)\n asignar_espacio\n end\n end", "def reserve_job(conn, reserve_timeout = Backburner.configuration.reserve_timeout)\n Backburner::Job.new(conn.tubes.reserve(reserve_timeout))\n end", "def reserve_for(key)\n reserves.fetch(key)\n end", "def reserve_space space\n return if self.last and self.last.id >= space\n\n o 
= self.new\n o.id= space\n o.save_without_alter_ego\n o.delete\n return\n end", "def retain!\n if changed?\n store_job!\n self.retained = true\n end\n end", "def reserved?\n status == Status::RESERVED\n end", "def apply\n agent = @engine.item_by_name(@name)\n agent.state[\"busy\"] -= 1 if agent.state[\"busy\"] > 0\n end", "def release_restriction(job)\n tracking_key = tracking_key(*job.args)\n lock_key = lock_key(tracking_key)\n\n run_atomically(lock_key) do\n\n # decrement the count after a job has run\n decrement_running_count(tracking_key)\n\n end\n end", "def instance_unreserve\n if reserve? && instance\n n = reader.update( <<-SQL )\n UPDATE urls\n SET reserved = NULL\n WHERE reserved IS NOT NULL AND\n instance = '#{instance}'\n SQL\n @log.info { \"Unreserved #{n} orders for instance #{instance}\" }\n n\n end\n rescue SQLException => x\n @log.error( \"On instance_unreserve: \", x )\n end", "def mark_free(state = 0)\n self.state = state\n self.save\n end", "def reserve!\n memory_map.transform_values! { |_| RESERVED }\n end", "def reserve(*args)\n @reservations << Reservation.new(self, *args)\n end", "def dequeue(job)\n @jobs = @jobs.delete(job.key)\n\n self.save\n end", "def check_for_released\n if released_to_students && marking_state_changed?(to: Result::MARKING_STATES[:incomplete])\n errors.add(:base, I18n.t('results.marks_released'))\n throw(:abort)\n end\n true\n end", "def unlock\n if job\n job.report_running if report_job_status\n job.set_status(\"Unlocking #{to_s}\")\n end\n\n attempt_unlock\n\n report(true)\n end", "def before_perform_approve(*args)\n # Check if the job needs to be approved, and if so, do not enqueue it.\n job = PendingJob.new(SecureRandom.uuid, class_name: name, args: args)\n\n if job.approval_keys? && !job.max_active_jobs?\n ApprovalKeyList.new.add_job(job)\n\n raise Resque::Job::DontPerform, \"The job has not been approved yet.\"\n else\n job.max_jobs_perform_args(args)\n\n true\n end\n end", "def set_reserved_address\n @reserved_address = ReservedAddress.find(params[:id])\n end", "def unmarked_mission(task)\n\t\tsuper if defined? super\n\t\treturn unless task.distribute? 
&& task.self_owned?\n\n\t\tunless Distributed.updating?(self) || Distributed.updating?(task)\n\t\t Distributed.each_updated_peer(self, task) do |peer|\n\t\t\tpeer.transmit(:plan_set_mission, self, task, false)\n\t\t end\n\t\tend\n\t end", "def mark_as_used\n # noop\n end", "def acquire!(locker)\n self.locked_by = locker\n self.locked_at = Time.now\n save!\n end", "def set_availability(name,value)\n\t\tlock_name = \"#{name}_inventory\"\n\t\tSidekiq.redis do |connection|\n\t\t\tRedis::Semaphore.new(\"#{lock_name}\", redis: connection) do \t\t# product level lock\n\t\t\t\tconnection.hset(\"inventory\", name, value)\n\t\t\tend\n\t\tend\n\tend", "def mark_as_finished \n if self.used_quantity + self.scrapped_quantity == self.quantity\n self.is_finished = true \n end\n self.save\n end", "def mark_as_finished \n if self.used_quantity + self.scrapped_quantity == self.quantity\n self.is_finished = true \n end\n self.save\n end", "def mark!(reason)\n @session.nickserv.mark(self.name, :on, reason)\n end", "def reserve_item\n doc = @mongo.db('weather').collection('queue').find_and_modify(\n :query=>{:reserved_at=>0},\n :update=>{:$set=>{:reserved_at=>Time.now.to_i}})\n return nil if !doc\n return doc\n end", "def delayed_jobs_kicked_off\n Grade.delay.add_or_update(self.user, self) if correct?\n end", "def reservation_available\n if timeslot_contains_reservations?\n errors.add(:base, I18n.t('reservation.reservation_available'))\n end\n end", "def occupy_room(room)\n room.occupied = 1\n end", "def resend_unlock_instructions; end", "def before_schedule_check_lock_availability(*args)\n Resque.inline? || job_available_for_queueing?(args)\n end", "def before_enqueue_check_lock_availability(*args)\n # Second condition handles the case when the job is already queueing\n # (was allowed to put in queue at the moment of scheduling), and now scheduler moves it\n # from future schedule to queue; if we'll not allow this, job will be lost.\n Resque.inline? || call_from_scheduler? 
|| job_available_for_queueing?(args)\n end", "def release(id)\n @available.push(@reserved.delete(id))\n if pending = @pending.shift\n pending.resume\n end\n end", "def after_perform_check_unique_lock(*args)\n create_job(args).uniqueness.ensure_unlock_perform\n end", "def mark_used\n @used = true\n end", "def ignored(job)\n @processed_job_ids << job.job_id\n end", "def hold(job = ALL_JOBS)\n DRMAA.control(job, DRMAA::ACTION_HOLD)\n end", "def set_number_available(count)\n @number_available = count unless @number_available\n end", "def before_perform_lock(*args)\n if lock_workers(*args)\n nx = Resque.redis.setnx(lock_workers(*args).to_s, true)\n if nx == false\n sleep(requeue_perform_delay)\n Resque.redis.del(lock_enqueue(*args).to_s)\n Resque.enqueue(self, *args)\n raise Resque::Job::DontPerform\n end\n end\n end", "def mark_runnable(tracking_key, runnable)\n queues = queues_available(tracking_key)\n queues.each do |queue|\n runnable_queues_key = runnables_key(queue)\n if runnable\n Resque.redis.sadd(runnable_queues_key, tracking_key)\n else\n Resque.redis.srem(runnable_queues_key, tracking_key)\n end\n end\n if runnable\n Resque.redis.sadd(runnables_key, tracking_key) if queues.size > 0\n else\n Resque.redis.srem(runnables_key, tracking_key)\n end\n end", "def assign_to(sid, take_home = false)\n e = self.last_open_exam?\n w = e.worksheets.create student_id: sid \n\n indie = self.teacher.indie \n w.bill unless indie # for indie quizzes, block slots only on payment\n\n unless take_home \n Delayed::Job.enqueue WriteTex.new(w.id, w.class.name)\n job = Delayed::Job.enqueue(CompileTex.new(w.id, w.class.name))\n w.update_attribute(:job_id, job.id) if indie\n else \n Mailbot.delay.quiz_assigned(w.id)\n end\n return w\n end", "def reject_job(user)\n job_rejections.create(user_id: user.id)\n end", "def reserve_with_restriction(queue)\n order = [:get_queued_job, :get_restricted_job]\n order.reverse! if ConcurrentRestriction.restricted_before_queued\n\n resque_job = nil\n order.each do |m|\n resque_job ||= self.send(m, queue)\n end\n\n # Return job or nil to move on to next queue if we couldn't get a job\n return resque_job\n end", "def leave_for_delivery(slot_number)\n @parcel.delivery(slot_number)\nend", "def reserve_with_limiter(queue)\n return nil if Resque.size(queue) == 0 # nothing in the queue\n\n rate_restricted = rate_limiter.is_restricted?(queue)\n concurrency_restricted = concurrency_limiter.is_restricted?(queue)\n\n # Call the original method and return if no restrictions applied to this queue\n return reserve_without_limiter(queue) unless rate_restricted || concurrency_restricted\n\n begin\n if concurrency_restricted\n # Try concurrency limit. Return if it's exceeded\n concurrency_limit_tx = concurrency_limiter.start_work(queue)\n return nil unless concurrency_limit_tx\n end\n\n if rate_restricted\n # Try to consume the reserved amount ...\n rate_limit_txs = rate_limiter.consume(queue, reserved_rates[queue] || 1)\n # ... 
return if we exceeded the limit - this is what it's all about\n return nil unless rate_limit_txs\n end\n\n job = reserve_without_limiter(queue)\n ensure\n if job\n # Tuck the transaction info onto the job\n job.rate_limit_txs = rate_limit_txs\n else\n # There was an error, or queue is empty - undo limiters\n rate_limiter.reimburse(rate_limit_txs, rate_limit_txs[0].amount) if rate_limit_txs\n concurrency_limiter.end_work(queue) if concurrency_limit_tx\n end\n end\n\n job\n end", "def mark_unavailable\n t = @offering_interviewer.interview_availabilities.find_by_time_and_offering_interview_timeblock_id(\n params[:time].to_time, params[:timeblock_id])\n t.destroy\n render :partial => \"apply/timeslot_not_available\", :locals => { :b => params[:timeblock_id], :ti => params[:ti], :time => params[:time] }\n end", "def release(job = ALL_JOBS)\n DRMAA.control(job, DRMAA::ACTION_RELEASE)\n end", "def hold(job = ALL_JOBS)\n DRMAA.control(job, DRMAA::ACTION_HOLD)\n end", "def reserved=(_arg0); end", "def abort\n logger.debug { { message: 'BawWorkers::ActiveJob::Unique job already exists, aborting', job_id: job_id } }\n @unique = false\n throw :abort\n end", "def ignored(job)\n @last_job_id = job.job_id\n end", "def recover_usage(quantity_to_be_recovered)\n self.used_quantity -= quantity_to_be_recovered \n self.save \n \n self.unmark_as_finished\n \n \n \n item = self.item \n item.add_ready_quantity( quantity_to_be_recovered ) \n \n return self \n end", "def acknowledge_job(job)\n @redis.multi do\n @redis.hdel(key_queue_running, @worker_id)\n @redis.sadd(key_queue_processed, job)\n end\n end", "def mark_worker_was_here\n @adventure.events.create(action: 'worker_ran')\n end", "def reserved?(product)\n product.status == 'Reserved'\n end", "def dequeue\n self.job.destroy if self.job.present?\n self.job_id = nil\n end", "def mark_unavailable\n t = @user_application.interview_availabilities.find_by_time_and_offering_interview_timeblock_id(\n params[:time].to_time, params[:timeblock_id])\n t.destroy\n render :partial => \"timeslot_not_available\", :locals => { :b => params[:timeblock_id], :ti => params[:ti], :time => params[:time] }\n end", "def set_pending_job\n @pending_job = PendingJob.find(params[:id])\n end", "def cancel_reservations_in_my_range\n \tself.conflicts.each { |o| o.cancel }\t\n\tend", "def ensure_exclusive\n acquire_locks\n write_pid\n end", "def book_slot\n slot.state = 1\n slot.save!\n end", "def book_it\n unclaimed_reservation = rooms_available\n \n unclaimed_reservation.first.change_block_status\n \n return unclaimed_reservation.first\n end", "def send_to_preservation\n self.preservation_state = PRESERVATION_STATE_INITIATED.keys.first\n self.preservation_details = 'The preservation button has been pushed.'\n self.save\n Resque.enqueue(SendToPreservationJob,self.pid)\n end", "def release(resource)\n\t\t\t\t@available << resource\n\t\t\t\t\t\n\t\t\t\tif task = @waiting.pop\n\t\t\t\t\ttask.resume\n\t\t\t\tend\n\t\t\tend", "def set_canceled_reservation\n @canceled_reservation = CanceledReservation.find(params[:id])\n end", "def perform tenant_id\n accepting_new_jobs = $redis.setnx \"generating_docset\", Time.now.to_i\n\n if !accepting_new_jobs \n $redis.setnx \"new_docset_request\", Time.now.to_i\n\n if stale?\n Rails.logger.debug \"assume prev job died, doing heavy lifting anyway\"\n run_job tenant_id\n else\n Rails.logger.debug \"already in progress, skipping\"\n end\n\n else\n Rails.logger.debug \"starting new job\"\n run_job tenant_id\n end\n\n end", "def skip\n param 'state' => 
Patriot::JobStore::JobState::SUCCEEDED\n end", "def expire!\n successor = BillingPeriod.create! booking: booking,\n start_date: start_of_next_period\n successor.delay.charge!\n end", "def revoke!\n self.used = true\n self.save\n end", "def recover_usage(quantity_to_be_recovered)\n self.used_quantity -= quantity_to_be_recovered \n self.save \n \n self.unmark_as_finished\n \n item = self.item \n item.update_ready_quantity\n \n return self \n end", "def preschedule_ct(ct)\n assign_package_for(ct, reservation: true)\n end", "def ensure_seat_is_free\n\t\tseat=Seat.find(seat_id)\n\t \tunless(seat.reservation==nil)\n\t \t\tself.errors.add(:alert, \"Seat is already taken!\")\n\t \tend\n\tend", "def borrowed\n self.decrement!(:availability)\n end", "def assign_right_no\n no = self.request_no\n id = self.id rescue nil\n if ((no.blank? || ContractingRequest.exists?(request_no: no)) && id.nil?)\n no = cr_next_no(self.project_id)\n if no == '$err'\n no = self.request_no\n end\n end\n self.request_no = no\n end", "def work\n if @busy && @remaining_job_time > 0\n @remaining_job_time -= 1\n end\n end", "def reserve_room(check_in, check_out)\n dates = date_range(check_in, check_out)\n new_reservation = nil\n @rooms.each do |room|\n if is_available?(room, dates) && is_not_blocked?(room, dates)\n new_reservation = Reservation.new(room, check_in, check_out)\n new_reservation.id = assign_res_id\n @reservations << new_reservation\n break\n end\n end\n if new_reservation != nil\n return new_reservation\n else\n raise StandardError, 'no rooms available in date range'\n end\n end", "def reschedule(job, time = nil)\n if (job.attempts += 1) < max_attempts(job)\n time ||= job.reschedule_at\n job.run_at = time\n job.unlock\n job.save!\n else\n job_say job, \"REMOVED permanently because of #{job.attempts} consecutive failures\", 'error'\n failed(job)\n end\n end", "def mark_due!\n self.due = true\n self.save\n end", "def mark!\n\t\t\t\t@marked = true\n\t\t\tend", "def create\n @reservation = Reservation.new(reservation_params)\n @reservation.teacher_id = current_user.id\n authorize! :create, @reservation\n\n if(session[:rental_category_id].nil?)\n redirect_to shopping_path\n end\n \n \n reservation_category = ItemCategory.find(session[:rental_category_id])\n \n #Nasty race condition if multiple ppl grab same kit\n @@semaphore.synchronize {\n kit_pool = Kit.available_for_item_category(reservation_category)\n \n test_kit = kit_pool.sample\n if(!test_kit.nil?)\n test_kit.set_reserved\n test_kit.reload\n @reservation.kit_id = test_kit.id\n end\n \n respond_to do |format|\n if @reservation.save\n session[:rental_category_id] = nil\n session[:start_date] = nil\n session[:end_date] = nil\n session[:pickup_date] = nil\n session[:return_date] = nil\n\n format.html { redirect_to rental_history_path(current_user), notice: 'Thank you for supporting the STEAM Kit rental program.' }\n else\n if(!test_kit.nil?)\n test_kit.unset_reserved\n test_kit.reload\n end\n format.html { redirect_to reservation_error_path }\n end\n end\n }\n end", "def overdue_job(user, job)\n @job = job \n mail(to: user.email, subject: 'Job Overdue') \n end" ]
[ "0.6649992", "0.6210308", "0.61216795", "0.60015666", "0.5845675", "0.58375907", "0.58310795", "0.582418", "0.57884026", "0.57077265", "0.56065375", "0.55709714", "0.5553302", "0.5550803", "0.5543129", "0.55062103", "0.549373", "0.5461285", "0.54420185", "0.5428861", "0.5392061", "0.5387062", "0.53033054", "0.52894336", "0.52728325", "0.52269834", "0.5206949", "0.5191072", "0.5176752", "0.5162167", "0.5138596", "0.5084199", "0.5082175", "0.5058389", "0.50530314", "0.5039609", "0.50394183", "0.5037399", "0.5035655", "0.5032327", "0.5028584", "0.50257635", "0.50257635", "0.50220907", "0.5018365", "0.5014473", "0.5006706", "0.5006315", "0.4985994", "0.49852303", "0.4983161", "0.49709973", "0.49595106", "0.49227256", "0.4905809", "0.48932576", "0.4892188", "0.48897615", "0.48881117", "0.48848435", "0.4873662", "0.4866971", "0.48657665", "0.48605624", "0.4858331", "0.48583278", "0.48580718", "0.485631", "0.48527637", "0.48506802", "0.4844346", "0.4842666", "0.4839114", "0.48374844", "0.48351112", "0.48316422", "0.48247272", "0.4820733", "0.48180813", "0.48156348", "0.48062736", "0.48054323", "0.47887838", "0.47879124", "0.47872403", "0.47821864", "0.47803152", "0.47757715", "0.47704127", "0.47683012", "0.47663733", "0.47659606", "0.4762982", "0.47626", "0.47592294", "0.47587225", "0.4758344", "0.47583234", "0.47540283", "0.47519213" ]
0.70366853
0
Generates a unique key to be used when creating the associated Riak object.
def default_key Digest::SHA1.hexdigest("riaque:#{name}") end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_key; end", "def gen_key(record)\n return Digest::SHA2.hexdigest(record.to_s)\n end", "def create_key\n self.key = Digest::MD5.hexdigest(self.official_id.to_s + self.sent_to_email)\n end", "def generate_key\n proposed_key = nil\n\n loop do\n num_part = rand(10**7).to_s.rjust(7, \"0\")\n proposed_key = \"#{IDB_CONFIG[:key_prefix]}-#{num_part}\"\n break unless self.class.find_by(key: proposed_key)\n end\n proposed_key\n end", "def generate_unique_key\n\n # @TODO:need to update the algo. Right now it's very simple algo\n length = self.url.length\n rand(36**length).to_s(36)\n end", "def generate_unique_key\n # not doing uppercase as url is case insensitive\n charset = ::Shortener.key_chars\n (0...::Shortener.unique_key_length).map{ charset[rand(charset.size)] }.join\n end", "def generate_unique_key(now = Time.now)\n u1 = ((now.to_i*1000*1000+now.usec) << 12 | rand(0xfff))\n uid = [u1 >> 32, u1 & 0xffffffff, rand(0xffffffff), rand(0xffffffff)].pack('NNNN')\n uid.unpack('C*').map { |x| \"%02x\" % x }.join\n end", "def gen_api_key\n u = UUID.new\n self.api_key = u.generate\n end", "def generate_user_key\n\t\tself.key = loop do\n random_hex = SecureRandom.urlsafe_base64\n break random_hex unless self.class.exists?(key: random_hex)\n end\n\tend", "def unique_key\n @unique_key ||= section.identifier\n end", "def generate_api_key\n key = Digest::SHA1.hexdigest(Time.now.to_s + rand(12345678).to_s)[1..10]\n self.api_key = self._id.to_s + key\n end", "def generate_api_key\n key = Digest::SHA1.hexdigest(Time.now.to_s + rand(12345678).to_s)[1..10]\n self.api_key = self._id.to_s + key\n end", "def key(name)\n @unique_key = name\n end", "def generate_id()\n return nil unless @name and @represents\n @id ||= Digest::SHA1.hexdigest(@name + @represents)[0..5].force_encoding('utf-8').to_s\n end", "def generate_key\n self.key ||= SecureRandom.urlsafe_base64 32\n end", "def generate_primary_key\n self[self.class.primary_key] ||= self.class.new_primary_key(10_000)\n end", "def create_unique_key\n begin\n self.unique_key = UrlShort.generate_key\n rescue ActiveRecord::RecordNotUnique\n if (count +=1) < 5\n retry\n else\n raise\n end\n end\n end", "def generate_cache_key(record, namespace)\n \"#{namespace}-#{record.cache_key_with_version}\"\n end", "def generate_key\n length = 5\n begin\n self.key = SecureRandom.urlsafe_base64 length\n end while Invite.exists?(key: self.key)\n end", "def create_key\n self.key = loop do\n random_token = SecureRandom.urlsafe_base64(nil, false)\n break random_token unless Api.exists?(key: random_token)\n end\n end", "def key_pair_name\n\n # return the key_pair_name if one is already generated\n return @key_pair_name if @key_pair_name\n\n # generate on from a random string of 10 upper and lowercase letters\n o = [('a'..'z'), ('A'..'Z')].map { |i| i.to_a }.flatten\n @key_pair_name = (0..10).map { o[rand(o.length)] }.join\n return @key_pair_name\n\n end", "def generate_api_key\n generate_uuid\nend", "def key_name\n \"#{prefix}:#{@id}\"\n end", "def key\n \"#{@@PREFIX}#{@name.gsub(\" \",\"-\")}\"\n end", "def rand_key\n return JString.new\n end", "def gen_key(string_key)\n b_key = self._hash_digest(string_key)\n return self._hash_val(b_key) { |x| x }\n end", "def key(name = nil)\n \"#{@id}:#{name}\"\n end", "def generate_key(*args)\n @cache_keys[args.join] ||= @key_generator.generate_key(*args)\n end", "def key\n @key ||= name.to_s\n end", "def unique_key\n \"redis:bitops:#{SecureRandom.hex(20)}\"\n end", "def key\n @key ||= self.project.key + '-' + self.no.to_s\n end", "def 
generate_access_key\n access_keys.generate_new\n end", "def regenerate_api_key\n self.api_key = Digest::SHA1.hexdigest(\"#{self._id}-#{Time.now.to_f}-#{self.created_at}\")\n end", "def key\n \"#{@@PREFIX}#{@type}-#{@version.gsub(\".\",\"-\")}\"\n end", "def generate_id\n Util::UUID.generate\n end", "def generate_unique_id\n \"1.3.6.1.4.1.21367.2009.5.14.#{id}.#{Time.now.to_i}\"\n end", "def key_generator; end", "def key_generator; end", "def generate_api_key\n begin\n api_key = SecureRandom.uuid\n end while ApiKey.exists?(:api_key => api_key)\n\n api = ApiKey.new\n api.api_key = api_key\n api.save\n\n self.api_key = api\n end", "def generate_key\n self.key = SecureRandom.hex(KEY_LENGTH / 2)\n end", "def create_url_key(str, url_key_field, klass)\n str = UrlKey.escape(str)\n str = rand(30000).to_s(36) if str.length < 1\n key = str\n counter = 1\n until klass.find(:all, :conditions => [\"#{url_key_field} = ?\", key]).empty?\n key = \"#{str}-#{counter}\"\n counter += 1\n end\n\n key\n end", "def generate_new_id\n Util::UUID.generate\n end", "def generate_registration_key\n self.registration_key = 'C'.freeze + SecureRandom.base64(8)\n end", "def generate_key(key_size = T.unsafe(nil)); end", "def generate_validkey(from_string = nil)\n\t\tfrom_string ||= User.sha1(AuthHelper::Utils::random_string(30))\n write_attribute \"validkey\", from_string\n end", "def generate_unique_name\n SecureRandom.uuid\n end", "def generate_api_key!\n\t\t@api_key = Digest::SHA1.hexdigest((Time.now.to_f + rand(100) * rand()).to_s) \n\tend", "def cache_key\n return \"#{model_key}/new\" if new_record?\n return \"#{model_key}/#{_id}-#{updated_at.utc.to_formatted_s(cache_timestamp_format)}\" if do_or_do_not(:updated_at)\n \"#{model_key}/#{_id}\"\n end", "def generate_api_key\n SecureRandom.random_number(2 ** 256).to_s(36)\n end", "def prefix_key(key)\n if self.instance_id\n [self.path, self.instance_id, key].join('/')\n else\n raise RuntimeError.new(\"Attempted to generate a key name without an instance id.\")\n end\n end", "def generate_cache_key_name\n alphabetical = [@origin_a, @origin_b].sort\n \"#{alphabetical.first}_#{alphabetical.last}_#{@destination_city}_#{@date_there}_#{@date_back}\"\n end", "def prefix_key(key)\n if self.instance_id\n [KEY_PREFIX, self.instance_id, key].join('/')\n else\n raise RuntimeError.new(\"Attempted to generate a key name without an instance id.\")\n end\n end", "def generate_saml_key\n saml_key = formulate_saml_key\n Rails.cache.write(saml_key, self.id, :expires_in => 1.day)\n saml_key\n end", "def generate_id \n end", "def generate_api_key(id)\n request(:post, \"/users/#{id}/make_new_api_key.json\")\n end", "def generate_hashkey_for(params, api_key)\n sorted_params = sort_params(params)\n params_string = concat_params(sorted_params)\n Digest::SHA1.hexdigest \"#{params_string}&#{api_key}\"\n end", "def __object_unique_id__\n return @args[:data][:Key_name]\n end", "def generate_api_key\n digest = Digest::SHA2.new(512)\n # generate a unique token\n unique_seed = SecureRandom.hex(20)\n digest.update(\"#{unique_seed} SECRET! #{Time.current.to_f}\").to_s\n end", "def generate_api_key\n digest = Digest::SHA2.new(512)\n # generate a unique token\n unique_seed = SecureRandom.hex(20)\n digest.update(\"#{unique_seed} SECRET! 
#{Time.current.to_f}\").to_s\n end", "def create_appplication_key\n\t\t# set guid key for current application_id\n\t\t# self.application_id = Admin::Guid.new.key\n\t\t@application_id = Admin::Guid.new.key\n\tend", "def cache_key\n if new_record?\n \"#{self.class.model_name.cache_key}/new\"\n else\n \"#{self.class.model_name.cache_key}/#{id}\"\n end\n end", "def gen_key( string )\n md5 = Digest::MD5.hexdigest( string )\n return md5[0, 3] + md5[29, 31]\nend", "def generate_api_key\n begin\n self.api_key = SecureRandom.hex(16)\n end while self.class.exists?(api_key: api_key)\n self.save\n self.api_key\n end", "def get_cache_key(unique_hash)\n memcache_key_object.key_template % @options.merge(u_h: unique_hash)\n end", "def generate_unique_id\n Digest::SHA256.hexdigest unique_id_elements.join\n end", "def unique_id\n \"name-#{@language_id}-#{@name_id}\"\n end", "def build_item_key(item)\n if options[:id_namespace]\n \"#{options[:id_namespace]}_#{item.key}\"\n else\n item.key\n end\n end", "def build_item_key(item)\n if options[:id_namespace]\n \"#{options[:id_namespace]}_#{item.key}\"\n else\n item.key\n end\n end", "def key\n \"#{Goalkeeper.namespace}:#{label}\"\n end", "def generate_random_key\n SecureRandom.random_bytes(32)\n end", "def uniqid\n SecureRandom.hex(32)\n end", "def uniqid\n SecureRandom.hex(32)\n end", "def generate_random_key(length=32)\n\t\to = [('a'..'z'), (0..9)].map { |i| i.to_a }.flatten\n\t\t(0...length).map { o[rand(o.length)] }.join\n\tend", "def default_key\n :\"#{self[:name]}_id\"\n end", "def set_unique_random_key(len = 16)\n begin\n chars = ('a'..'z').to_a + ('A'..'Z').to_a\n self.key = (0..(len-1)).collect { chars[Kernel.rand(chars.length)] }.join\n end while Key.find_by_key(self.key)\n end", "def build_key(last_part)\n [scope[:id], super].compact.join(\"_\")\n end", "def gen_id\n SecureRandom.hex(32)\n end", "def set_identifier\n time_tag = \"#{Time.now.to_s}\".gsub(/[^0-9]/,'')[0,14]\n fingerprint_tag = \"#{fingerprint}\".gsub(/[^0-9a-zA-Z]/,'')[-6,6]\n self.identifier ||=\n begin\n case key_type\n when KEY_TYPE_USER\n user.gitolite_identifier\n #\"#{user.gitolite_identifier}_#{fingerprint_tag}_#{time_tag}\"\n when KEY_TYPE_DEPLOY\n \"#{user.gitolite_identifier}_#{fingerprint_tag}_#{time_tag}_#{DEPLOY_PSEUDO_USER}\"\n end\n end\n end", "def jmaki_generate_uuid(name)\n if (@jmaki_uuid == nil)\n @jmaki_uuid = 0\n end\n @jmaki_uuid = @jmaki_uuid + 1\n return name.tr('.', '_') + \"_\" + @jmaki_uuid.to_s;\n end", "def generate_key\n SecureRandom.hex(32)\nend", "def create_guid\n self.id ||= UUIDTools::UUID.random_create.to_s\n end", "def unique_object_name_for(name)\n \"#{name}_#{SecureRandom.hex(5)}\"\n end", "def simple_cache_key\n \"#{self.class.name.underscore}/#{id}\"\n end", "def generate_auth_key\n name = \"fasdfadf\"\n puts self.name\n puts name\n end", "def gen_uid\n \"#{rand(100000)}-#{Time.now.to_i}-#{rand(100000)}\"\n end", "def cache_key\n \"#{self.class.cache_key_base}/#{self.id}\"\n end", "def generate_random_key\n (0...8).map { 65.+(rand(25)).chr }.join\n end", "def generate_new_id\n UUIDTools::UUID.random_create.to_s\n end", "def make_key t\n (sig_key(t) + sum_key(t))[0..MAX_KEY_SIZE].sub(/\\0+\\z/, \"\")\n end", "def default_key\n :\"#{self[:name]}_id\"\n end", "def cache_key\n case\n when new_record? then\n \"#{self.class.name.underscore}/new\"\n when ::ActiveRemote.config.default_cache_key_updated_at? 
&& (timestamp = self[:updated_at]) then\n timestamp = timestamp.utc.to_s(self.class.cache_timestamp_format)\n \"#{self.class.name.underscore}/#{self.to_param}-#{timestamp}\"\n else\n \"#{self.class.name.underscore}/#{self.to_param}\"\n end\n end", "def generate_registration_key(components)\n string_to_encode = components.join('|')\n\n Digest::MD5.hexdigest(string_to_encode)\n end", "def kid_generator; end", "def kid_generator; end", "def kid_generator; end", "def gen\n key = :exists\n @mutex.synchronize do\n key = rand(2*31) while @hash[key]\n @hash[key] = true\n end\n key\n end", "def generate_id\n SecureRandom.hex(8)\n end", "def gen_node_key(num)\n \"node#{num}\".to_sym\nend", "def generate_identifier\n self.identifier ||= self.name.parameterize.underscore\n end", "def new_id\n dbm = self.class.dbm\n\n max = dbm.keys.map { |k| k.to_i }.max || 0\n id = max + 1\n\n dbm[id.to_s] ||= \"\"\n\n id.to_s\n end" ]
[ "0.8125229", "0.7617282", "0.7545355", "0.7478967", "0.74477893", "0.74038094", "0.7332637", "0.72810644", "0.7180655", "0.7153357", "0.7148903", "0.7148903", "0.71096706", "0.7105881", "0.70891494", "0.7080526", "0.6984075", "0.69129705", "0.69052917", "0.68694407", "0.6867765", "0.68449706", "0.6823885", "0.68053514", "0.6799912", "0.6792843", "0.6785258", "0.67812747", "0.6772621", "0.67604613", "0.67544115", "0.6754346", "0.67402", "0.67382723", "0.6737363", "0.67012227", "0.6700686", "0.6700686", "0.6692672", "0.66830343", "0.66792214", "0.6678607", "0.6667134", "0.6662114", "0.6658342", "0.6643047", "0.66358864", "0.66354626", "0.66308224", "0.66165525", "0.6608234", "0.6590358", "0.6588923", "0.6583538", "0.6568216", "0.6566976", "0.65516734", "0.6533849", "0.6533849", "0.65234846", "0.6520579", "0.651555", "0.64970493", "0.647881", "0.64702255", "0.6468959", "0.64612", "0.64612", "0.64576113", "0.64505434", "0.6447592", "0.6447592", "0.64324653", "0.64291847", "0.642412", "0.6422508", "0.6413699", "0.64109325", "0.64107984", "0.63949406", "0.63838845", "0.6377147", "0.6365359", "0.6364701", "0.6363579", "0.6362939", "0.63560003", "0.6351371", "0.6351281", "0.634988", "0.6349362", "0.6349026", "0.63477796", "0.63477796", "0.63477796", "0.6339979", "0.63393134", "0.63380235", "0.6331448", "0.6328388" ]
0.7108455
13
Seed multiple ethnicities by loading the YAML file
def db_seed_ethnicities path = Rails.root.join('db','seeds','ethnicities.yml') File.open(path) do |file| # puts "Seeding App Ethnicities from #{path}" YAML.load_stream(file) do |doc| doc.keys.each do |group| # puts "Seeding ethnicity with group #{group}" names = doc[group] names.each do |name| db_seed_ethnicity(group, name) end end end end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def seed\n establish_connection(\n database_yaml_path: options.fetch('database_yaml'),\n environment: options.fetch('environment')\n )\n\n load_seeds\n end", "def db_seed_questions\n path = Rails.root.join('db','seeds','questions.yml')\n File.open(path) do |file|\n # puts \"Seeding App Questions from #{path}\"\n YAML.load_stream(file) do |doc|\n doc.keys.sort.each do |tag|\n # puts \"Seeding question with tag #{tag}\"\n attributes = doc[tag]\n db_seed_question(tag, attributes)\n end\n end\n end\nend", "def load_seed\n s1 = Station.new('Moscow')\n s2 = Station.new('St.Petersburg')\n s3 = Station.new('Novosibirsk')\n s4 = Station.new('Chelyabinsk')\n s5 = Station.new('Khabarovsk')\n\n t1 = PassengerTrain.new('PAS-01')\n t3 = CargoTrain.new('CAR-01')\n\n r1 = Route.new(s1, s5)\n r2 = Route.new(s3, s1)\n\n r1.add_station(s4)\n r2.add_station(s2)\n\n t1.route = r1\n t3.route = r2\n\n w1 = PassengerWagon.new(random_wagon_number, 120)\n w2 = PassengerWagon.new(random_wagon_number, 110)\n w3 = PassengerWagon.new(random_wagon_number, 115)\n w4 = CargoWagon.new(random_wagon_number, 6000)\n w5 = CargoWagon.new(random_wagon_number, 7000)\n w6 = CargoWagon.new(random_wagon_number, 5000)\n\n t1.add_wagon(w1)\n t1.add_wagon(w2)\n t1.add_wagon(w3)\n\n t3.add_wagon(w4)\n t3.add_wagon(w5)\n t3.add_wagon(w6)\n\n 65.times { w1.take_seat }\n 43.times { w2.take_seat }\n 88.times { w3.take_seat }\n\n w4.occupy_volume(123.45)\n w5.occupy_volume(1234)\n w6.occupy_volume(432.1)\n\n menu_list[:main_menu][:menu].delete('9')\n print_nn 'Seed data successfully loaded.'\n end", "def load_seeds_data(table_name)\n seed_file = Rails.root.join('db','seeds_data',\"#{table_name}.yml\")\n YAML::load_file(seed_file)\n end", "def load_seed; end", "def load_seed; end", "def load_fixtures(name)\n data = YAML.load_file File.join(File.dirname(__FILE__), \"#{name}.yml\")\n # hmm, how do I dynamically add instance variables? 
data.each { |k,v| @`k` = User.new data[k] }\n @user_1 = User.new data['user_1']\n @user_2 = User.new data['user_2']\n end", "def seed_bank_card_types\n\tdata = ActiveSupport::JSON.decode(File.read('db/seeds/bank_card_types.json'))\n\tdata.each do |d|\n\t\tBankCardType.create!(d)\n\tend\nend", "def load_seeds(*files)\n env = Rails.env\n\n `cp -R ./spec/fixtures/seeds/#{env}/files ./spec/fixtures/db/seeds/#{env}`\n\n files.each do |file|\n `cp ./spec/fixtures/seeds/#{env}/#{file} ./spec/fixtures/db/seeds/#{env}`\n end\n\n yield\n\n `rm -R ./spec/fixtures/db/seeds/#{env}/files`\n\n files.each do |file|\n `rm ./spec/fixtures/db/seeds/#{env}/#{file}`\n end\nend", "def pets\n load_yaml('personaje/pets')\nend", "def seed_banks\n\tdata = ActiveSupport::JSON.decode(File.read('db/seeds/banks.json'))\n\tdata.each do |d|\n\t\tBank.create!(d)\n\tend\nend", "def db_seed_categories\n path = Rails.root.join('db','seeds','categories.yml')\n File.open(path) do |file|\n YAML.load_stream(file) do |doc|\n doc.keys.sort.each do |key|\n # puts \"Seeding comparison category structure #{key}\"\n # puts \"Warning this will delete any comparison category <-> question relationships existing for any duplicate categories\"\n attributes = doc[key]\n db_seed_category(attributes)\n end\n end\n end\nend", "def fill_from_yaml(yaml)\n fill_from_hash(Psych.load(yaml))\n end", "def run\n\t\t\tflush_database\n\t\t\tseed_countries\n\t\t\tseed_group_organizations\n\t\t\tseed_organizations\n\t\t\tseed_locations\n\t\t\tseed_admin\n\t\t\tseed_api_key\n\t\tend", "def fixtures(name)\n entry = YAML::load_file(File.dirname(__FILE__) + \"/spec/fixtures/#{name}.yaml\")\n klass = begin\n Kernel::const_get(Inflector.classify(Inflector.singularize(name)))\n rescue\n nil\n end\n\n unless klass.nil?\n database.logger.debug { \"AUTOMIGRATE: #{klass}\" }\n klass.auto_migrate!\n\n (entry.kind_of?(Array) ? entry : [entry]).each do |hash|\n if hash['type']\n Object::const_get(hash['type'])::create(hash)\n else\n klass::create(hash)\n end\n end\n else\n table = database.table(name.to_s)\n table.create! true\n table.activate_associations!\n\n #pp database.schema\n\n (entry.kind_of?(Array) ? entry : [entry]).each do |hash|\n table.insert(hash)\n end\n end\nend", "def load_data(options={})\n @data = @files.collect do |seeds_file|\n seeds_file.instanciate_data(@keys, :override => options[:override]) \n end.flatten.compact\n end", "def fill_from_yaml_file(filename)\n yml = File.read(File.join(User.recipes, filename), encoding: 'utf-8')\n fill_from_yaml(yml)\n @filename = filename\n end", "def initialize(seed_file)\n setup_grid(seed_file)\n end", "def populate(file, klass)\n yml = YAML.load_file(file)\n yml.each_pair do |key, values|\n obj = klass.new\n values.each_pair do |k, val|\n obj.send((\"#{k}\" + \"=\").to_sym, val) unless val == \"id\"\n end\n obj.save\n end\nend", "def test_new\n assert_equal({\n \"drylands\"=>\"Drylands\",\n \"savannah\"=>\"Savanna\",\n }, FakeYaml.load(sample(\"terrain_stuff_l_english\")))\n end", "def load_yaml(file)\n @data = YAML.load(File.open(file))\n\n @data[\"people\"].each do |yaml_person|\n person = Person.new(yaml_person[\"fname\"], yaml_person[\"surname\"], yaml_person[\"dob\"])\n\n yaml_person[\"emails\"].each do |e|\n person.add_email(e)\n end\n\n yaml_person[\"phones\"].each do |e|\n person.add_phone(e)\n end\n\n add(person) \n end\n end", "def create_species_and_data\n # Entrance message\n puts \"** Creating new species from lifeviz/ubiota files using hagrid_ubid as the bridge\"\n puts \" Note! 
New species are species with data imported from lifeviz. Orphaned species \"\n puts \" are ubiota species with no associated lifeviz data.\"\n new_species = []\n orphaned_species = []\n \n # Open files\n lifeviz, ubiota, map = nil\n seed \"Opening data files\" do\n lifeviz = IO.popen(\"bunzip2 -c #{LIFEVIZ}\")\n ubiota = IO.popen(\"bunzip2 -c #{UBIOTA}\")\n map = IO.readlines(LIFEVIZ_UBIOTA)\n lifeviz && ubiota && map ? true : false\n end\n\n # Dump all species\n seed \"Removing existing species.\"\n progress \"Deleting\", Species.count do |progress_bar|\n Species.all.each do |species|\n species.delete\n progress_bar.inc\n end\n end\n\n # Dump all related data\n seed \"Removing any existing age, litter sizes, adult weights, birth weights data\" do\n Lifespan.delete_all && LitterSize.delete_all && AdultWeight.delete_all && BirthWeight.delete_all ? true : false\n end\n\n # Load taxon from lifeviz, let's use hpricot\n lifeviz_species, lifeviz_ages, lifeviz_development, lifeviz_refs = nil\n seed \"Loading lifeviz data with hpricot\" do\n doc = Hpricot::XML(lifeviz)\n lifeviz_species = (doc/'names')\n lifeviz_ages = (doc/'age')\n lifeviz_development = (doc/'development')\n lifeviz_refs = (doc/'refs')\n (lifeviz_species.size > 0 && lifeviz_ages.size > 0 && lifeviz_development.size > 0 && lifeviz_refs.size > 0) ? true : false\n end\n notice \"#{lifeviz_species.size} species loaded with #{lifeviz_ages.size} ages\"\n\n # Create new species array to load lifeviz species and attributes we want\n seed \"Loading new species and storing lifeviz data from lifeviz dump\"\n development_index = ref_index = 0\n progress \"Storing\", lifeviz_species.length do |progress_bar|\n lifeviz_species.each_with_index do |s, index|\n hagrid = (s/'id_hagr').inner_html.to_i\n x = {}\n x[:synonyms] = (s/'name_common').inner_html\n x[:age] = (lifeviz_ages[index]/'tmax').inner_html\n x[:context] = (lifeviz_ages[index]/'phenotype').inner_html\n x[:hagrid] = hagrid\n x[:references] = x[:context].scan(/\\[(\\d*)\\]/).flatten\n \n while lifeviz_development[development_index] && (lifeviz_development[development_index]/'hagrid').inner_html.to_i < hagrid\n notice \"#{(lifeviz_development[development_index]/'hagrid').inner_html} is less than #{hagrid}\"\n development_index += 1\n end\n # development attributes matches the current species id\n if lifeviz_development[development_index] && (lifeviz_development[development_index]/'hagrid').inner_html.to_i == hagrid\n development = lifeviz_development[development_index]\n if development && (development/'hagrid').inner_html.to_i == hagrid\n x[:adult_weight] = (development/'adult_weight').inner_html.blank? ? \"\" : (development/'adult_weight').inner_html.to_f\n x[:birth_weight] = (development/'birth_weight').inner_html.blank? ? \"\" : (development/'birth_weight').inner_html.to_f\n x[:litter_size] = (development/'litter_size').inner_html.blank? ? 
\"\" : (development/'litter_size').inner_html.to_f\n else\n x[:adult_weight] = \"\"\n x[:birth_weight] = \"\"\n x[:litter_size] = \"\"\n end\n development_index += 1\n end\n new_species << x\n progress_bar.inc\n end\n end\n notice \"#{new_species.length} new species stored\"\n\n # Load ubid ids into new species from mapping\n seed \"Loading mapped ubiota ids into new species\" do\n new_species_pointer = 0\n map.each do |line|\n hagrid, ubid = line.split(/\\s+/)\n while new_species[new_species_pointer] && hagrid.to_i != new_species[new_species_pointer][:hagrid]\n new_species_pointer += 1\n end\n new_species[new_species_pointer][:ubid] = ubid.to_i if new_species[new_species_pointer]\n end\n end\n\n # Remove any new species that have no ubid from mapping\n count = new_species.size\n seed \"Delete any new species that do not have a ubiota id mapped\", \n :success => \"Mappings completed\", \n :failure => \"No species had mappings\" do\n new_species.delete_if { |species| species[:ubid] == nil }\n new_species.length != 0 ? true : false\n end\n notice \"deleted #{count - new_species.size} species, #{new_species.size} remaining\"\n\n # Sort species by ubid\n seed \"Sorting new species by ubid\" do\n new_species = new_species.sort_by { |each| each[:ubid] }\n true\n end\n\n # Find and load ubiota genus ids and species name for each species\n # Ensure the rank is 6 (species level)\n # Set taxon_id to nil if the species inside ubiota doesn't exist\n seed \"Looking up and loading each new species' genus id from the ubiota data\"\n x = 0\n a_couple = 0\n num_lines = num_lines_bz2(UBIOTA)\n progress \"Matching\", num_lines do |progress_bar|\n ubiota.each do |line|\n id, term, rank, hierarchy, parent_id, num_children, hierarchy_ids = line.split(\"|\")\n # skip if we're not looking at a species level taxon\n if rank.to_i != 6\n progress_bar.inc\n next\n end\n if new_species[x].nil? || id.to_i != new_species[x][:ubid]\n y = {:taxon_id => parent_id.to_i, :name => term.to_s}\n orphaned_species << y\n if !new_species[x].nil? then new_species[x][:taxon_id] = nil end\n if !new_species[x].nil? 
&& id.to_i > new_species[x][:ubid] then x += 1 end\n else\n new_species[x][:taxon_id] = parent_id.to_i\n new_species[x][:name] = term.to_s\n x += 1\n end\n progress_bar.inc\n end\n end\n notice \"traversed #{x} new species and #{orphaned_species.size} orphaned species\"\n\n # Remove any new species that has no genus in ubiota\n count = new_species.size\n seed \"Delete any species that had no genus id\" do\n new_species.delete_if { |species| species[:taxon_id] == nil }\n end\n notice success_string(\"deleted #{count - new_species.size} species, #{new_species.size} remaining\")\n\n # Remove any orphaned species that has no genus in ubiota\n count = orphaned_species.size\n seed \"Delete any orphaned species that had no genus id\" do\n orphaned_species.delete_if { |species| species[:taxon_id] == 0 }\n end\n notice success_string(\"deleted #{count - orphaned_species.size} species, #{orphaned_species.size} remaining\")\n\n # Create species with all the new species stored in memory\n count = species_without_parents = 0\n seed \"Saving all of the new species.\"\n\n progress \"Species\", new_species.length do |progress_bar|\n new_species.each_with_index do |taxon, index|\n s = new_species[index]\n species = Taxon.new(:name => s[:name], :parent_id => s[:taxon_id], :rank => 6, :id => s[:ubid])\n species.send(:create_without_callbacks)\n # # This was commented out because we're using Cera's lifespan data now.\n # unless s[:age].blank?\n # s[:references].each do |reference_id|\n # lifespan = Lifespan.new(:value_in_days => (s[:age].to_f * 365), :units => \"Years\", :species_id => species.id)\n # lifespan.context = s[:context]\n # lifespan.citation = Reference.find(reference_id).to_s\n # lifespan.created_by = ANAGE_USER_ID\n # lifespan.created_by_name = ANAGE_USER_NAME\n # lifespan.send(:create_without_callbacks)\n # end\n # end\n BirthWeight.new(\n :value_in_grams => (s[:birth_weight]),\n :units => \"Grams\",\n :species_id => species.id,\n :created_by => ANAGE_USER_ID,\n :created_by_name => ANAGE_USER_NAME\n ).send(:create_without_callbacks) unless s[:birth_weight].blank?\n AdultWeight.new(\n :value_in_grams => (s[:adult_weight]),\n :units => \"Grams\",\n :species_id => species.id,\n :created_by => ANAGE_USER_ID,\n :created_by_name => ANAGE_USER_NAME\n ).send(:create_without_callbacks) unless s[:adult_weight].blank?\n LitterSize.new (\n :value => (s[:litter_size]),\n :species_id => species.id,\n :created_by => ANAGE_USER_ID,\n :created_by_name => ANAGE_USER_NAME\n ).send(:create_without_callbacks) unless s[:litter_size].blank?\n count = index\n progress_bar.inc\n end\n end\n notice success_string(\"saved #{count - species_without_parents} species\")\n notice success_string(\"saved #{Lifespan.count} ages\")\n notice success_string(\"saved #{AdultWeight.count} adult weights\")\n notice success_string(\"saved #{BirthWeight.count} birth weights\")\n notice success_string(\"saved #{LitterSize.count} litter sizes\")\n notice failure_string(\"#{species_without_parents} species didn't have taxons matching taxon_id in our database\") if species_without_parents != 0\n\n # Create orphaned species with all the species stored in memory\n count = 0\n species_without_parents = 0\n seed \"Saving all the orphaned species\"\n progress \"Saving orphans\", orphaned_species.length do |progress_bar|\n orphaned_species.each_with_index do |s, index|\n taxon = Taxon.find_by_id(s[:taxon_id])\n if taxon == nil\n # notice failure_string(\"no taxon found with an id of #{s[:taxon_id].to_s} for species with ubid of 
#{s[:ubid].to_s}\")\n species_without_parents += 1\n else\n species = Taxon.new(:name => s[:name], :parent_id => taxon.id, :rank => 6)\n species.send(:create_without_callbacks)\n end\n count = index\n progress_bar.inc\n end\n end\n notice success_string(\"Phew!... saved #{count - species_without_parents} species\")\n notice failure_string(\"#{species_without_parents} species didn't have taxons matching taxon_id in our database\") if species_without_parents != 0\n\n seed \"Rebuilding heirarchical tree\" do\n Taxon.rebuild!\n end\n\n seed \"Vacuuming database\" do\n SQL.execute \"VACUUM ANALYZE;\"\n end\n\n notice \"Species creation is completed.\"\nend", "def setup_default_fixtures(files = ['sample_actors' , 'users', 'wiki_entries'])\n Fixtures.reset_cache\n files.each do |f|\n Fixtures.create_fixtures( File.dirname(__FILE__) + '/../fixtures' , File.basename( f , '.*'))\n end\nend", "def load_network_yml\n new_cfg = YAML.load_file(File.join(pattern_path('vnet_pattern'), 'config', 'network.yml'))\n\n yml_file = File.join(platform_pattern_path, 'config', 'network.yml')\n new_cfg = ::Chef::Mixin::DeepMerge.deep_merge(YAML.load_file(yml_file), new_cfg) if File.exist?(yml_file)\n\n optional_pattern_names.each do |name|\n yml_file = File.join(pattern_path(name), 'config', 'network.yml')\n new_cfg = ::Chef::Mixin::DeepMerge.deep_merge(YAML.load_file(yml_file), new_cfg) if File.exist?(yml_file)\n end\n\n new_cfg.with_indifferent_access\nend", "def fill_database(db, number_of_people, number_of_villages)\r\n number_of_people.to_i.times do\r\n fill_nightswatch(db, Faker::GameOfThrones.character, Faker::GameOfThrones.house, Faker::Number.between(1, number_of_villages), Faker::Number.between(1, 5), Faker::Number.between(1, 60))\r\n end\r\n number_of_villages.times do\r\n fill_village_table(db, Faker::GameOfThrones.city)\r\n end\r\nend", "def seed_all\n current_or_create_new_misc\n current_or_create_new_institutions\n current_or_create_new_williams_dining_opps\n current_or_create_new_williams_dining_places\n current_or_create_new_dining_opps\n seed_app_dining_periods\n seed_williams_rss_scraping\nend", "def seed_roles\n\tdata = ActiveSupport::JSON.decode(File.read('db/seeds/roles.json'))\n\tdata.each do |d|\n\t\tRole.create!(d)\n\tend\nend", "def externalRecipeSeed\n 20.times do\n api_recipe = get_random_recipe()\n createRecipeFromAPI(api_recipe)\n end\nend", "def load_all_seeds\n puts \"Loading db/seeds/*\"\n Dir[File.join(Rails.root, 'db', 'seeds', '*.rb')].sort.each { |seed| load seed }\nend", "def load_all_fixtures\n \n end", "def seed_data_for_holistic(prof, student)\n course = Course.create!(name: \"Intro to Computer (Holistic)\",\n pin: Faker::Number.number(digits: 6),\n professor_id: prof.id,\n minimum_group_member: 3,\n maximum_group_member: 5,\n has_group: false,\n is_voting: false,\n state: \"choose_algo\",\n withProject: true)\n students = []\n 40.times do\n first = Faker::Name.first_name\n last = Faker::Name.last_name\n s = User.create!(firstname: first,\n lastname: last,\n email: last + Faker::Number.number(digits: 6).to_s + \"@brandeis.edu\",\n password: \"password\", type: \"Student\", time_zone: seed_time_zone)\n students << s\n Taking.create!(student_id: s.id, course_id: course.id, state: \"created\")\n end\n Taking.create!(student_id: student.id, course_id: course.id, state: \"created\")\n students << student\n projects = []\n 10.times do\n active_project = Project.create!(project_name: Faker::Team.name, course_id: course.id, description: Faker::Game.genre,\n is_active: true, 
number_of_likes: 0, added_by: students.sample)\n projects << active_project.id\n end\n 4.times do\n inactive_project = Project.create!(project_name: Faker::Team.name, course_id: course.id, description: Faker::Game.genre,\n is_active: false, number_of_likes: 0, added_by: students.sample)\n end\n seed_voting(students, course, projects)\n seed_preference(course, students)\n puts \"seed holistic\"\nend", "def setup\r\n # Retrieve fixtures via their name\r\n # @first = category(:first)\r\n end", "def seeds\n # Create Universities. !!!!\n puts \"\\n\\n\\n\\n\\nCreating Universities!!!\\n\\n\\n\\n\"\n workbook = RubyXL::Parser.parse(Rails.root.join('public', 'data', 'Universities.xlsx'))\n worksheet = workbook[0]\n\n worksheet.each_with_index do |row, index|\n next if index == 0 || row.blank? || row.cells.blank?\n cells = row.cells\n uni_name = cells[0].value\n next unless uni_name.present?\n uni = University.where(name_en: uni_name).first_or_create\n puts \"**************uni_name:::#{uni_name}\"\n uni.name_en = cells[0].value\n uni.name_cn = cells[1].try(:value)\n uni.web_link = cells[2].try(:value)\n uni.contact = cells[3].try(:value)\n uni.address = cells[4].try(:value)\n uni.grad_website = cells[5].try(:value)\n uni.grad_contact = cells[6].try(:value)\n uni.grad_address = cells[7].try(:value)\n uni.program_list = cells[8].try(:value)\n uni.overview = cells[11].try(:value)\n uni.news_18 = cells[12].try(:value)\n uni.news_17 = cells[13].try(:value)\n uni.description_en = cells[15].try(:value)\n uni.description_cn = cells[16].try(:value)\n uni.save\n end\n\n # Create Departments. !!!!\n puts \"\\n\\n\\n\\n\\nCreating departments!!!\\n\\n\\n\\n\"\n workbook = RubyXL::Parser.parse(Rails.root.join('public', 'data', 'Departments.xlsx'))\n worksheet = workbook[1]\n worksheet.each_with_index do |row, index|\n next if index == 0\n cells = row.cells\n uni_name = cells[1].value\n dept_name = cells[2].value\n next unless uni_name.present?\n next unless dept_name.present?\n uni = University.where(name_en: uni_name).first_or_create\n puts \"Found university: #{uni_name}. Looking for its department #{dept_name}\"\n dept = uni.departments.where(name_en: dept_name).first_or_create\n dept.website = cells[3].value\n dept.address = cells[4].value\n dept.contact = cells[5].value\n dept.program_list = cells[6].value\n dept.tofel_code = cells[7].value\n dept.gre_code = cells[8].value\n dept.gmat_code = cells[9].value\n dept.description_en = cells[10].value\n dept.description_cn = cells[11].value\n dept.save!\n end\n\n # # Create Programs. !!!!\n # puts \"\\n\\n\\n\\n\\nCreating Programsssss!!!\\n\\n\\n\\n\"\n # workbook = RubyXL::Parser.parse(Rails.root.join('public', 'data', 'Programs.xlsx'))\n # worksheet = workbook[1]\n # worksheet.each_with_index do |row, index|\n # next if index == 0\n # cells = row.cells\n # uni_name = cells[0].value\n # dept_name = cells[1].value\n # prog_name = cells[2].value\n # next unless uni_name.present?\n # next unless dept_name.present?\n # next unless prog_name.present?\n # uni = University.where(name_en: uni_name).first_or_create\n # puts \"Found university: #{uni_name}. 
Looking for its department #{dept_name}\"\n # dept = uni.departments.where(name_en: dept_name).first_or_create\n # program = dept.programs.where(name_en: prog_name).first_or_create\n # program.name_en = cells[2].value\n # program.degree = cells[3].value\n # program.website = cells[4].value\n # program.adminssion = cells[5].value\n # program.fall_deadline = cells[6].value\n # program.fall_deadline_round1 = cells[7].value\n # program.fall_deadline_round2 = cells[8].value\n # program.spring_deadline = cells[9].value\n # program.addmission_requirements = cells[10].value\n # program.contact = cells[11].value\n # program.tution = cells[12].value\n # program.note_en = cells[13].value\n # program.save!\n # end\n\n end", "def db_seed\n\n 10.times do\n\n brands = Faker::Commerce.product_name\n product_names = Faker::Name.name \n prices = Faker::Commerce.price\n\n Product.create( brand: brands,\n name: product_names,\n price: prices)\n end\nend", "def db_seed\n # Your code goes here!\n\n brands = []\n product_names = []\n prices = []\n 10.times do\n brands.push(Faker::Company.name)\n product_names.push(Faker::Commerce.product_name)\n prices.push(Faker::Commerce.price(10..50))\n end\n\n 10.times do\n Product.create(brand: brands.sample, name: product_names.sample, price: prices.sample )\n end\n\nend", "def load_seeds(path = './')\n fullpath = File::expand_path(File::join(LT.env.seed_path,path))\n seedfiles = Dir::glob(File::join(fullpath,'*'+SEED_FILES))\n seedfiles.each do |seedfile|\n load File::expand_path(seedfile)\n end\n end", "def seed_mindleaps_skills(organization)\n Skill.create([\n { skill_name: 'Memorization', skill_description: 'Ability to learn and recall sequences or patterns of information.', organization: organization, grade_descriptors: GradeDescriptor.create([\n { mark: 1, grade_description: 'Student cannot recall a sequence of 4 steps even after prompted at least three times' },\n { mark: 2, grade_description: 'Student can recall a sequence of 4 steps with 1 or no prompts' },\n { mark: 3, grade_description: 'Student can recall at least 2 sections of the warm up from class to class' },\n { mark: 4, grade_description: 'Student can recall entire warm up sequence, moving in time with the teacher, and can repeat diagonal steps after one prompt from the teacher' },\n { mark: 5, grade_description: 'Student can recall entire warm up sequence without teacher guidance, at least four diagonal steps, and at least 16 counts of choreography without teacher prompts' },\n { mark: 6, grade_description: 'Student can recall entire warm up, at least eight diagonal steps, all of choreography and name at least 6 muscles without teacher prompts' },\n { mark: 7, grade_description: 'Student can memorize a new sequence of choreography given in one class and do all of the above' }\n ]) },\n { skill_name: 'Grit', skill_description: 'Perseverance and passion for long-term goals.', organization: organization, grade_descriptors: GradeDescriptor.create([\n { mark: 1, grade_description: 'Student arrives at the center but does not participate in dance class even with teacher or peer encouragement' },\n { mark: 2, grade_description: 'Student participates in less than half of the class' },\n { mark: 3, grade_description: 'Student is present but not actively trying throughout the entire class' },\n { mark: 4, grade_description: 'Student participates in warm up, recognizes change in directions, understands number repetitions, and completes at least 1/2 of diagonal or choreography sections of class' },\n { mark: 5, 
grade_description: 'Student participates in the entire class and noticeably demonstrates persistence when struggling' },\n { mark: 6, grade_description: 'All of the above and student asks or answers questions' },\n { mark: 7, grade_description: 'Student shows an extraordinary level of commitment by either practicing before/after class (self-initiated), asking questions that suggest a deeper analysis, or asking for more opportunities to practice' }\n ]) },\n { skill_name: 'Teamwork', skill_description: 'Ability to work and/or create with other students.', organization: organization, grade_descriptors: GradeDescriptor.create([\n { mark: 1, grade_description: 'Student refuses to hold hands or interact with partner in a required sequence across the floor' },\n { mark: 2, grade_description: 'Student will do above, but is unable to work or communicate with his/her peer in any piece of choreography or another part of class, even when encouraged by the teacher' },\n { mark: 3, grade_description: 'Student can work together with his/her peer in 2 or 3 simple steps in diagonal (two by two) or choreography when demonstrated/encouraged by the teacher with two verbal prompts' },\n { mark: 4, grade_description: 'Student can work together with his/her peer in a section of diagonal (two by two) and complete at least four partnered/group movements in choreography' },\n { mark: 5, grade_description: 'Student can work in a group to create a short choreographic piece with teacher coaching' },\n { mark: 6, grade_description: 'Student can work in a group to create a short choreographic piece without teacher coaching' },\n { mark: 7, grade_description: 'Student can work in a group to create a piece that is presented to the rest of class' }\n ]) },\n { skill_name: 'Discipline', skill_description: 'Ability to obey rules and/or a code of conduct.', organization: organization, grade_descriptors: GradeDescriptor.create([\n { mark: 1, grade_description: 'Student repeatedly talks back, fights, hits or argues with peers and teachers and does not stop even when asked repeatedly; student is sent out of the class for 5-10 minutes by the teacher' },\n { mark: 2, grade_description: 'Student has to be reminded at least twice by name to respect his peers and/or pay attention to the teacher' },\n { mark: 3, grade_description: 'Student has to be reminded once by name to respect his peers or pay attention to the teacher' },\n { mark: 4, grade_description: 'Student respects/pays attention to the teacher, but bothers his peers, or vice versa (with no comments/prompts by teacher)' },\n { mark: 5, grade_description: 'Student works well with others and no teacher intervention is needed' },\n { mark: 6, grade_description: 'Student actively encourages others to pay attention and improve their behavior' },\n { mark: 7, grade_description: 'Student actively becomes a role model of exceptional, respectful behavior to the others' }\n ]) },\n { skill_name: 'Self-Esteem', skill_description: 'Confidence in one’s own abilities.', organization: organization, grade_descriptors: GradeDescriptor.create([\n { mark: 1, grade_description: 'Student cannot perform any movement isolated (by himself)' },\n { mark: 2, grade_description: 'Student can perform a sequence of 2-4 steps on his own' },\n { mark: 3, grade_description: 'Student can continue through warm up sections and repetition of diagonal steps without encouragement from the teacher' },\n { mark: 4, grade_description: 'Student can demonstrate by himself steps of the diagonal and volunteer parts of 
the choreography when asked by the teacher' },\n { mark: 5, grade_description: 'Student can demonstrate the warm up, diagonal steps and all of the choreography by himself with confidence and no prompts' },\n { mark: 6, grade_description: 'Student can verbally explain movement in the warm up, diagonal and choreography' },\n { mark: 7, grade_description: 'Student demonstrates confidence as a person and dancer through extending full use of body in space ' }\n ]) },\n { skill_name: 'Creativity & Self-Expression', skill_description: 'Use of imagination.', organization: organization, grade_descriptors: GradeDescriptor.create([\n { mark: 1, grade_description: 'Student is unable to demonstrate personal creativity by making up any pose or movement of his own' },\n { mark: 2, grade_description: 'Student can only demontrate creative movement in a single step or movement with teacher\\'s prompts' },\n { mark: 3, grade_description: 'Student can make up his own arms for a sequence of steps' },\n { mark: 4, grade_description: 'Student can only demonstrate creative movement in a series of steps by copying the teacher or peer\\'s earlier demonstration' },\n { mark: 5, grade_description: 'Student can create his own movements that have not been taught before and differ from standard hip hop moves' },\n { mark: 6, grade_description: 'Student can create his own choreography' },\n { mark: 7, grade_description: 'Student can create his own choreography and present it' }\n ]) },\n { skill_name: 'Language', skill_description: 'The process to understand and communicate.', organization: organization, grade_descriptors: GradeDescriptor.create([\n { mark: 1, grade_description: 'Student is unable to count in a foreign language (eg English)' },\n { mark: 2, grade_description: 'Student can count with teacher prompting, and can recall some basic words with one prompt' },\n { mark: 3, grade_description: 'Student can count without prompts and recall some words' },\n { mark: 4, grade_description: 'Student can recite positions in the warm up, at least six of the diagonal steps\\' names and positions' },\n { mark: 5, grade_description: 'Student can recite positions in warm up, diagonal steps, and muscle names' },\n { mark: 6, grade_description: 'Student can recite simple phrases (minimum of 3 words)' },\n { mark: 7, grade_description: 'Student can make himself understood to ask questions or make comments' }\n ]) }\n ])\nend", "def setup\r\n # Retrieve fixtures via their name\r\n # @first = categories(:first)\r\n end", "def load_fixtures\n load(*fixture_table_names)\n end", "def load_gcp_seed()\n if @seed_data.nil?\n @seed_data = YAML.load( \n ERB.new(\n File.read(\n File.expand_path(GCP_SEED_FILE , __FILE__ ) \n )\n ).result \n )\n\n puts \"seed_data: #{@seed_data.object_id}\"\n\n end\n end", "def setup\n # Retrieve fixtures via their name\n # @first = accounts(:first)\n end", "def load_default_test_data_to_db_before_suite\n community1 = FactoryGirl.create(:community, :ident => \"test\", :consent => \"test_consent0.1\", :settings => {\"locales\" => [\"en\", \"fi\"]}, :real_name_required => true)\n community1.community_customizations.create(name: \"Yelo\", locale: \"fi\")\n community2 = FactoryGirl.create(:community, :ident => \"test2\", :consent => \"KASSI_FI1.0\", :settings => {\"locales\" => [\"en\"]}, :real_name_required => true, :allowed_emails => \"@example.com\")\n community3 = FactoryGirl.create(:community, :ident => \"test3\", :consent => \"KASSI_FI1.0\", :settings => {\"locales\" => [\"en\"]}, :real_name_required => 
true)\n\n [community1, community2, community3].each { |c| TestHelpers::CategoriesHelper.load_test_categories_and_listing_shapes_to_db(c) }\n end", "def initialize\n \tself.seed_data\n end", "def seed_refund_types\n\tdata = ActiveSupport::JSON.decode(File.read('db/seeds/refund_types.json'))\n\tdata.each do |d|\n\t\tRefundType.create!(d)\n\tend\nend", "def seed_name; config[:seed_name]; end", "def seed\n @data = {}\n\n SEEDED_MODELS.each do |model_name|\n pluralized_model_name = model_name.underscore + 's'\n\n filename = \"app/data/#{pluralized_model_name}.yml\"\n\n # Load data as an array of objects\n models_data = YAML.safe_load(File.read(filename)).deep_symbolize_keys\n\n # Constantize model klass\n model_klass = Class.const_get(\"CodeWars::#{model_name}\")\n\n # Map models data to new Instances\n models_data.each_with_index do |model_data, i|\n model = model_klass.new\n\n model.load_attributes(model_data || {})\n\n model.indexed_at = i\n\n # Push to DataStore memory\n\n @data[pluralized_model_name.to_sym] ||= []\n @data[pluralized_model_name.to_sym] << model\n end\n end\n end", "def populate_wiggles\n return if Wiggle.exists?\n print \"Creating Wiggles...\"\n sample = YAML.load_file(File.expand_path(\"lib/generators/sample_data/wiggles.yml\"))\n\n wiggles = sample[\"wiggles\"].take(10) \n \n wiggles.each do |wiggle| \n print \".\"\n new_wiggle = Wiggle.create(:name => wiggle[\"name\"].chomp, :description => LOREM_IPSUM)\n new_wiggle.opinions << Opinion.create(:value => rand(100))\n end\n end", "def seedTeams\n addresses = Address.all\n 5.times do\n\n SportsTeam.create(name: Faker::Team.name,\n address_id: addresses.sample.id,\n sport: Faker::Team.sport,\n stadium_size: Faker::Number.between(3000, 75000),\n trophies: Faker::Number.between(1, 10))\n end\nend", "def random_venue_yml\n puts \"venue#{@ctr}:\"\n @ctr = @ctr+1\n \n puts \" venue_id: #{@ctr}\"\n puts \" venue_name: #{@venues.random_item.to_s}\"\n puts \" venue_description: Description for #{@venues.random_item.to_s}\"\n puts \" contactinfo_id: 1\"\n puts\n \n \n \n end", "def setup\n # Retrieve fixtures via their name\n # @first = vr_cc_solicituds(:first)\n end", "def seed_categories\n template_categories = [\"Toys\",\"Cards\",\"Games\",\"Misc\"]\n template_categories.each do |category|\n Category.create(name: category)\n end\n puts \"created categories\"\nend", "def populate!()\n\n tile_factory = TileFactory.new()\n livingbeing_factory = LivingBeingFactory.new()\n\n @m.times do |y|\n @n.times do |x|\n\n # Water vs Ground\n location = Location.new(@n, @m, x, y)\n wg = tile_factory.create(Utils.generate_random_percentage(), location)\n\n if wg\n\n @tiles[x][y] = wg\n else\n\n raise StandardError.new(\"Incorrect Percentages less than 100\")\n end\n\n # If tile is Ground type...\n if @tiles[x][y].is_a?(Ground)\n \n lb = livingbeing_factory.create(Utils.generate_random_percentage())\n\n @tiles[x][y].livingbeing = lb if lb\n end\n end\n end\n\n self\n end", "def db_seed\n # Use faker gem to generate arrays with fake brands, product names, and prices.\n 10.times do\n # Create 10 new Product objects, and save them to the database\n Product.create name: Faker::Commerce.product_name, brand: Faker::Company.name, price: Faker::Commerce.price\n end\nend", "def seed_data\n 5.times do\n game = Game.create(date: Time.now.strftime(\"%FT%T.%LZ\"))\n rounds = game.rounds.build([{name: Faker::Name.name, user: User.find(2)}, {name: Faker::Name.name, user: User.find(2)}, {name: Faker::Name.name, user: User.find(2)}])\n rounds.each(&:save)\n 
rounds.length.times do |i|\n 18.times do |h|\n score = rounds[i].scores.build({score: Faker::Number.between(from: 1, to: 10), hole_id: h + 1})\n score.save\n end\n end\n end\nend", "def seed_data\n ExpressTranslate.seeds\n puts \"Seed data is successful!\"\n end", "def seed_interests(database=\"acani-staging\", collection=\"i\")\n interests = Mongo::Connection.new.db(database).collection(collection)\n interests.drop\n @@interest_id = 0\n require 'yaml'\n interests_yml = File.join(SEED_DIR, \"interests.yml\")\n interests_hash = YAML::load_file(interests_yml)\n def interests.insert_interest(interest_object, parent_id=nil)\n interest_id = @@interest_id.to_s(36)\n if interest_object.instance_of? String # base case\n insert({:_id => interest_id, :n => interest_object, :p => parent_id})\n @@interest_id += 1\n else # it's a hash\n interest_k_v = interest_object.first # get the only key-value pair in hash\n interest_name = interest_k_v[0] # key is the name\n insert({:_id => interest_id, :n => interest_name, :p => parent_id})\n @@interest_id += 1\n interest_k_v[1].each do |i| # value is an array of children\n insert_interest(i, interest_id)\n end\n end\n end\n interests.insert_interest interests_hash\n puts \"Seeded #{collection} collection in #{database} database with interests.yml.\"\n puts\n end", "def seed_clients\n 10.times do |n|\n Client.create!({\n sex: %w(male female other).sample,\n year_of_birth: [*1950..2004].sample,\n cell_phone: fake_cell_phone,\n category_ids: [1, 2, 3, 4].sample\n })\n end\nend", "def initialize\n load_config( \"entityid.yml\" )\n end", "def initialize\n load_config( \"entityid.yml\" )\n end", "def load_db\n basic_foods = {} # store BasicFood objects\n recipes = {} # store Recipe objects\n File.readlines('FoodDB.txt').each do |line|\n line = line.chomp.split(\",\")\n if line[1] == \"b\"\n basic_food = BasicFood.new(line[0], line[2])\n basic_foods[basic_food.name] = basic_food\n elsif line[1] == \"r\"\n recipe = Recipe.new(line[0], line[2..line.size])\n recipes[recipe.name] = recipe\n end\n end\n [basic_foods, recipes]\n end", "def db_seed\n 10.times do \n \tProduct.create(brand: Faker::Company.name, name: Faker::App.name, price: Faker::Commerce.price)\n end\nend", "def create_random_world\n randomize_terrain\n randomize_entities\n end", "def seed_breweries\n @states.each do |state|\n # Loads brewery data from API\n state_brewery_hash = get_state_breweries(state.name)\n state_brewery_hash.each do |brewery|\n # Create city first if doesn't exist\n city_name = brewery[:locality]\n\n # Check in case city is not provided\n\n if city_name != nil\n city_loader = CityLoader.new(city_name, state)\n brewery_city = city_loader.load_city\n city_id = brewery_city.id\n else\n city_id = nil\n end\n\n name = brewery[:brewery][:name]\n state_id = state.id\n\n street_address = brewery[:street_address]\n website = brewery[:website]\n phone = brewery[:phone]\n hours_of_operation = brewery[:hours_of_operation]\n latitude = brewery[:latitude]\n longitude = brewery[:longitude]\n\n # IN THE FUTURE: FIND OR INITIALIZE?\n Brewery.create(\n name: name,\n state_id: state_id,\n city_id: city_id,\n street_address: street_address,\n website: website,\n phone: phone,\n hours_of_operation: hours_of_operation,\n latitude: latitude,\n longitude: longitude\n )\n end\n end\n end", "def setEtcFacts\n etcHash = {}\n\n # The huron_class fact is a list of classes that should be applied to our machines.\n # Set the initial value equal to the building/function sections of each machine's hostname.\n # If the 
hostname does not conform, set huron_class equal to our general_image class.\n huron_class = %x{hostname | awk -F '-' '{print $2 \",\" $3}'}.chomp.gsub!(/[.].*/,\"\")\n \n # If our hostname does not match the district format, set huron_class to nodes.pp - this\n # way our External Node Classifier script will recognize this and default to the nodes.pp\n # file class declarations.\n if huron_class == \",\" or huron_class == nil\n huron_class = \"nodes.pp\"\n end\n \n # Output values to /etc/facts.txt file\n etcHash = {\"huron_class\" => huron_class, \"environment\" => \"production\"}\n File.open(\"/etc/facts.txt\", 'w') {|g| etcHash.each{|key, value| g.write(key + \"=\" + value + \"\\n\")}} \n>>>>>>> master\nend", "def seed_users_and_attrs\n gym_quotes = [\"When my body shouts ‘STOP’, my mind screams ‘NEVER’.\",\n \"Excuses don’t kill the fat, exercises do.\",\n \"If you have time for Facebook, you have time for exercise.\",\n \"A year from now, you’ll wish you had started today.\",\n \"Fitness is not about being better than someone else, it’s about being better than you used to be.\",\n \"Change your body by changing your thoughts.\",\n \"Never say the sky’s the limit when there are footprints on the moon.\",\n \"Fall in love with taking care of your body.\",\n \"A 1-hour workout is 4% of your day. #noexcuses\",\n \"Being defeated is often a temporary condition. Giving up is what makes it permanent.\",\n \"Respect your body. It’s the only one you get.\",\n \"A healthy lifestyle is something we refine over time – not overnight.\",\n \"Good is not enough if better is possible.\",\n \"The best project you will ever work on is you.\",\n \"Today I will do what others won’t, so tomorrow I can accomplish what others can’t.\",\n \"The secret to getting ahead is getting started.\",\n \"Push harder than yesterday if you want a different tomorrow.\",\n \"She believed she could, so she did.\",\n \"Pain is weakness leaving the body.\",\n \"Hard work beats talent when talent doesn’t work hard.\",\n \"It always seems impossible until it’s done.\",\n \"The body achieves what the mind believes.\",\n \"Of all the people on the planet, you talk to yourself more than anyone… make sure you’re saying the right things.\",\n \"Don’t stop now.\",\n \"You are confined only by the walls you build yourself.\",\n \"Exercise is king. Nutrition is queen. Put them together and you have got a kingdom – Jack Lalanne.\",\n \"It is health that is real wealth – Gandhi.\",\n \"The decent method you follow is better than the perfect method you quit – Tim Ferris.\",\n \"Just remember the letter ‘S’: salads, stir-fries, scrambles, soups, smoothies, and sushi. You can’t go wrong with the letter ‘S’ – Harley Pasternak.\",\n \"Progress, not perfection – Kimberly Snyder.\",\n \"The quality of your sleep depends on the quality of your day – Deepak Chopra.\",\n \"Take care of your body, it’s the only place you have to live – Jim Rohn.\",\n \"Get comfortable with being uncomfortable! – Jillian Michaels.\",\n \"You miss 100% of the shots you never take – Wayne Gretzky.\",\n \"Continuous improvement is better than delayed perfection – Mark Twain.\",\n \"When you start eating food without labels, you no longer need to count the calories – Amanda Kraft.\",\n \"You can’t stop waves, but you can learn how to surf – John Kabat-Zinn.\",\n \"Why are you stopping? You think I can’t see you? – Shaun T\",\n \"We are what we repeatedly do. Excellence, then, is not an act, but a habit. 
– Aristotle.\",\n \"Eat food, not too much, mostly plants – Michael Pollan.\",\n \"With great size comes great responsibility.\",\n \"There are no shortcuts – everything is reps, reps, reps.\",\n \"You shall gain, but you shall pay with sweat, blood, and vomit.\",\n \"Life´s too short to be small.\",\n \"I’m not on steroids, but thanks for asking…\",\n \"Everybody wants to be a bodybuilder but nobody wants to lift heavy weights!\",\n \"I don’t do this to be healthy – I do this to get big muscles.\",\n \"My warmup is your workout.\",\n \"I got 99 problems but a BENCH ain’t one.\",\n \"If it wasn’t hard, everyone would do it.\"]\n\n 1000.times do\n random_boolean = [true, false]\n random_gender = [\"male\",\n \"female\",\n \"non_binary\"]\n #! Note - Location is hard coded to \"Seattle.\".\n results = Geocoder.search(\"Seattle\")\n lat = results.first.coordinates[0]\n long = results.first.coordinates[1]\n\n lat_long = RandomLocation.near_by(lat, long, 100000)\n seed_lat = lat_long[0]\n seed_long = lat_long[1]\n\n def generate\n pre = Faker::Name.prefix\n verb = Faker::Verb.past\n animal = Faker::Creature::Animal.name\n name = pre + verb.capitalize + animal.capitalize + rand(1..100).to_s\n newname = name.split(/[ ,.\\/]/).join(\"\").to_s\n end\n\n name = generate\n\n User.create(\n name: name,\n email: \"#{name}@gmail.com\",\n password_digest: \"1234\",\n bio: gym_quotes.sample,\n age: rand(18..100),\n gender: random_gender.sample,\n diet: Diet.create(\n keto: random_boolean.sample,\n low_carb: random_boolean.sample,\n vegan: random_boolean.sample,\n vegetarian: random_boolean.sample,\n pescatarian: random_boolean.sample,\n alkaline: random_boolean.sample,\n raw_food: random_boolean.sample,\n intermittent_fasting: random_boolean.sample,\n paleo: random_boolean.sample,\n clean_eating: random_boolean.sample,\n mediterranean: random_boolean.sample,\n ),\n exercise_discipline: ExerciseDiscipline.create(\n cardio: random_boolean.sample,\n muscle_strengthening: random_boolean.sample,\n aerobic: random_boolean.sample,\n\n ),\n exercise_time: ExerciseTime.create(\n early_morning: random_boolean.sample,\n morning: random_boolean.sample,\n afternoon: random_boolean.sample,\n early_evening: random_boolean.sample,\n late_evening: random_boolean.sample,\n late_night: random_boolean.sample,\n\n ),\n gender_preference: GenderPreference.create(\n male: random_boolean.sample,\n female: random_boolean.sample,\n non_binary: random_boolean.sample,\n none: random_boolean.sample,\n ),\n location: Location.create(\n latitude: seed_lat,\n longitude: seed_long,\n ),\n music_preference: MusicPreference.create(\n rock: random_boolean.sample,\n techno: random_boolean.sample,\n rap: random_boolean.sample,\n country: random_boolean.sample,\n pop: random_boolean.sample,\n alternative: random_boolean.sample,\n classical: random_boolean.sample,\n funk: random_boolean.sample,\n latin: random_boolean.sample,\n jazz: random_boolean.sample,\n none: random_boolean.sample,\n ),\n )\n end\nend", "def load_setup( name )\n reader = create_fixture_reader( name ) ### \"virtual\" method - required by concrete class\n\n reader.each do |fixture_name|\n load( fixture_name )\n end\n end", "def seed_db\n\n # Grab login page\n @agent.get(@@server + @@standings) do |login_page|\n \n # Login and fetch the standings page\n standings_page = do_login(login_page)\n \n # Get a link to a random bracket (the first one)\n bracket_link = \"\"\n standings_page.links.each do |link|\n next unless 
/(#{@@server})\\/(brackets)\\/\\d+\\/\\d+/.match(link.href)\n bracket_link = link.href\n break\n end\n\n # Pull in the regions, teams, and games from the bracket page and store them in the DB\n scrape_data_and_create_records(bracket_link)\n\n end \n\n end", "def load_database_yaml; end", "def load_database_yaml; end", "def init\n create_file options[:inventory_config] do\n<<-YML\n# sources:\n# - \"https://supermarket.getchef.com\"\n# cookbooks:\n# cookbook-name:\n# versions:\n# - \"~> 4.0.2\"\n# - \"> 5.0.0\"\n# git:\n# location: url | path\n# branches:\n# - a_branch_name\n# refs:\n# - SHA\n\nYML\n end\n end", "def load_fixtures\n Firefly::Url.destroy\n urls = YAML::load(File.open('spec/fixtures/urls.yml'))\n urls.each { |key, url| Firefly::Url.create(url) }\n end", "def load_yaml_files\n @room_list = YAML.load_file(__dir__ + '/rooms.yml')\n @weapon_list = YAML.load_file(__dir__ + '/weapons.yml')\n @enemy_list = YAML.load_file(__dir__ + '/enemy.yml')\n @high_score_list = YAML.load_file(__dir__ + '/high_score.yml')\n end", "def seed_data\n\n target_isolates = rand(20) + 5\n shipment = shipments.new({:target_isolates => target_isolates})\n\n base_objective = BaseObjective.parent_objectives.to_a.sample\n if base_objective.first == \"objective_b\"\n bacteria_type = {:bacteria_type => ObjectiveB::bacteria_type_values.sample.last}\n code_prefix = bacteria_type.values.last\n end\n\n objective = base_objective.last\n\n starting_code = starting_code_for(objective, code_prefix ||= nil)\n\n shipment.target_isolates.times do |x|\n parent_objective = objective.new(bacteria_type ||= {})\n \n code_number = parent_objective.code_prefix+\"#{(starting_code + x + 1)}\"\n\n isolate = shipment.isolates.new({code_number: code_number})\n isolate.parent = parent_objective\n isolate.save\n end\n\n shipment.update_attribute(:objective_name, \"#{objective.name} #{objective.new(bacteria_type ||= {}).name}\")\n\n end", "def load_file(file)\n buf = File.read(file)\n\n # parse the document into a Psych tree; we don't load here because we want\n # the file/line info while creating our entities.\n doc = YAML.parse(buf, filename: file)\n\n # Document should be an Array of Hashes\n seq = doc.children.first or return # ignore empty documents\n load_error!('not a yaml sequence (Array)', file, seq) unless seq.sequence?\n\n # Loop through each Hash\n seq.children.each do |map|\n\n # Make sure it's a mapping before we convert it to a ruby Hash\n load_error!('not a yaml mapping (Hash)', file, map) unless map.mapping?\n entity = YAML.send(:symbolize_names!, map.to_ruby)\n\n # Ensure they're not using some unknown keys\n unknown_keys = entity.keys - SUPPORTED_KEYS\n load_error!(\"unknown keys: #{unknown_keys}\", file, map) unless\n unknown_keys.empty?\n\n load_error!(\"id and update are mutually exclusive\", file, map) if\n entity[:id] and entity[:update]\n\n source = \"#{file}:#{map.start_line + 1}\"\n\n create = {}\n create[:id] = entity[:id] if entity.has_key?(:id)\n create[:update] = entity[:update] if entity.has_key?(:update)\n\n # Create an Array of the various base Entities that will be layered into\n # this Entity\n create[:base] = [entity[:base]].flatten.compact\n\n # Construct an Array of component arguments that will be sent to\n # Morrow::EntityManager#create_entity\n entity[:components] ||= []\n load_error!('The `components` field must be an Array; %s' %\n [ entity[:components].inspect ], file, map) unless\n entity[:components].is_a?(Array)\n\n create[:components] = entity[:components].map do |conf|\n case conf\n when 
Symbol\n conf\n when String\n conf.to_sym\n when Hash\n load_error!(<<~ERROR, file, map) unless conf.size == 1\n Multiple keys found in single component configuration. Note that\n the `components` field is an Array. Perhaps you missed a '-'\n before the next component after this one.\n ERROR\n\n # A Hash is a component with non-default values. The values may be\n # provided as a Hash, an Array (must have all elements), or a scalar\n # (for single field Components)\n comp, config = conf.first\n case config\n when Hash\n config.rekey! { |k| k.to_sym }\n when Array\n # don't make any changes\n else\n # turn this non-array value into an array of a single element\n config = [ config ]\n end\n { comp.to_sym => config }\n else\n load_error!('Unsupported component configuration type: %s' %\n [ conf.inspect ], file, map)\n end\n end\n\n create[:remove] = entity[:remove] || []\n\n # defer the action if we're not able to do it at the moment\n begin\n create_or_update(**create)\n rescue Morrow::UnknownEntity\n defer(source: source, entity: create)\n rescue Exception => ex\n raise Morrow::Error, <<~ERROR.chomp\n error in entity file: #{source}: #{entity.pretty_inspect\n .chomp.gsub(/\\n/, \"\\n\" + ' ' * 16)}\n ERROR\n end\n end\n\n # Attempt to flush any deferred actions now that we've loaded everything in\n # the file.\n flush\n end", "def setup\n # Retrieve fixtures via their name\n # @first = gizmo_types(:first)\n end", "def load_fixtures\n # fixtures = ENV['FIXTURES'] ? ENV['FIXTURES'].split(/,/) : Dir.glob(File.join(File.dirname(__FILE__), 'test', 'fixtures', '*.{yml,csv}')) \n fixtures = [ 'active_sources', 'semantic_relations', 'semantic_properties' 'data_records']\n fixtures.reverse.each { |f| ActiveRecord::Base.connection.execute \"DELETE FROM #{f}\" }\n fixtures.each do |fixture_file|\n Fixtures.create_fixtures(File.join('test', 'fixtures'), File.basename(fixture_file, '.*')) \n end \n end", "def seed!(verbose: false)\n puts '----Seeding ReportTypes----' if verbose\n attrs = {abbr:'R002',\n full_name: 'Appeals performance report',\n class_name: 'Stats::R002AppealsPerformanceReport',\n custom_report: true,\n seq_id: 100}\n create_or_update!(attrs)\n puts ' Created report R002' if verbose\n\n attrs = {abbr:'R003',\n full_name: 'Business unit report',\n class_name: 'Stats::R003BusinessUnitPerformanceReport' ,\n custom_report: true,\n seq_id: 200}\n create_or_update!(attrs)\n puts ' Created report R003' if verbose\n\n\n attrs = {abbr:'R004',\n full_name: 'Cabinet Office report',\n class_name: 'Stats::R004CabinetOfficeReport',\n custom_report: true,\n seq_id: 400}\n create_or_update!(attrs)\n puts ' Created report R004' if verbose\n\n\n attrs = {abbr:'R005',\n full_name: 'Monthly report',\n class_name: 'Stats::R005MonthlyPerformanceReport',\n custom_report: true,\n seq_id: 300}\n create_or_update!(attrs)\n puts ' Created report R005' if verbose\n\n attrs = {abbr: 'R105',\n full_name: 'Monthly report (SARs)',\n class_name: 'Stats::R105SarMonthlyPerformanceReport',\n custom_report: false,\n seq_id: 310}\n create_or_update!(attrs)\n puts ' Created report R105' if verbose\n\n attrs = {abbr:'R006',\n full_name: 'Business unit map',\n class_name: 'Stats::R006KiloMap',\n custom_report: false,\n seq_id: 9999}\n create_or_update!(attrs)\n puts ' Created report R006' if verbose\n\n attrs = {abbr: 'R103',\n full_name: 'Business unit report',\n class_name: 'Stats::R103SarBusinessUnitPerformanceReport',\n custom_report: true,\n seq_id: 250}\n create_or_update!(attrs)\n puts ' Created report R103' if 
verbose\n\n create_or_update!(attrs)\n puts ' Created report R105' if verbose\n end", "def seed\n 100.times do |i|\n u = User.new(name:\"#{SecureRandom.hex(4)}\",\n email:\"#{SecureRandom.hex(2)}@#{SecureRandom.hex(2)}.#{SecureRandom.hex(1)}\",\n password: \"a\",\n age: 21,\n coordinates: [(43.607+rand(-10..10)).round(6), (-79.708+rand(-10..10)).round(6)],\n tags: [\"a\", \"b\", \"c\", \"d\", \"e\", \"f\"].sample(2))\n rand(2..6).times do\n u.photos << Photo.new(path: \"essence#{rand(1..25)}.jpg\",\n tags: [\"a\", \"b\", \"c\", \"d\", \"e\", \"f\"].sample(2))\n end\n u.save!\n end\n user = User.first\n user.name = \"markpoon\"\n user.email = \"[email protected]\"\n user.password =\"some phrase\"\n user.save\nend", "def seed; end", "def db_seed\n\tbrand = []\n\tproducts_names = []\n\tprices = []\n\tarray = []\n\t10.times do |i|\n\t\tbrand.push(Faker::Commerce.department)\n\t\tproducts_names.push(Faker::Commerce.product_name)\n\t\tprices.push(Faker::Commerce.price)\n\t\tProduct.create(id:i,brand:brand[i],name:products_names[i],price:prices[i])\n\tend\t\nend", "def initialize(world, seeds = [])\n @world = world\n @seeds = seeds\n plant_seeds\n end", "def load_icu_players\n @icu_players_cache ||= YAML.load(File.read(File.expand_path('../factories/icu_players.yml', __FILE__)))\n @icu_players_cache.inject({}) do |h, (id, data)|\n h[id] = FactoryGirl.create(:icu_player, data.merge(id: id))\n h\n end\nend", "def load_rng(seeds = Hash.new(Random.new_seed))\n @move_damage_rng = Random.new(seeds[:move_damage_rng])\n @move_critical_rng = Random.new(seeds[:move_critical_rng])\n @move_accuracy_rng = Random.new(seeds[:move_accuracy_rng])\n end", "def run\n ngen_auth\n @validate = Validator.new\n stack = EcoSystem.new\n @instanceparameters = stack.yaml_reader(config[:yaml])\n stack.validate = @validate\n config[:action] = 'create'\n stack.options = config\n stack.supress_output ='1'\n stack.instanceparameters = @instanceparameters\n inst_result = stack.opt_parse\n ho_hum\n \n inst_result.each do |server|\n config[:inst] = server['server']['display_name']\n instance = {}\n # puts chef_attrs = server['server']['userdata'].at(0)['chef']\n chef_attrs = server['server']['userdata'].at(0)['chef'] if !server['server']['userdata'].at(0)['chef'].nil?\n chef_attrs.each do |attr, value|\n instance[attr] = value\n end\n chef_node_configuration(instance)\n config[:chef_node_name] = config[:inst]\n inst_details = AttrFinder.new(server)\n inst_details.options = config\n inst_details.validate = @validate\n inst_details.function = 'server' \n inst = InstanceClient.new\n inst.validate = @validate\n inst.options = config\n inst.supress_output ='1'\n inst.instanceparameters = @instanceparameters\n ssh_host = inst.list_instance_ip(inst_details.compartment, inst_details.instance).at(1)\n bootstrap_for_linux_node(ssh_host).run\n node_attributes(ssh_host, 'IaaS')\n end\n end", "def seed_users\n 10.times do\n name = Faker::Movies::PrincessBride.character\n new_user = User.new(username: name.split(\" \").join(\"\"), password: \"1234\")\n new_user.save\n Artist.create(name: name.split(\" \").join(\"\"), user_id: new_user[:id])\n end\nend", "def board_setup\n create_boats.each do |boat|\n board.place_boat(boat, board.random_boat_location(boat.size))\n end\n end", "def seed_data_for_preference(prof, student)\n course = Course.create!(name: \"Computer Networks (Preference Voting)\",\n pin: Faker::Number.number(digits: 6),\n professor_id: prof.id,\n minimum_group_member: 2,\n maximum_group_member: 5,\n has_group: false,\n is_voting: 
false,\n state: \"choose_algo\",\n withProject: true)\n students = []\n 31.times do\n first = Faker::Name.first_name\n last = Faker::Name.last_name\n s = User.create!(firstname: first,\n lastname: last,\n email: last + Faker::Number.number(digits: 6).to_s + \"@brandeis.edu\",\n password: \"password\", type: \"Student\", time_zone: seed_time_zone)\n students << s\n Taking.create!(student_id: s.id, course_id: course.id, state: \"created\")\n end\n Taking.create!(student_id: student.id, course_id: course.id, state: \"created\")\n students << student\n projects = []\n 7.times do\n active_project = Project.create!(project_name: Faker::Team.name, course_id: course.id, description: Faker::Game.genre,\n is_active: true, number_of_likes: 0, added_by: students.sample)\n projects << active_project.id\n end\n 2.times do\n inactive_project = Project.create!(project_name: Faker::Team.name, course_id: course.id, description: Faker::Game.genre,\n is_active: false, number_of_likes: 0, added_by: students.sample)\n end\n seed_preference(course, students)\n puts \"seed preference\"\nend", "def create_seed(&block)\n sources = Ohai.config[:plugin][:shard_seed][:sources] || default_sources\n data = \"\"\n sources.each do |src|\n data << case src\n when :fqdn\n fqdn\n when :hostname\n hostname\n when :machine_id\n machine_id\n when :machinename\n machinename\n else\n yield(src)\n end\n end\n if data.empty?\n Ohai::Log.error(\"shard_seed: Unable to generate seed! Either ensure 'dmidecode' is installed, or use 'Ohai.config[:plugin][:shard_seed][:sources]' to set different sources.\")\n raise \"Failed to generate shard_seed\"\n end\n shard_seed digest_algorithm.hexdigest(data)[0...7].to_i(16)\n end", "def setup\n # Retrieve fixtures via their name\n # @first = vr_cc_solicitudes(:first)\n end", "def add_teams\n Dir['teams/*.yml'].each do |team|\n t = YAML.load_file team\n Team.create name: t['name'], members: t['members']\n end\nend", "def from_yaml\n\t\tdata = YAML.load File.read(\"save.yaml\")\n\t\t@board = Board.new(data[:board], data[:positions])\n\t\t@current_turn = data[:current_turn]\n\t\t@ai_on = data[:ai_on]\t\t\n\tend", "def db_seed\n data_path = File.dirname(__FILE__) + \"/data.csv\"\n CSV.open(data_path, \"ab\") do |csv|\n 0.upto(9).each do |index|\n csv << [index, Faker::Company.name, Faker::Commerce.product_name, Faker::Commerce.price]\n end\n end\nend", "def add_seeds_data # :nodoc:\n dane = <<-CONTENT\nprint \"Adding seeds data...\"\n\npasswd = if Rails.env.development?\n \"admin\"\nelse\n (0...5).map{ ('a'..'z').to_a[rand(26)] }.join\nend\n\nadmin = User.create email: '[email protected]',\n password: passwd,\n password_confirmation: passwd,\n role: :admin\n\nif admin.errors.empty?\n puts \"\\\\nAdmin password: '\\#{passwd}'\"\nelse\n puts \"\\\\nError while creating admin account: \\#{admin.errors.full_messages.join('. 
')}.\"\nend\n\nputs \"done!\"\n CONTENT\n\n prepend_file \"db/seeds.rb\", dane\n end", "def load_settings\n attributes = YAML.load_file(\"defaults.yml\")\n begin\n custom_attributes = YAML.load_file(\"config.yml\")\n attributes = simple_deep_merge(attributes, custom_attributes)\n rescue\n end\n attributes\nend", "def create_article_tags\n 5.times do\n Tag.create(tag_text: Faker::Hipster.word, author_id: Author.pluck(:id).sample, taggable_type: \"Article\", taggable_id: Article.pluck(:id).sample)\n end\nend", "def seed_categories\n ['Shelter', 'Food', 'Health', 'Jobs/Training'].each do |name|\n Category.find_or_create_by(name: name)\n end\nend", "def initialize(input_yaml, options, skip_replication=false)\n @input_yaml = (input_yaml.kind_of?(String) ? YAML.load(input_yaml) : input_yaml)\n\n @infrastructure = options[:infrastructure]\n @database_type = options[:database]\n @database_type = @database_type.to_sym if !@database_type.nil?\n @min_images = options[:min_images]\n @max_images = options[:max_images]\n @replication = options[:replication]\n @read_factor = options[:read_factor]\n @write_factor = options[:write_factor]\n \n @nodes = []\n @skip_replication = skip_replication\n end", "def seed\n self.username = Etc.getlogin\n self.id = Time.now.to_ntp\n self.version = self.id\n self.network_type = \"IN\"\n self.address_type = \"IP4\"\n self.unicast_address = Socket.gethostname\n\n self.name = \" \"\n self.connection_network_type = self.network_type\n self.connection_address_type = self.address_type\n self.connection_address = local_ip\n self.start_time = 0\n self.stop_time = 0\n end", "def seed_categories\n category_text = File.read(Rails.root.join('lib', 'seeds', 'categories.csv'))\n category_csv = CSV.parse(category_text, headers: true)\n\n category_csv.each_with_index do |row, index|\n Category.create(\n main_category: row[0],\n sub_category: row[1],\n external_id: index + 1\n )\n end\nend", "def load_default_test_data_to_db_before_test\n community1 = Community.where(ident: \"test\").first\n community2 = Community.where(ident: \"test2\").first\n community3 = Community.where(ident: \"test3\").first\n\n person1 = FactoryGirl.create(:person,\n community_id: community1.id,\n username: \"kassi_testperson1\",\n emails: [\n FactoryGirl.build(:email, community_id: community1.id, :address => \"[email protected]\") ],\n is_admin: 0,\n locale: \"en\",\n encrypted_password: \"$2a$10$WQHcobA3hrTdSDh1jfiMquuSZpM3rXlcMU71bhE1lejzBa3zN7yY2\", #\"testi\"\n given_name: \"Kassi\",\n family_name: \"Testperson1\",\n phone_number: \"0000-123456\",\n created_at: \"2012-05-04 18:17:04\")\n\n person2 = FactoryGirl.create(:person,\n community_id: community1.id,\n username: \"kassi_testperson2\",\n emails: [\n FactoryGirl.build(:email, community_id: community1.id, :address => \"[email protected]\") ],\n is_admin: false,\n locale: \"en\",\n encrypted_password: \"$2a$10$WQHcobA3hrTdSDh1jfiMquuSZpM3rXlcMU71bhE1lejzBa3zN7yY2\", #\"testi\"\n given_name: \"Kassi\",\n family_name: \"Testperson2\",\n created_at: \"2012-05-04 18:17:04\")\n\n FactoryGirl.create(:community_membership, :person => person1,\n :community => community1,\n :admin => 1,\n :consent => \"test_consent0.1\",\n :last_page_load_date => DateTime.now,\n :status => \"accepted\" )\n\n FactoryGirl.create(:community_membership, :person => person2,\n :community=> community1,\n :admin => 0,\n :consent => \"test_consent0.1\",\n :last_page_load_date => DateTime.now,\n :status => \"accepted\")\n\n FactoryGirl.create(:email,\n :person => person1,\n :address => 
\"[email protected]\",\n :send_notifications => true,\n :confirmed_at => \"2012-05-04 18:17:04\")\n\n FactoryGirl.create(:email,\n :person => person2,\n :address => \"[email protected]\",\n :send_notifications => true,\n :confirmed_at => \"2012-05-04 18:17:04\")\n\n FactoryGirl.create(:marketplace_configurations,\n community_id: community1.id,\n main_search: \"keyword\",\n distance_unit: \"metric\",\n limit_search_distance: 0)\n\n end" ]
[ "0.65405065", "0.6289179", "0.62849", "0.6165538", "0.6074019", "0.6074019", "0.6046924", "0.6001355", "0.59213084", "0.589432", "0.5819937", "0.58128124", "0.57850516", "0.57461166", "0.5740708", "0.5727112", "0.5717558", "0.5716261", "0.56970215", "0.5696358", "0.5672369", "0.56390226", "0.56368023", "0.5613749", "0.5560061", "0.5555182", "0.5547813", "0.55477846", "0.5486534", "0.54785615", "0.546475", "0.544834", "0.5437521", "0.54357177", "0.53956795", "0.5391676", "0.5382756", "0.5382195", "0.5381521", "0.53811187", "0.5374852", "0.5366288", "0.53638524", "0.53500324", "0.5328612", "0.53189087", "0.5318905", "0.5306803", "0.5304787", "0.5301008", "0.528975", "0.52790135", "0.52733725", "0.52651227", "0.52632266", "0.5262444", "0.525936", "0.5256238", "0.5256238", "0.5255964", "0.5252075", "0.52440715", "0.52298355", "0.52296", "0.52199817", "0.52131295", "0.52072036", "0.52050644", "0.52050644", "0.51964265", "0.51871824", "0.51832426", "0.51803786", "0.5168834", "0.5167228", "0.51652694", "0.51493055", "0.51403177", "0.5135213", "0.5125808", "0.5113042", "0.51121587", "0.5110719", "0.50952923", "0.5092106", "0.5091234", "0.5087581", "0.50869906", "0.5079633", "0.50777936", "0.5064311", "0.50627387", "0.5061047", "0.5054586", "0.5052384", "0.5049267", "0.50427586", "0.5040175", "0.50391644", "0.5038273" ]
0.7922321
0
Produces > Thursday May 25, 2006
def date_formatted date.strftime("%A %B %d, %Y") end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def display_day_published\r\n \"Published : #{created_at.strftime('%-b %-d, %Y')}\"\r\n end", "def display_day_published\n \"Published : #{created_at.strftime('%-b %-d, %Y')}\"\n end", "def wday() end", "def week\n published_at.strftime('%W')\n end", "def formatted_date\n \"#{self.day.date.strftime(\"%A, %B %e, %Y\")}\"\n end", "def w_day; end", "def show_date month_of_year, day_of_week\n month = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']\n day = dom\n return month[month_of_year-1] + day[day_of_week].to_s\nend", "def mweek; (5 - wday + day) / 7 end", "def week; end", "def date_published_human\n date_published.strftime(\"on %B %d, %Y at %H:%M\")\n end", "def humanize_date(date)\n current = Date.today\n case\n when (3..6).include?(date.yday - current.yday)\n date.strftime(\"This %A\")\n when date.yday == (current.yday + 2)\n \"In two days\"\n when date.yday == (current.yday + 1)\n \"Tomorrow\"\n when date.yday == current.yday\n \"Today\"\n when date.yday == (current.yday - 1)\n \"Yesterday\"\n when date.yday == (current.yday - 2)\n \"Two days ago\"\n when (3..6).include?((current.yday - date.yday).abs)\n date.strftime(\"%A\")\n when date.year != current.year\n date.strftime(\"%m/%d/%Y\")\n when date.cweek == (current.cweek + 2)\n date.strftime(\"%A in two weeks\")\n when date.cweek == (current.cweek + 1)\n date.strftime(\"Next %A\")\n when date.cweek == (current.cweek - 1)\n date.strftime(\"Last %A\")\n when date.cweek == (current.cweek - 2)\n date.strftime(\"%A two weeks ago\")\n else\n date.strftime(\"%b %d\")\n end\n end", "def thursday\n day(:thursday)\n end", "def yday() end", "def eval_date\n # FIXME: Make pref?\n h = Hash[\"mo\", 1, \"di\", 2, \"mi\", 3, \"do\", 4, \"fr\", 5, \"???\", 6]\n h.merge(Hash[\"mo\", 1, \"tu\", 2, \"we\", 3, \"th\", 4, \"fr\", 5, \"???\", 6])\n a = description.strip.downcase\n a = \"???\" if a.length < 3 || !h.include?(a[0..1])\n day = h[a[0..1]]\n time = a[2..a.length-1].strip.rjust(3, \"0\")\n \"#{day} #{time}\"\n end", "def beginning_of_week; end", "def beginning_of_week; end", "def goth_published_date(goth)\n\t\tgoth.published_at.strftime('%b %d, %Y %H:%M')\n\tend", "def ymdw\n wek = %w[Sun Mon Tue Wed Thu Fri Sat][wday]\n format(\"%04d-%02d-%02d %s\", year, month, mday, wek)\n end", "def pubdate(format=\"%Y%m%d\")\n t = Time.now\n \n case t.strftime(\"%A\")\n when 'Saturday'\n (t-(48*60*60)).strftime(format) \n when 'Sunday'\n (t-(24*60*60)).strftime(format)\n else\n t.strftime(format)\n end\n end", "def print_dates ()\n for date in DATES\n pp \"#{date[1]} Tweets On #{date[0].strftime(\"%Y-%m-%d\")}\"\n end\nend", "def wday\n to_g.wday\n end", "def web_date; strftime(WEB_DATE_FORMAT); end", "def fancy_date(date)\n h date.strftime(\"%A, %B %d, %Y\")\n end", "def each_thursday( n=1, offset=0, dur=1); each_wdays(self.Thu,n,offset,dur); end", "def inspect\n \"#{year}-W#{week}-#{day}\"\n end", "def get_date_scraped\n return Date.today.to_s\nend", "def get_date_scraped\n return Date.today.to_s\nend", "def wday_shortname date\n %w'вс пн вт ср чт пт сб'[date.wday]\n end", "def monday\n date\n end", "def pub_date_facet\n return nil unless pub_date\n return \"#{pub_date.to_i + 1000} B.C.\" if pub_date.start_with?('-')\n return pub_date unless pub_date.include? 
'--'\n\n \"#{pub_date[0, 2].to_i + 1}th century\"\n end", "def jwday\n (to_g.wday + 1) % 7\n end", "def get_date_scraped\n\n return Date.today.to_s\n\nend", "def get_date_scraped\n\n return Date.today.to_s\n\nend", "def header(month, day, year=Time.now.year)\n time = Time.new(year, month, day)\n time.strftime(\"-- %b #{[time.week_start, time.week_end].uniq.join(\"-\")}\\n\")\n end", "def fmt_dow_date(dt) _format_dt dt, DOW_M_D end", "def day_name; Date::DAYNAMES[wday] end", "def day() end", "def day_in_week_str\n\t\tdays = []\n\t\ttoday = Time.now\n\t\tdays.push(formatted(today))\n\t\ttoday_week_day = today.wday\n\t\tanothers = (0..6).select {|e| e != today_week_day}\n\t\tanothers.map do |e|\n\t\t\tdays.push(formatted(Time.at(today.to_i - e*day_second)))\n\t\tend\n\n\t\tdays.sort\n\tend", "def dayOfWeek\n days = 0;\n tempYear = MIN_YEAR\n while tempYear < @year\n days += SimpleDate.daysInYear(tempYear).to_i\n tempYear+=1\n end\n\n days = days + ordinalDate\n #puts \"days #{days}\"\n days = days % 7\n\n end", "def ymdnw\n nth = %w[1st 2nd 3rd 4th 5th][(mday + 6) / 7]\n wek = %w[Sun Mon Tue Wed Thu Fri Sat][wday]\n format(\"%04d-%02d-%02d %s %s\", year, month, mday, nth, wek)\n end", "def printHTMLDateShort(tobj)\n return alStrftime(tobj,'%y-%m-%d')\n end", "def published_natural\n @published.strftime(\"#{@published.day.ordinalize} %B %Y\")\n end", "def weekday\n current_time = Time.new\n current_time.strftime(\"%A\")\nend", "def human_date() # for Date object\n # return 'tomorrow' if tomorrow?\n # return 'today' if today?\n # return 'yesterday' if yesterday?\n # return \"HumanDateBoh(#{to_s})\" # TBD\n cool_date2()\n end", "def livia_date_effect(new_date)\n if new_date\n days= (Time.zone.now.to_date - new_date.to_date).to_i\n weekend = new_date.wday\n add_class = ((weekend == 0) || (weekend == 6))? \"weekend\" : \"\"\n \n if days ==0\n return %Q{\n <span style=\"color: #F88158\" class=\"blink #{add_class}\">#{new_date.to_time.strftime('%m/%d/%y') if new_date}</span>\n }\n elsif days > 0\n return %Q{\n <span class=\"#{add_class} red_text\"%>#{new_date.to_time.strftime('%m/%d/%y') if new_date}</span>\n }\n else\n return %Q{\n <span class=\"#{add_class}\">#{new_date.to_time.strftime('%m/%d/%y') if new_date}</span>\n }\n end\n else\n return ''\n end\n end", "def weekday\n current_time = Time.new\n current_time.strftime(\"%A\")\nend", "def W; cat @time.strftime('%W') end", "def day; end", "def publish_date_and_explanation(errata)\n bold = errata.publish_date_explanation == 'custom'\n # This is very ugly, sorry! (fixme)\n html = ''\n html << '<div class=\"compact\">'\n html << '<b>' if bold\n html << [h(errata.publish_date_for_display),\"<small style='color:#888'>(#{errata.publish_date_explanation})</small>\"].compact.join('<br/>')\n html << '</b>' if bold\n html << '<br/>'\n html << \"<small>#{time_ago_future_or_past(errata.publish_or_ship_date_if_available)}</small>\" if errata.publish_or_ship_date_if_available\n html << '</div>'\n html.html_safe\n end", "def formatted_published\n @published.strftime('%A, %d %B %Y at %I:%M %P')\n end", "def day_of_week\n start_on.strftime(WEEKDAY_NAME)\n end", "def w; cat @time.strftime('%w') end", "def pub_date\n Date.today\n end", "def formatted_smalldate(d) # Mon, Januaury 01\n d.strftime('%b, %d') unless !d\n end", "def week_result_string(day)\n start_of_week = day.beginning_of_week\n # adjust if the week started in a different month\n start = (day.month == start_of_week.month ? 
start_of_week.day - 1 : 0)\n end_of_week = day.end_of_week\n # adjust if the week finished in a different month\n finish = (day.month == end_of_week.month ? end_of_week.day - 1 : 30)\n month_result_string(day).slice(start..finish)\n end", "def send_on_date(n_weeks)\n today = category.user.today\n send_on_date = today + n_weeks * 7 + (send_on - today.wday % 7)\n send_on_date += 7 if today.wday > send_on\n send_on_date\n end", "def nice_date_form(the_date)\n \treturn the_date ? the_date.strftime('%b %d, %Y') : ''\n\tend", "def mday() end", "def word_date()\n #Find date action was completed (from database using date_completed function)\n date_string = self.date_completed\n #Return if date does not exist\n return if date_string == nil\n date_string = self.date_completed\n #Parse date into DATETIME format\n date = DateTime.parse(date_string)\n #if the action has not been completed return string anouncing when the action\n #will occur. Logic needed to check if the date is upcoming or overdue.\n if @completed == 'f'\n if date.to_date > Date.today\n return \"DUE: #{date.strftime(\"%B %e, %Y\")}\"\n elsif date.to_date == Date.today\n return \"DUE Today\"\n elsif date.to_date < Date.today\n return \"OVERDUE: #{date.strftime(\"%B %e, %Y\")}\"\n end\n #if action has already been completed, return the date completed.\n else\n return \"#{date.strftime(\"%B %e, %Y\")}\"\n end\n end", "def humanize_time(time)\n humanize_date(time.to_date) + (time.to_date.cweek == Date.today.cweek ? \" at \" + time.strftime(\"%l:%M %p\") : \"\")\n end", "def weeks_with_year\n rows = []\n rows << \"Su Mo Tu We Th Fr Sa\" + \" \"\n days = format_dates\n (0..7).each {|num|\n fields = days[num * 7, 7]\n rows << fields.join(\" \") + \" \" if fields\n }\n\n if rows.last.length < 22\n rows.last << \" \" * (22 - rows.last.length)\n end\n\n until rows.length == 7\n rows << \" \" * 22\n end\n rows\n end", "def day_of_week(date)\n 7 - date.cwday\n end", "def wrong_date(num)\n first_day(num).text\nend", "def wday() @m_date.wday end", "def week\n self.date ? 
self.date.strftime('%Y-%W') : ('0000-00')\n end", "def fortnights ; self * 2.weeks ; end", "def short_date( date )\n date.strftime( '%b %d, %Y')\n end", "def display_time_for_tweet\n created_at.strftime(\"%B #{created_at.day.ordinalize}, %Y\")\n end", "def day_of_week\n dnum = day\n dnum -= 10 if dnum > 20\n dnum -= 10 if dnum > 10\n dnum -= 1\n dnum\n end", "def expected_hand_in\n if self[:end_date].nil?\n Time.now.strftime('%Y-%m-%d')\n else\n d = self[:end_date].to_time + 4.weeks\n d.strftime('%Y-%m-%d')\n end\n end", "def day_of_week\n\tif @current_time.wday == 0 || @current_time.wday == 6\n\t\tweek_period = \"Weekends\"\n\telse\n\t\tweek_period = \"Weekdays\"\n\tend\nend", "def format_body\n #convert to ISO date\n d = ((@first_wkday_of_month+5) % 7) + 1\n\n #reset for Sunday\n if d == 7\n d = 0\n end\n\n #returns very long array\n padding = Array.new(d, nil)\n result = (padding + array_of_days).each_slice(7).to_a\n until result.length == 6\n result << [nil]\n end\n result\n end", "def email_date; strftime(EMAIL_TIME_FORMAT); end", "def date_published\n created_at.strftime(\"%b %d, %Y\")\n end", "def calendar_format_printer\n lambda do |event|\n if (event[:date].include?(':'))\n \"#{DateTime.parse(event[:date]).strftime('%b %d %H:%M')}\\t#{event[:text]}\"\n else\n \"#{Date.parse(event[:date]).strftime('%b %d')}\\t#{event[:text]}\"\n end\n end\nend", "def weeks() 7 * days end", "def format_date_nicely(date)\nend", "def yday\n end", "def cweek\n end", "def date\n \"#{day} #{month.capitalize} #{year}\"\n end", "def get_date(date)\n date == Date.today ? \"Today\" : date.strftime(\"%A, %B %e %Y\")\n end", "def pretty_date(time)\n time.strftime(\"%d %b %Y\")\n end", "def pretty_date(time)\n time.strftime(\"%d %b %Y\")\n end", "def day_of_the_week(time)\n Date::DAYNAMES[time.wday]\nend", "def week\n @obj.date.strftime(\"%V\")\n end", "def pretty_date(time)\n time.strftime(\"%B %d, %Y\")\n end", "def pretty_date(time)\n time.strftime(\"%B %d, %Y\")\n end", "def easy_date; date; end", "def date_short(date)\n\t\tdate.strftime(\"%b %e, %Y\") if !date.blank?\n\tend", "def date_as_label\n date_played.to_s\n end", "def name_of_day(style = :full)\n @time.strftime(style == :full ? \"%A\" : \"%a\")\n end", "def beginning_of_fall_semester\n week = 4\n\n date = Chronic.parse(\"#{week}th thursday last august\")\n while date\n week += 1\n date = Chronic.parse(\"#{week}th thursday last august\")\n end\n\n Chronic.parse(\"#{week - 1}th thursday last august\").to_datetime\n end", "def date; end", "def date; end", "def date; end", "def date; end", "def date_for(date)\n content_tag :time do\n date.strftime(\"%e %b\")\n end\n end", "def human_occurrence_date(occurrence)\n if date = occurrence.date\n if date.year == Date.today.year\n date.strftime '%B %d'\n else\n date.strftime '%B %d, %Y'\n end\n end\n end", "def date_end # originally date_start\n\tdate = Date.today\n\t(1..7).each do |n|\n\t\tdate = Date.today - n#.days\n\t\tbreak if date.wday == 6 # 0 = Sun, 1 = Mon ... 6 = Sat\n\tend\n\tdate\nend", "def inspect\n \"it is the \" + ordinal_list(@weeks) + \" week of the month\"\n end" ]
[ "0.6963673", "0.686262", "0.66893697", "0.6641272", "0.66404676", "0.66287845", "0.6593285", "0.6473139", "0.64703923", "0.64516735", "0.6395532", "0.6374176", "0.63724506", "0.6358772", "0.633706", "0.633706", "0.6332694", "0.63323814", "0.6321644", "0.63023096", "0.6278043", "0.62653106", "0.62590027", "0.6254988", "0.62531656", "0.62428117", "0.62428117", "0.62261015", "0.6225326", "0.6213647", "0.62114", "0.6203218", "0.6203218", "0.61906695", "0.61902475", "0.6189564", "0.61849505", "0.6180144", "0.6163838", "0.6159226", "0.6144539", "0.6144149", "0.61255544", "0.6121646", "0.6117984", "0.61116743", "0.61110014", "0.6104501", "0.6102205", "0.6097536", "0.6086575", "0.6076574", "0.607468", "0.6070163", "0.6068467", "0.6065777", "0.6064858", "0.60594064", "0.6054863", "0.6032389", "0.6030744", "0.6027669", "0.6025773", "0.602222", "0.6017051", "0.60089535", "0.60088515", "0.6001221", "0.59974754", "0.599517", "0.5993783", "0.5991612", "0.5978823", "0.59699744", "0.5967159", "0.5967027", "0.5961716", "0.59577197", "0.59513843", "0.59500206", "0.5945016", "0.5935054", "0.5935054", "0.5935042", "0.5933058", "0.59310436", "0.59310436", "0.5931018", "0.5930422", "0.5929448", "0.5922276", "0.5922012", "0.5908955", "0.5908955", "0.5908955", "0.5908955", "0.5901315", "0.5893738", "0.58873713", "0.58867747" ]
0.5890699
98
Generate a mail code, we've just made up some random format for fun
def new @mail_code = new_mail_code end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mk_email\n ps = \"P.S. Jeg har vedhæftet relevante dokumenter, så som eksamensbevis og CV i mailen.\"\n ps += \"\\nP.P.S Denne email var genereret med, og sendt fra et Ruby script :)\" if @skills and @skills.include? \"Ruby\"\n \"#{mk_intro}\\n#{mk_body}#{mk_outro}#{mk_sender}#{ps}\"\n end", "def mk_sender\n [\"-- \", \"Med kærlig hilsen, \", \"Med venlig hilsen, \", \"MVH, \", \"Hilsen \"].sample + \"Nikolaj Lepka\\n\" +\n \"Telefon: 25 14 66 83\\n\" +\n \"Email: [email protected]\\n\" +\n \"Github: https://github.com/ElectricCoffee\\n\" +\n \"Twitter: https://twitter.com/Electric_Coffee\\n\\n\"\n end", "def generate_email\n o = [('a'..'z'),('A'..'Z')].map{|i| i.to_a}.flatten\n email = (0...15).map{ o[rand(o.length)] }.join << \"@\" << (0...10).map{ o[rand(o.length)] }.join << \".com\"\n end", "def email\n imie = %w(ala ola kasia basia tomek atomek xyz)\n name = %w(kowalski nowak)\n domain = %w(gmail.com hotmail.com yahoo.com)\n [imie[rand(imie.size)], name[rand(name.size)], '@', domain[rand(domain.size)]].join(\"\")\n end", "def gen_email(name)\n firstname = name.split.first\n lastname = name.split.last\n domains = %w(yahoo.com gmail.com privacy.net webmail.com msn.com\n hotmail.com example.com privacy.net)\n return \"#{(firstname + lastname).downcase}\"\\\n \"#{rand(100)}\\@#{domains[rand(domains.size)]}\"\nend", "def generate_order_code\n\t\t\tsize = 5\n\t\t\tcharset = %w{0 1 2 3 4 6 7 9 A B C D E F G H I J K L M N O P Q R S T U V W X Y Z}\n\t\t\tself.code = \"DH\" + Time.now.strftime(\"%Y\").last(2) + (0...size).map{ charset.to_a[rand(charset.size)] }.join\n\t\tend", "def generate_confirm_code\n chars = (\"A\"..\"Z\").to_a + (\"0\"..\"9\").to_a\n code = \"\"\n 20.times { |i| code << chars[rand(chars.size-1)] }\n # add the ID to ensure always unique!\n code << self.recipient_id.to_s + self.project_id.to_s\n\n self.confirm_code = code\n end", "def generate_random_email\n # random_number = rand(1000000 .. 9000000)\n random_number = Time.now.getlocal.to_s.delete \"- :\"\n \"ruslan.yagudin+#{random_number}@flixster-inc.com\"\nend", "def get_random_message()\n\t\topening = [ \"Developer Portfolio\",\n \t\t\"My Rails 4 Portfolio\",\n \t\t\"Hello World\" ]\n\n\t\tmiddle = [ \"Built from the Rails Tutorial\",\n \t\t\"Rails Apps Are Great\",\n \t\t\"Rails Twitter clone App\" ]\n\n\t\tending = [ \"Contact us if you need help.\",\n \t\t\"We are here to server you. 
\",\n \t\t\"Call us if you need to 412-555-1212.\"]\n\n\t\t\"#{opening[rand(3)]}\"\n\n\tend", "def generate_invite_code(size = 10)\n charset = %w[2 3 4 6 7 9 A C D E F G H J K M N P Q R T V W X Y Z]\n (0...size).map { charset.to_a[rand(charset.size)] }.join\n end", "def message\n \"From: <#{from}>\\nTo: <#{to}>\\nMIME-Version: 1.0\\nContent-type: text/html; charset=UTF-8\\nSubject: #{subject}\\n\\n#{code_html}\"\n end", "def generate_confirm_code\n\t\tcode = ('A'..'Z').to_a.shuffle[0,4].join\n\tend", "def generateEmailAddress()\n uuid = SecureRandom.hex(3)\n return \"%s.%[email protected]\" % [uuid, @MAILBOX]\n end", "def gen_code\n ret = '1' * 14\n d = Time.now - 3600 * 24 * 3\n\n ret[6] = d.strftime('%m')[0]\n ret[7] = d.strftime('%m')[1]\n\n ret[0] = d.strftime('%d')[0]\n ret[1] = d.strftime('%d')[1]\n\n ret[9] = d.strftime('%y')[0]\n ret[10] = d.strftime('%y')[1]\n\n [2,3,4,5,8,11,12].each do |idx|\n ret[idx] = rand(10).to_s\n end\n\n tmp = ret.split(//)\n tmp.pop\n\n counter = tmp.length\n\n while counter > -1 do\n tmp[counter - 1] = tmp[counter - 1].to_i * 2\n tmp[counter - 2] = tmp[counter - 2].to_i\n counter -= 2\n end\n\n tmp = tmp.map{|x| x.to_s}.join\n\n sum = 0\n tmp.split(//).each do |idx|\n sum += tmp[idx].to_i\n end\n\n ret[13] = (sum * 9 % 10).to_s\n ret\nend", "def generate_registration_code(user, email)\n code = RegistrationCode.new(email: email)\n @user.registration_codes << code\n UserMailer.email_confirmation(code, url_for(:email_confirmation)).deliver\n code\n end", "def new_random_url_code\n codes = MassEmail.all.map{|m|m.url_code}.map{|u|u.to_i}\n new_code = Random.rand(9999999)\n while codes.include?(new_code)\n new_code = Random.rand(9999999)\n end\n new_code.to_s.rjust(7, '0')\nend", "def new_random_url_code\n codes = MassEmail.all.map{|m|m.url_code}.map{|u|u.to_i}\n new_code = Random.rand(9999999)\n while codes.include?(new_code)\n new_code = Random.rand(9999999)\n end\n new_code.to_s.rjust(7, '0')\nend", "def generate_code(number)\n Array.new(number) { CHARSET.sample }.join\n end", "def build_email_content\n txt = I18n.t(\"estimate_request.fltk.email_content\", :origin => self.origin_port.name, :destination => destination_port.name, :count => self.estimate_items.first.number_of_items, :description => self.estimate_items.first.description)\n txt\n end", "def random_code\n code = ''\n arr = []\n 4.times { arr.push(rand(1..7)) }\n arr.each do |x|\n code += replace_array_value_to_char(x)\n end\n code\n end", "def get_the_email_html(nline)\n\trecipient = @ws[nline, 1].to_s\n\t$mailsubject = \"A l'attention de la mairie de #{recipient}\"\n\t$html_content = \"<p> <b> A l'attention de la mairie de #{recipient} </b> </p>\n<p>Bonjour, </p>\n<p>Je m'appelle Thomas, je suis élève à une formation de code gratuite, ouverte à tous, sans restriction géographique, ni restriction de niveau. La formation s'appelle The Hacking Project (http://thehackingproject.org/). Nous apprenons l'informatique via la méthode du peer-learning : nous faisons des projets concrets qui nous sont assignés tous les jours, sur lesquel nous planchons en petites équipes autonomes. Le projet du jour est d'envoyer des emails à nos élus locaux pour qu'ils nous aident à faire de The Hacking Project un nouveau format d'éducation gratuite.\nNous vous contactons pour vous parler du projet, et vous dire que vous pouvez ouvrir une cellule à #{recipient}, où vous pouvez former gratuitement 6 personnes (ou plus), qu'elles soient débutantes, ou confirmées. 
Le modèle d'éducation de The Hacking Project n'a pas de limite en terme de nombre de moussaillons (c'est comme cela que l'on appelle les élèves), donc nous serions ravis de travailler avec #{recipient} ! </p>\n<p> Yann, Moussaillon de The Hacking Project</p>\" \n\nend", "def build_message_template(send_to, data)\n to = send_to\n subject = \"****NO REPLY:Carrots Sender:Test Results\"\n message_body = \"Results: #{data}\"\n\n message = Google::Apis::GmailV1::Message.new(:raw => \"To: #{to}\\r\\nSubject: #{subject}\\r\\n\\r\\n#{message_body}\")\n send_message(message, initialize_new_gmail_service)\n end", "def generate_code(code_length=6)\n chars = (\"a\"..\"z\").to_a + (\"1\"..\"9\").to_a \n new_code = Array.new(code_length, '').collect{chars[rand(chars.size)]}.join\n Digest::MD5.hexdigest(new_code)[0..(code_length-1)].upcase\n end", "def get_telephone\n area_code = @rand.rand(1000).to_s.rjust(3, '0')\n last_four_digits = @rand.rand(10000).to_s.rjust(4, '0')\n \"(\" + area_code + \") 555-\" + last_four_digits\n end", "def genarate_phone_confirmation_code(phone)\n self.phone = phone\n code = ''\n 6.times do\n code = \"#{code}#{rand(9)}\"\n end\n self.phone_confirmation_code = code\n self.save\n self.send_confirmation_sms\n end", "def generate_code\n charset = Array('A'..'Z') + Array('a'..'z')\n Array.new(8) { charset.sample }.join\nend", "def send_email_text(cities)\n#text a return\n return\"<h2>Bonjour</h2>\n <p>Je m'appelle Charles Dacquay, je suis co-fondateur de lorganisme The Hacking Project qui propose une formation de dévelopeur web gratuite, ouverte à tous, sans restriction géographique, ni restriction de niveau.<br> Voici le lien de la formation s'appelle The Hacking Project (http://thehackingproject.org/).<br> la foramtion des baser sur la méthode du peer-learning : les étudiants sont répartie en petit groupes ou nous leur proposons la réaliation de projets concrets qui leur sont assignés chaque jours, sur lesquel ils travaillent et cherches des solution . Le projet du jour est d'envoyer des emails à nos élus locaux pour qu'ils nous aident à faire de The Hacking Project un nouveau format d'éducation gratuite.</p>\n <p>Nous vous contactons pour vous parler du projet, et vous dire que vous pouvez ouvrir une cellule à #{cities}, où vous pouvez former gratuitement 6 personnes (ou plus), qu'elles soient débutantes, ou confirmées.<br> Le modèle d'éducation de The Hacking Project n'a pas de limite en terme de nombre de moussaillons (c'est comme cela que l'on appelle les élèves), donc nous serions ravis de travailler avec #{cities} !</p>\n <p>Charles, co-fondateur de The Hacking Project pourra répondre à toutes vos questions : 06.95.46.60.80</p>\"\nend", "def compose_email_details()\n\t email_details = \"From: \" + @email[:from].formatted.first + \"\\n\"\n email_details << \"To: \" + @email[:to].formatted.join(', ') + \"\\n\"\n if [email protected]?\n email_details << \"Cc: \" + @email[:cc].formatted.join(', ') + \"\\n\"\n end\n email_details << \"Date: \" + @email[:date].to_s + \"\\n\"\n email_details << \"Subject: \" + @email[:subject].to_s + \"\\n\"\n email_details = \"bq. \" + Mail::Encodings.unquote_and_convert_to(email_details, 'utf-8') + \"\\n\"\n email_details\n end", "def generate_activation_code(size = 4)\n charset = %w{ 1 2 3 4 5 6 7 8 9}\n self.code = (0...size).map{ charset.to_a[rand(charset.size)] }.join\n end", "def generate_mac \n (\"%02x\"%(rand(64)*4|2))+(0..4).inject(\"\"){|s,x|s+\":%02x\"%rand(256)}\n end", "def sms_tpl\n \"Hi it's #{pastor_name || '(name of pastor)'}. 
I'm personally inviting you to join #{user_group.name} online via LoveRealm. All church materials, schedules, counseling have been put there. You will also make friends and grow there. It's free. Join here: www.loverealm.com/dl\"\n end", "def send_edit_code email, edit_code\n \n @first_name = \"lol\"\n \n require 'mandrill' \n m = Mandrill::API.new\n message = {\n :subject=> \"Your Summit Edit Code\", \n :from_name=> \"Social Summit List\",\n :from_email=>\"[email protected]\",\n :to=>[ \n { \n :email => email\n } \n ], \n :html=>render_to_string('emails/new_edit_code', :layout => false) \n } \n sending = m.messages.send message \n puts sending\n end", "def create_inbox\n \"_INBOX.#{SecureRandom.hex(13)}\"\n end", "def email_body\n sponsor_name = @config.plan.sponsor_name\n message = 'Hi there!'\\\n '<br />'\\\n '<br />'\\\n \"#{sponsor_name} has run their payroll. Please download the relevant reports below.\"\\\n '<br />'\\\n '<br />'\n message\n end", "def send_code\n return :blank_phone if self.phone.blank?\n self.generate_code\n SMS3.sendto self.phone, \"您的KickTempo手机验证码为:#{self.varified_code}。\"\n end", "def generate_and_send_password\n self.reload\n update_attribute :password, ('a'..'z').to_a[rand(26)].to_s + (0...5).map{ [rand(10)] }.join\n post_data = Net::HTTP.post_form URI.parse('http://3001300.ru/create_client_from_bonus.php'),\n { \n 'email' => self.email,\n 'password' => self.password\n }\n mail = InformMail.create! client: self, body: \"#{MessageText.welcome.sms.encode} #{self.password}\".encode(\"cp1251\")\n end", "def encode_email_address( addr )\n\n\t\t\trval = ''\n\t\t\t(\"mailto:\" + addr).each_byte {|b|\n\t\t\t\tcase b\n\t\t\t\twhen ?:\n\t\t\t\t\trval += \":\"\n\t\t\t\twhen ?@\n\t\t\t\t\trval += Encoders[ rand(2) ][ b ]\n\t\t\t\telse\n\t\t\t\t\tr = rand(100)\n\t\t\t\t\trval += (\n\t\t\t\t\t\tr > 90 ? Encoders[2][ b ] :\n\t\t\t\t\t\tr < 45 ? Encoders[1][ b ] :\n\t\t\t\t\t\t\t\t Encoders[0][ b ]\n\t\t\t\t\t)\n\t\t\t\tend\n\t\t\t}\n\n\t\t\treturn %{<a href=\"%s\">%s</a>} % [ rval, rval.sub(/.+?:/, '') ]\n\t\tend", "def international_phone\n \"011-#{rand(100) + 1}-#{rand(100)+10}-#{rand(10000)+1000}\"\n end", "def generate_checkincode\n sai_words = %w(sisters chapter formal music business sigma alpha iota)\n sai_words[rand(sai_words.length)]+(rand(89)+10).to_s()\n end", "def generate_string\n (0...6).map{(65+rand(26)).chr}.join\n end", "def build_message\n @subject = self.content[:short_subject]\n curate_text\n\n\n mail = Mail.new({\n :from => @replyto,\n :to => @recipients,\n :subject => @subject,\n :body => @text\n })\n\n if @no_send\n self.print\n return\n end\n\n\n return mail\n end", "def make_password_reset_code\n self.password_reset_code = Digest::SHA1.hexdigest( Time.now.to_s.split(//).sort_by {rand}.join )\n end", "def generate_code\n self.code = Digest::SHA1.hexdigest(\"--#{Time.now.to_s}--#{user_id}--#{rand(256)}\")[0,32]\n end", "def generate_code\n loop do\n self.code = SecureRandom.urlsafe_base64(40)\n break if valid?\n end\n end", "def m4_message\n [\n \"Tak for din interesse for at være **{{jobnavn}}** på Spejdernes Lejr 2017.\",\n \"\\n\\nVi har sendt dine kontaktoplysninger videre til {{kontaktperson}}, som er kontaktperson for denne opgave. 
Hvis du ikke hører fra {{kontaktperson}}, eller hvis jobbet ikke passede til dig alligevel, så er du velkommen til at kontakte os på [email protected], så hjælper vi dig med at finde et andet fantastisk lejrjob!\",\n \"\\n\\n_De fedeste Spejdernes Lejr 2017 hilsener_ \\n\",\n \"{{bruger}}, Jobcenteret SL2017\"\n ].join()\n end", "def assign_generated_mail_id\n if is_hr_or_info\n formatted_domain = website.gsub(/https:\\/\\/|http:\\/\\//, \"\").gsub(/www\\./, \"\")\n fname = first_name.gsub(/\\s/, \"\")\n lname = last_name.gsub(/\\s/, \"\")\n self.generated_mail_id1 = fname + \".\" + lname + \"@\" + formatted_domain\n self.generated_mail_id2 = lname + \".\" + fname + \"@\" + formatted_domain\n end\n end", "def gen_phone_num\n \"#{rand(900) + 100}-#{rand(900) + 100}-#{rand(1000) + 1000}\"\nend", "def generate_message(prev_user, next_user)\n text = \"<@#{next_user}> you can now use the laundry machine <@#{prev_user}> is done using it.\"\n return text\n end", "def get_the_email_html(ville)\n\treturn \"Bonjour,\nJe m'appelle Adrien, je suis élève à une formation de code gratuite, ouverte à tous, sans restriction géographique, ni restriction de niveau. La formation s'appelle The Hacking Project (http://thehackingproject.org/). Nous apprenons l'informatique via la méthode du peer-learning : nous faisons des projets concrets qui nous sont assignés tous les jours, sur lesquel nous planchons en petites équipes autonomes. Le projet du jour est d'envoyer des emails à nos élus locaux pour qu'ils nous aident à faire de The Hacking Project un nouveau format d'éducation gratuite.\n\nNous vous contactons pour vous parler du projet, et vous dire que vous pouvez ouvrir une cellule à #{ville}, où vous pouvez former gratuitement 6 personnes (ou plus), qu'elles soient débutantes, ou confirmées. 
Le modèle d'éducation de The Hacking Project n'a pas de limite en terme de nombre de moussaillons (c'est comme cela que l'on appelle les élèves), donc nous serions ravis de travailler avec #{ville} !\n\nCharles, co-fondateur de The Hacking Project pourra répondre à toutes vos questions : 06.95.46.60.80\"\nend", "def generate_activation_code(size = 40)\n charset = ([*('A'..'Z')] + [*('0'..'9')]-%w(0 B 1 I L O S U))\n (0...size).map{ charset.to_a[rand(charset.size)] }.join\n end", "def to_email_text\n semantics = to_semantic_values\n body = []\n [ \"title\", \"part_of\", \"author\", \"contributor\",\n \"date\", \"isbn\", \"issn\", \"doi\" ].each do |field|\n if !semantics[field.to_sym].blank?\n value = semantics[field.to_sym]\n label = \"blacklight.email.text.#{field}\"\n body << I18n.t(label, value: value.join(\" \"))\n end\n end\n\n return body.join(\"\\n\") unless body.empty?\n end", "def send_invitation(email)\n chars = [\"A\"..\"Z\",\"a\"..\"z\",\"0\"..\"9\"].collect { |r| r.to_a }.join\n code = (1..8).collect { chars[rand(chars.size)] }.pack(\"C*\")\n invitation = Invitation.create!(:sender => self,\n :code => code,\n :receiver_email => email)\n if Notifier.deliver_invitation(invitation)\n self.update_attribute :sent_invitations, self.sent_invitations + 1\n end unless invitation.blank?\n end", "def generate_activation_code(size = 6)\n charset = %w{ 2 3 4 6 7 9 A C D E F G H J K M N P Q R T V W X Y Z}\n (0...size).map{ charset.to_a[rand(charset.size)] }.join\n end", "def generate_passport_code(length = 5, non_ambiguous = false)\n #characters = ('a'..'z').to_a + ('A'..'Z').to_a + ('0'..'9').to_a\n #characters = 'acdefghjkmnprtuvwxyz'.to_a + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'.to_a + ('0'..'9').to_a\n characters = ('A'..'A').to_a + ('C'..'H').to_a + ('J'..'K').to_a + ('M'..'Q').to_a + ('R'..'Z').to_a + ('0'..'9').to_a\n characters = characters + ('a'..'a').to_a + ('c'..'h').to_a + ('j'..'k').to_a + ('m'..'q').to_a + ('r'..'z').to_a\n %w{I O l 0 1}.each{ |ambiguous_character|\n characters.delete ambiguous_character\n } if non_ambiguous\n\n (0...length).map{\n characters[SecureRandom.random_number(characters.size)]\n }.join\n\n end", "def create_first_text_message\n%(Today, I want to feel:\nPassionate\nEnergized\nConnected\nHopeful\nAligned\n)\n end", "def createShippedTeamEmailBody(order_hash)\t\n\t\tbody = <<EOM\t\nOrderNum: #{order_hash[:order_number]}<br/>\nShipMethod: #{order_hash[:ship_option]}<br/>\nTrackingNum: #{order_hash[:tracking_number]}<br/>\nShipToFirstName: #{order_hash[:ship_to_first_name]}<br/>\nShipToLastName: #{order_hash[:ship_to_last_name]}<br/>\nShipToCompany: #{order_hash[:ship_to_company]}<br/>\nShipToAddr1: #{order_hash[:ship_to_addr1]}<br/>\nShipToAddr2: #{order_hash[:ship_to_addr2]}<br/>\nShipToCity: #{order_hash[:ship_to_city]}<br/>\nShipToState: #{order_hash[:ship_to_state]}<br/>\nShipToZip: #{order_hash[:ship_to_zip]}<br/>\nEOM\n\n\t\tbody\n\tend", "def generate_code(number)\n charset = Array('A'..'Z') + Array('a'..'z')\n Array.new(number) { charset.sample }.join\n end", "def generate_code(number)\n charset = Array('A'..'Z') + Array('a'..'z')\n Array.new(number) { charset.sample }.join\nend", "def get_the_email_html(name_city)\n \"<p>Bonjour,<br><br> Je m'appelle Nicolas, je suis élève à une formation de code gratuite, ouverte à tous, sans restriction géographique, ni restriction de niveau. 
La formation s'appelle <strong>The Hacking Project</strong> (http://thehackingproject.org/).<br> Nous apprenons l'informatique via la méthode du peer-learning : nous faisons des projets concrets qui nous sont assignés tous les jours, sur lesquel nous planchons en petites équipes autonomes. Le projet du jour est d'envoyer des emails à nos élus locaux pour qu'ils nous aident à faire de The Hacking Project un nouveau format d'éducation gratuite.<br> Nous vous contactons pour vous parler du projet, et vous dire que vous pouvez ouvrir une cellule à #{name_city}, où vous pouvez former gratuitement 6 personnes (ou plus), qu'elles soient débutantes ou confirmées. Le modèle d'éducation de The Hacking Project n'a pas de limite en terme de nombre de moussaillons (c'est comme cela que l'on appelle les élèves) donc nous serions ravis de travailler avec #{name_city} !<br><br> Charles, co-fondateur de The Hacking Project pourra répondre à toutes vos questions : 06.95.46.60.80<br><br> Bien à vous<br><br> Nicolas</p>\"\nend", "def generate\n resno = @resno_gen.generate\n id = @id_gen.generate\n address = @addr_gen.generate\n date = @date_gen.generate\n contact = @contact_gen.generate\n additional_properties = @additional_properties\n msg = {\n actionType: 'Create',\n jobIdentity: id,\n surveyType: @survey_type,\n preallocatedJob: false,\n mandatoryResourceAuthNo: resno,\n dueDate: date,\n address: address\n }\n msg[:contact] = contact if not contact.nil?\n msg[:additionalProperties] = additional_properties if not additional_properties.nil?\n return msg\n end", "def to_s\n self.sender ||= sender_address\n self.message_id ||= ID.generate_gid(domain)\n body.mime_version ||= \"1.0 (Ruby MIME v#{VERSION})\"\n\n #--\n # In an RFC 2822 message, the header and body sections must be separated\n # by two line breaks (i.e., 2*CRLF). One line break is deliberately\n # omitted here so the MIME body supplier can append headers to the\n # top-level message header section.\n #++\n \"#{headers}\\r\\n#{body}\"\n end", "def generate_phone_verification_code\n verification_code = SecureRandom.hex(3)\n verification_code\n end", "def build_hash_code\n\t\tSecureRandom.hex(8) + (Time.now.to_f * 1000).to_i.to_s\n\tend", "def m2_message\n [\n \"Super fedt at du gerne vil hjælpe os, med at planlægge Spejdernes Lejr 2017!\",\n \"\\n\\nUd fra dine fantastiske kompetencer, har vi videregivet dine kontaktoplysninger til {{udvalg}}. Hvis du ikke hører fra {{udvalg}}, eller hvis I ikke fandt noget du var interesseret i, så er du mere end velkommen til at kontakte os på [email protected]. 
Så hjælper vi dig videre til et andet spændende lejrjob!\",\n \"\\n\\n_De fedeste Spejdernes Lejr 2017 hilsener_ \\n\",\n \"{{bruger}}, Jobcenteret SL2017\"\n ].join()\n end", "def mail_content(case_id, lender_id)\n @loan_case = LoanCase.find(case_id)\n @lender = Lender.find(lender_id)\n @key = \"+\"\n while @key.index(\"+\")\n @key = AES.encrypt(@lender.id.to_s, ENV[\"KEY\"])\n end\n @token = \"+\"\n while @token.index(\"+\")\n @token = AES.encrypt(@loan_case.id.to_s, ENV[\"KEY\"])\n end\n mail to: \"\\\"#{@lender.name}\\\" <#{@lender.email}>\", subject: \"[EZBANK-新房貸][#{@loan_case.address}]\"\n end", "def sending_email\n puts\n email = \"Sending notification email\"\n puts bold(email)\n puts email.length.times.map {\"=\"}.join\n puts\n end", "def zipcode\n \"%05d\" % rand(99999) \n end", "def generate_code(number)\n charset = Array('A'..'Z') + Array(0..9)\n Array.new(number) { charset.sample }.join\n end", "def make_msg(arr)\n\tnew_msg = \"\"\n\tarr.each do |i| # remember, i is the VALUE, not the index! use each_index if you want i to equal the index on each iteration\n\t\tif i == 32\n\t\t\tnew_msg << \" \" # if it's a space, leave it alone\n\t\telse\n\t\t\tnew_msg << (i + 65).chr # convert ints to chars and append\n\t\tend\n\tend\n\t\n\treturn new_msg\nend", "def email(body)\n message = <<EOM\nFrom: #{FROM}\nTo: #{TO}\nDate: #{NOW.rfc822}\nMessage-Id: #{SecureRandom.uuid}@redhat.com\nSubject: Unassigned upcoming maintenances\n\nRegion Leads - please arrange coverage for these maintenances immediately:\n\n#{body}\n\n---\nThis message has been sent by the Unassigned Maintenance Broadcast System.\nThis utility runs in the #{NAMESPACE} namespace on #{CLUSTER}.\nThe source code for this utility can be found at #{REPO}.\nEOM\n\n Net::SMTP.start('smtp.corp.redhat.com', 25, FROM) do |smtp|\n smtp.send_message message, FROM, TO\n end\nend", "def generateMsg(msgTxt)\r\n msgOut = \"\"\r\n msgData = @context.messages[msgTxt]\r\n\r\n if (nil != msgData)\r\n msgOut = @template.msg(msgData)\r\n end\r\n\r\n msgOut\r\n end", "def preview_email\r\n invitation = Invitation.new(:user => current_user, :code => Code.new)\r\n mail = Mailers::Debate.create_invitation(current_user, @resource, invitation)\r\n @mail_body = mail.body.sub('No message provided', 'YOUR PERSONALIZED NOTE GOES HERE')\r\n\r\n render :inline => %Q{<%= simple_format(@mail_body, {:style => 'margin: 8px 0px'}) %>}\r\n end", "def send_one_email_to(name, mail)\n email = @gmail.compose do\n to mail\n subject \"Apprentissage entre pairs + gratuité + code = The Hacking Project\"\n html_part do\n content_type 'text/html; charset=UTF-8'\n body get_the_email_html(name) #TODO faire pour toutes les villes du tableau -> suppose de lire les colonnes du tableau dans une boucle (ajouter un délai)\n end\n end\nemail.deliver!\nend", "def create_email(sports_news, schedules)\n email = \"\"\n if @info.include? 'News'\n @sports.each do |sport|\n news = get_news sport, sports_news\n email += news.display [], email\n end\n end\n if @info.include? 'Schedule'\n @sports.each do |sport|\n email += \"#{sport} Schedule\\n\"\n schedule = get_schedule sport, schedules\n email += schedule.display email\n end\n end\n email\n end", "def build_message(mail_text)\n Mail.new(mail_text)\n end", "def get_message(birth_path_num)\n case birth_path_num\n when 1\n message = \"Your numerology number is #{birth_path_num}.\\nOne is the leader. The number one indicates the ability to stand alone, and is a strong vibration. 
Ruled by the Sun.\"\n when 2\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the mediator and peace-lover. The number two indicates the desire for harmony. It is a gentle, considerate, and sensitive vibration. Ruled by the Moon.\"\n when 3\n message = \"Your numerology number is #{birth_path_num}.\\nNumber Three is a sociable, friendly, and outgoing vibration. Kind, positive, and optimistic, Three's enjoy life and have a good sense of humor. Ruled by Jupiter.\"\n when 4\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the worker. Practical, with a love of detail, Fours are trustworthy, hard-working, and helpful. Ruled by Uranus.\"\n when 5\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the freedom lover. The number five is an intellectual vibration. These are 'idea' people with a love of variety and the ability to adapt to most situations. Ruled by Mercury.\"\n when 6\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the peace lover. The number six is a loving, stable, and harmonious vibration. Ruled by Venus.\"\n when 7\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the deep thinker. The number seven is a spiritual vibration. These people are not very attached to material things, are introspective, and generally quiet. Ruled by Neptune.\"\n when 8\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the manager. Number Eight is a strong, successful, and material vibration. Ruled by Saturn.\"\n when 9\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the teacher. Number Nine is a tolerant, somewhat impractical, and sympathetic vibration. Ruled by Mars.\"\n else\n message = \"Uh oh! Your birth path number is not 1-9!\"\n end #birth_path_num case\nend", "def createPrinterReceivedEmailBody(order_hash)\n\t\tbody = <<EOM\t\nOrderNum: #{order_hash[:order_number]}<br/>\nShipMethod: #{order_hash[:ship_option]}<br/>\nShipToFirstName: #{order_hash[:ship_to_first_name]}<br/>\nShipToLastName: #{order_hash[:ship_to_last_name]}<br/>\nShipToCompany: #{order_hash[:ship_to_company]}<br/>\nShipToAddr1: #{order_hash[:ship_to_addr1]}<br/>\nShipToAddr2: #{order_hash[:ship_to_addr2]}<br/>\nShipToCity: #{order_hash[:ship_to_city]}<br/>\nShipToState: #{order_hash[:ship_to_state]}<br/>\nShipToZip: #{order_hash[:ship_to_zip]}<br/>\nShipToPhone: #{order_hash[:ship_to_phone]}<br/>\n<br/>\nOrder Items:<br/>\nEOM\n\n\t\tbody\n\tend", "def mk_body\n skills = @skills.join(', ')\n skills += \", m.v.\" if @skills.length > 1\n\n body = \"Jeg søger stillingen som #{@position} i jeres virksomhed.\\n\"\n body += \"Jeg føler min erfaring inden for brugen af #{skills} kunne gavne jer.\\n\" unless @skills.nil?\n body += \"#{@extra}\\n\" unless @extra.nil?\n return body\n end", "def generate_message(item)\n short_message = ''\n long_message = ''\n code = ''\n\n # status patron message otherwise regular message\n if item[:statusPatronMessage].present?\n code = 'sp'\n long_message = item[:statusPatronMessage]\n short_message = long_message.gsub(/(Try|Place).+/, '').strip\n short_message = short_message.gsub(/\\W$/, '')\n # if record[:patronGroupCode].strip.match(/^(IND|MIS|ACO)/)\n # code = 'sp'\n # long_message = record[:lastName].strip + ' ' + record[:firstName].strip\n # # done in two steps in case ending puctuation is missing\n # short_message = long_message.gsub(/(Try|Place).+/, '').strip\n # short_message = short_message.gsub(/\\W$/, '')\n else\n code = item[:statusCode].to_s\n # append 
suffix to indicate whether there are requests - n = no requests, r = requests\n item[:requestCount] == 0 ? code += 'n' : code += 'r'\n\n # get parms for the message being processed\n parms = ITEM_STATUS_CODES[code]\n\n raise \"Status code not found in config/item_status_codes.yml\" unless parms\n\n short_message = make_substitutions(parms['short_message'], item)\n long_message = make_substitutions(parms['long_message'], item)\n\n end\n\n # add labels\n short_message = add_label(short_message, item)\n long_message = add_label(long_message, item)\n\n if Rails.env != 'clio_prod'\n short_message = short_message + \" (#{code}/short)\"\n long_message = long_message + \" (#{code}/long)\"\n end\n\n return { :status_code => code,\n :short_message => short_message,\n :long_message => long_message }\n end", "def build_text_mail(files, titles)\n data = \"\"\n files.each_with_index do |file, i|\n data += \"\\n\\n#{titles[i]}\\n\\n\"\n data += File.read(File.join(File.dirname(__FILE__), file))\n end\n # Add google file link with test coverage at the end of email\n data +=\"\\n\\n<a href='https://google.com'>Test Coverage</a>\\n\\n\"\n # attached screenshots\n pictures = Dir.glob(Rails.root.join('public', 'images', 'screenshots', '*.png'))\n #pictures = Dir.glob(Rails.root.join(\"/var/www/office/public/images/capybara/*.png\"))\n count = 0\n pictures.each do |p|\n count += 1\n data += \"<img src='#{ 'https://google.com' + p.split('/').last }'>\"\n break if count == 10\n end\n data\nend", "def generate_random_email_address\n random_email_account = SecureRandom.hex(@number_of_random_characters)\n random_domain_name = SecureRandom.hex(@number_of_random_characters)\n return random_email_account + \"@\" + random_domain_name + @email_suffix\n end", "def scramble email\n # TODO implement for real\n email.to_s.dup.freeze\n end", "def generateCode(prefix,numberOfString,modelClass)\n\t\t\n\t\tcodeNumber=modelClass.maximum(\"id\").to_i+1\t\t\n\t\t# l_code=prefix.length\n\t\tl_num=codeNumber.to_s.length\n\t\tnum =numberOfString-l_num.to_i\n\n\t\ti=0\n\t\tzero=''\n\t\twhile i < num do\n\t\t zero+='0'\n\t\t i+=1\n\t\tend\n\n\t\treturn prefix+zero+codeNumber.to_s\n\tend", "def email_time; strftime(EMAIL_TIME_FORMAT); end", "def generate_accessCode\n return ((0..9).to_a + (\"a\"..\"z\").to_a).shuffle[0..5].join(\"\")\n end", "def make_activation_code\n self.activation_code = Digest::SHA1.hexdigest( Time.now.to_s.split(//).sort_by {rand}.join )\n end", "def message_template; end", "def generate_message(user)\n message = {\n subject: 'Test',\n from_name: 'The Digest!',\n text: 'This is a testing email',\n to: [\n {\n email: user.email,\n name: user.firstname\n }\n ],\n\n html: '',\n from_email: '[email protected]'\n }\n end", "def phone_number_with_ext\n \"(#{Faker::PhoneNumber.area_code}) #{Faker::PhoneNumber.exchange_code}-#{Faker::PhoneNumber.subscriber_number} Ext:1234\"\n end", "def make_activation_code\n self.activation_code = Digest::SHA1.hexdigest( Time.now.to_s.split(//).sort_by {rand}.join )\n end", "def gen_int_phone_num\n \"011-#{rand(100) + 1}-#{rand(100) + 10}-#{rand(1000) + 1000}\"\nend", "def make_up_password\n\to = [('a'..'z'), ('A'..'Z'), ('0'..'9')].map { |i| i.to_a }.flatten\n\tpass = (0...12).map { o[rand(o.length)] }.join + \"@\"\n\tputs \"Using #{pass} for password\\n\"\n\treturn pass\nend", "def autogenerate_code\n self.code = String.random_alphanumeric if self.code.length == 0\n end", "def blurb_win\n case self.type\n when 'test'\n \"Thanks for using this service.\"\n when 'left'\n\"Please contact 
#{self.email_from} if you have any queries regarding the analysis or the output and one of the team will respond.\n\nBest regards,\nThe LEFT development team.\"\n\n end\n end", "def email_creation(hash_to_manipulate,msg)\n retirement_site=[]\n others_site=[]\n sorted_hash = hash_to_manipulate.sort_by{ |instance, details| details[:not_before] }\n sorted_hash.each do |instance, details|\n if details[:code] == \"instance-retirement\"\n retirement_site << \"<tr><td>#{details[:name]}</td><td>#{instance}</td><td>#{details[:code]}</td><td>#{details[:not_before]}</td><td>#{details[:region]}</td><td>#{details[:site]}</td></tr>\"\n else\n unless details[:description] =~ /\\[Completed\\]|\\[Canceled\\]/\n others_site << \"<tr><td>#{details[:name]}</td><td>#{instance}</td><td>#{details[:code]}</td><td>#{details[:not_before]}</td><td>#{details[:region]}</td><td>#{details[:site]}</td></tr>\"\n end\n end\n end\n # This is so people know the system is working, but that nothing happened today\n if retirement_site.empty? and others_site.empty?\n msg << \"<html>\"\n msg << \"<body>\"\n msg << \"<h3>No events today! Have a nice day! :)</h3>\"\n msg << \"</body>\"\n msg << \"</html>\"\n end\n unless retirement_site.empty? and others_site.empty? \n msg << \"<html>\"\n msg << \"<body>\"\n unless retirement_site.empty?\n msg << \"<h3>Instances that are being terminated!</h3>\"\n msg << \"<table border=\\\"1\\\" cellpadding=\\\"5\\\">\"\n msg << \"<tr><th>Instance Name</th><th>Instance ID</th><th>Event</th><th>Earliest Date of Event</th><th>Region</th><th>Site</th></tr>\"\n msg << \"#{retirement_site.join(\"\\n\")}\"\n msg << \"</table>\"\n msg << \"<p><p>\"\n end\n unless others_site.empty?\n msg << \"<h3>Instances with non-terminal events.</h3>\"\n msg << \"<table border=\\\"1\\\" cellpadding=\\\"5\\\">\"\n msg << \"<tr><th>Instance Name</th><th>Instance ID</th><th>Event</th><th>Earliest Date of Event</th><th>Region</th><th>Site</th></tr>\"\n msg << \"#{others_site.join(\"\\n\")}\"\n msg << \"</table>\"\n end\n msg << \"</body>\"\n msg << \"</html>\"\n end\nend", "def carving_and_marking_part_4_email_body\n %(<div>VESSEL NAME: #{@submission.vessel}</div>\n <div>Please find enclosed the Carving and Marking Note for the above vessel.\n <br><br>A Commercial Bareboat Charter Carving and Marking Note must be\n signed by an Inspector of Marks/Authorised Measurer\n\n <br><br>Regulation 35 of the Merchant Shipping (Registration of Ships)\n Regulations 1993 states that a carving and marking note should be\n returned completed to the Registrar within three months.\n\n <br><br>[FREE TEXT]\n <br><br>We also require the following documents:\n <br><br>[FREE TEXT]\n <br><br>The documents can be emailed to:\n Commercial vessels: [email protected]\n <br><br>\n Alternatively, please post to:\n MCA\n Anchor Court\n Keen Road\n Cardiff\n CF24 5JW\n\n <br><br>\n Please do not hesitate to contact us you require any further assistance.\n </div>)\n end", "def generate_mac\n crc32 = Zlib.crc32(self.id.to_s)\n offset = crc32.modulo(255)\n\n digits = [ %w(0),\n %w(0),\n %w(0),\n %w(0),\n %w(5),\n %w(e),\n %w(0 1 2 3 4 5 6 7 8 9 a b c d e f),\n %w(0 1 2 3 4 5 6 7 8 9 a b c d e f),\n %w(5 6 7 8 9 a b c d e f),\n %w(3 4 5 6 7 8 9 a b c d e f),\n %w(0 1 2 3 4 5 6 7 8 9 a b c d e f),\n %w(0 1 2 3 4 5 6 7 8 9 a b c d e f) ]\n mac = \"\"\n for x in 1..12 do\n mac += digits[x-1][offset.modulo(digits[x-1].count)]\n mac += \":\" if (x.modulo(2) == 0) && (x != 12)\n end\n mac\n end", "def record()\n # Staying in Germany for now:\n country_code = 
'+49'\n # Between 2 and 5 digits area codes (max 2 + 3):\n area_digits_quantity = 2 + rand(4)\n # Area code has no trailing zeros. E.g. with 4 digits minimum is 1000.\n min_area_code = 10 ** (area_digits_quantity - 1)\n # Biggest 5 digit area code is 99999, i.e. 10**5 - 1\n max_area_code = 10 ** area_digits_quantity - 1\n # Get a random number in the range min_area_code to max_area_code:\n area_code = min_area_code + rand( max_area_code - min_area_code + 1 )\n # Today area code and subscriber line are 10 digits in total.\n subscriber_digits_quantity = 10 - area_digits_quantity\n min_subscriber_number = 10 ** (subscriber_digits_quantity - 1)\n max_subscriber_number = 10 ** subscriber_digits_quantity - 1\n subscriber_number = min_subscriber_number + rand( max_subscriber_number - min_subscriber_number + 1 )\n PhoneNumber.new( country_code, area_code, subscriber_number )\n end", "def generate_id\n Digest::MD5.hexdigest(text)\n end", "def to_s\n if @code.count < 13\n @code.join + @check_digit.to_s\n else\n @code.join\n end\n end", "def generate_PIC(manifest, stc = nil)\n\t\tcase manifest.mail_class\n\t\twhen 'LC'\n\t\t\t@tracking_number = \"LX600#{rand(999999).to_s.rjust(6, '0')}US\"\n\t\t\t@barcode_construct_code = 'I01'\n\t\twhen 'PG'\n\t\t\t@tracking_number = \"83500#{rand(99999).to_s.rjust(5, '0')}\"\n\t\t\t@barcode_construct_code = 'G01'\n\t\twhen 'IE'\n\t\t\t@tracking_number = \"EI100#{rand(999999).to_s.rjust(6, '0')}US\"\n\t\t\t@barcode_construct_code = 'I01'\n\t\twhen 'CP'\n\t\t\t@tracking_number = \"CB600#{rand(999999).to_s.rjust(6, '0')}US\"\n\t\t\t@barcode_construct_code = 'I01'\n\t\tend\n\tend" ]
[ "0.72666997", "0.7083372", "0.7003533", "0.6906018", "0.6809272", "0.6696758", "0.6658766", "0.65831333", "0.65177906", "0.64972395", "0.6428326", "0.6405881", "0.63336295", "0.63279736", "0.624863", "0.62463194", "0.6245141", "0.6192106", "0.61861986", "0.6174866", "0.61659205", "0.6128014", "0.6115978", "0.609352", "0.60618776", "0.6057605", "0.60475683", "0.6047014", "0.6044856", "0.60335404", "0.60204685", "0.60095215", "0.6005882", "0.59980965", "0.5979939", "0.59795356", "0.5975283", "0.5975026", "0.5966204", "0.5963456", "0.59504944", "0.59167403", "0.5916495", "0.5908628", "0.5905504", "0.589741", "0.58823824", "0.5861672", "0.58585626", "0.5857151", "0.58404183", "0.5839694", "0.5819355", "0.5807024", "0.58047426", "0.57915956", "0.57871497", "0.5782862", "0.57738346", "0.5773163", "0.5768609", "0.5767473", "0.5756861", "0.5754444", "0.57544345", "0.5748954", "0.5735619", "0.57320416", "0.5728532", "0.570704", "0.57039243", "0.5692986", "0.5687998", "0.56815416", "0.56698525", "0.56652254", "0.56593317", "0.5659165", "0.565423", "0.5650912", "0.5647257", "0.5644245", "0.5641033", "0.56368816", "0.5629337", "0.56279266", "0.56227237", "0.5607567", "0.56045645", "0.5600955", "0.5588935", "0.5587714", "0.55839485", "0.5580413", "0.55748624", "0.5564355", "0.5563898", "0.55635303", "0.5557368", "0.5555018", "0.55479777" ]
0.0
-1
TODO: Add some validation
def upvote self.votes += 1 save end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def private; end", "def schubert; end", "def probers; end", "def formation; end", "def specie; end", "def specie; end", "def specie; end", "def specie; end", "def suivre; end", "def verdi; end", "def refutal()\n end", "def who_we_are\r\n end", "def identify; end", "def custom; end", "def custom; end", "def relatorios\n end", "def operations; end", "def operations; end", "def implementation; end", "def implementation; end", "def weber; end", "def parslet; end", "def parslet; end", "def parslet; end", "def parslet; end", "def apply\n\t\t\n\tend", "def apply\n\t\t\n\tend", "def zuruecksetzen()\n end", "def intensifier; end", "def terpene; end", "def villian; end", "def parameters; end", "def parameters; end", "def parameters; end", "def parameters; end", "def parameters; end", "def parameters; end", "def parameters; end", "def parameters; end", "def strategy; end", "def initialize\n\t\t\n\tend", "def informational?; end", "def data; end", "def data; end", "def data; end", "def data; end", "def data; end", "def data; end", "def data; end", "def data; end", "def data; end", "def data; end", "def data; end", "def data; end", "def berlioz; end", "def valid; end", "def requirements; end", "def requirements; end", "def requirements; end", "def requirements; end", "def validated; end", "def initialize\r\n\r\n end", "def stderrs; end", "def from; end", "def from; end", "def from; end", "def from; end", "def offences_by; end", "def metadata; end", "def metadata; end", "def metadata; end", "def metadata; end", "def metadata; end", "def metadata; end", "def metadata; end", "def extra; end", "def initialize\n\n end", "def initialize\n\n end", "def schumann; end", "def parts; end", "def parts; end", "def parts; end", "def required_data() [] end", "def issn; end", "def sitemaps; end", "def operation; end", "def parse()\n #This is a stub, used for indexing\n end", "def celebration; end", "def initialize\n \n end", "def eplore\n end", "def initialize\n \n end", "def result; end", "def result; end", "def result; end", "def result; end", "def result; end", "def result; end", "def result; end", "def result; end", "def details; end", "def transformations; end" ]
[ "0.75198245", "0.66113764", "0.6524906", "0.6506399", "0.6479728", "0.6479728", "0.6479728", "0.6479728", "0.62942326", "0.6069807", "0.60287654", "0.5979395", "0.59318125", "0.5893944", "0.5893944", "0.5848804", "0.58361995", "0.58361995", "0.5789293", "0.5789293", "0.5762124", "0.5740504", "0.5740504", "0.5740504", "0.5740504", "0.5737729", "0.5737729", "0.5716815", "0.57113403", "0.56897134", "0.56823754", "0.5670572", "0.5670572", "0.5670572", "0.5670572", "0.5670572", "0.5670572", "0.5670572", "0.5670572", "0.5668271", "0.5666335", "0.5662414", "0.565021", "0.565021", "0.565021", "0.565021", "0.565021", "0.565021", "0.565021", "0.565021", "0.565021", "0.565021", "0.565021", "0.565021", "0.56464845", "0.5638794", "0.56383973", "0.56383973", "0.56383973", "0.56383973", "0.5636927", "0.5634556", "0.5630451", "0.5625989", "0.5625989", "0.5625989", "0.5625989", "0.5606695", "0.56030226", "0.56030226", "0.56030226", "0.56030226", "0.56030226", "0.56030226", "0.56030226", "0.56009233", "0.55830777", "0.55830777", "0.55696535", "0.556943", "0.556943", "0.556943", "0.55659103", "0.5561593", "0.5543358", "0.5541873", "0.5541574", "0.5539712", "0.5534221", "0.5507869", "0.5505905", "0.55049145", "0.55049145", "0.55049145", "0.55049145", "0.55049145", "0.55049145", "0.55049145", "0.55049145", "0.5504081", "0.54989576" ]
0.0
-1
Extracts properties from the constituent external metadata file
def extract_metadata raise ArgumentError, "MIME type unspecified or not configured" if schema.blank? fn = "extract_#{schema.downcase}_metadata" raise ArgumentError, "Unsupported metadata standard: #{schema}" unless respond_to?(fn.to_sym) send(fn, metadata_xml) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract_metadata; end", "def fetch_metadata\n {\n \"public_fqdn\" => fetch_metadata_item(\"getFullyQualifiedDomainName.txt\"),\n \"local_ipv4\" => fetch_metadata_item(\"getPrimaryBackendIpAddress.txt\"),\n \"public_ipv4\" => fetch_metadata_item(\"getPrimaryIpAddress.txt\"),\n \"region\" => fetch_metadata_item(\"getDatacenter.txt\"),\n \"instance_id\" => fetch_metadata_item(\"getId.txt\"),\n }\n end", "def read_metadata; end", "def readProperties\n @propertiesFile = \"#{File.expand_path(File.dirname($0))}/../../conf/ddbt.properties\"\n @properties = {}\n IO.foreach(@propertiesFile) do |line|\n @properties[$1.strip] = $2 if line =~ /([^=]*)=(.*)\\/\\/(.*)/ || line =~ /([^=]*)=(.*)/\n end\nend", "def extract_meta\n end", "def read_properties\n buf = ''\n File.open( properties_file, 'r' ) { |f| buf = f.read }\n h = JSON.parse(buf, {:symbolize_names => true})\n @name = h.delete(:name).to_s\n @created= h.delete(:created).to_s\n @description = h.delete(:description).to_s\n @repo_properties = h\n end", "def extract_metadata\n YAML_METADATA.match(content) do |match|\n @metadata = YAML.load(match[0])\n @content = content.gsub(YAML_METADATA, \"\")\n end\n end", "def get_properties\r\n #Assumes PropertyName=PropertyString with or without whitespace around =\r\n properties = Hash.new\r\n File.open(@filename, \"r:\" + PropertyFileAttributes::CATEGORY_ENCODINGS[@category]).each_line do |s| \r\n m = PropertyFileAttributes::PROPERTY_FILE_REGEX.match(s)\r\n if m != nil\r\n property = m[1]\r\n property = PropertyFileAttributes.convert_to_utf8(property)\r\n #This is a hack to get rid of the unicode non-break space that sometimes find their way into international files\r\n property = PropertyFileAttributes.remove_break_space(property).strip() \r\n value = m[2]\r\n value = PropertyFileAttributes.convert_to_utf8(value)\r\n value = PropertyFileAttributes.remove_break_space(value).strip()\r\n properties[property] = value\r\n end\r\n end \r\n @number_properties = properties.size\r\n properties\r\n end", "def load_properties(properties_filename)\n properties = {}\n File.open(properties_filename, 'r') do |properties_file|\n properties_file.read.each_line do |line|\n line.strip!\n if (line[0] != ?# and line[0] != ?=)\n Chef::Log.info \"line : #{line}\"\n i = line.index('=')\n if (i)\n properties[line[0..i - 1].strip] = line[i + 1..-1].strip\n end\n end\n end\n end\n return properties\n end", "def read_prop_file(prop_file)\n props = {}\n File.open(prop_file, 'r') do |f|\n f.each_line do |line|\n props[$1] = $2 if line =~ /^export (.*)=(.*)$/\n end\n end if File.exists?(prop_file)\n props\nend", "def initialize file\n @file = file\n @properties = {}\n# IO.foreach(file) do |line|\n# @properties[$1.strip] = $2 if line =~ /([^=]*)=(.*)\\/\\/(.*)/ || line =~ /([^=]*)=(.*)/\n# @properties[$1.strip] = $2 if line =~ /([^=]*)=(.*)\\/\\/(.*)/ || line =~ /([^=]*)=(.*)/\n# end\nFile.open(file, 'r') do |properties_file|\n properties_file.read.each_line do |line|\n line.strip!\n if (line[0] != ?# and line[0] != ?=)\n i = line.index('=')\n if (i)\n @properties[line[0..i - 1].strip] = line[i + 1..-1].strip\n else\n @properties[line] = ''\n end\n end\n end\n end\nend", "def extract_metadata(file)\n document = parse_kramdown(file)\n toc = ::Kramdown::Converter::Toc.convert(document.root)\n toc_items = toc[0].children.select { |el| el.value.options[:level] == 2 }.map do |t| \n {:id => t.attr[:id], :text => t.value.children.first.value}\n end\n\n metadata = document.root.options[:metadata]\n metadata[:toc] = toc_items\n 
metadata[:converted] = document.to_html\n metadata[:technologies] = metadata[:technologies].split(\",\").collect {|tech| tech.strip}\n metadata[:images] = find_images(document.root)\n metadata[:author] = metadata[:author].split(',').first if metadata[:author]\n metadata[:commits] = commit_info @repo, Pathname.new(file)\n metadata[:current_tag] = current_tag @repo, Pathname.new(file)\n metadata[:current_branch] = current_branch @repo, Pathname.new(file)\n metadata[:github_repo_url] = repository_url @repo\n metadata[:contributors] = metadata[:commits].collect { |c| c[:author] }.uniq\n metadata[:contributors_email] = metadata[:commits].collect { |c| c[:author_email] }.uniq\n metadata[:contributors].delete(metadata[:author])\n metadata[:product] = @product if @product\n metadata[:experimental] = @experimental\n metadata\n end", "def metadata(filepath)\n metadata = {}\n metadata.merge!(author(filepath))\n metadata.merge!(title(filepath))\n metadata.merge!(serie(filepath))\n metadata\n end", "def extract_properties!(data)\n h = {}\n if data['property']\n # Data came from XML\n h = data['property'].inject({}) do |r,v|\n r[v['key']] = v['content']\n r\n end\n elsif data['properties']\n h = data['properties']\n end\n property ||= {}\n property.merge!(h)\n end", "def metadata\n return @metadata if defined? @metadata\n\n @metadata = Henkei.read :metadata, data\n end", "def metadata\n return @metadata if defined? @metadata\n\n @metadata = Henkei.read :metadata, data\n end", "def metadata_file; end", "def metadata_file; end", "def diphot_metadata_to_h\n File.open(@qualified_filename, 'r') do |fd|\n diff = fd.readline.chomp\n reference = fd.readline.chomp\n @obj_metadata = { 'diff' => diff, 'reference' => reference }\n end\n end", "def load_properties(properties_filename)\n properties = {}\n File.open(properties_filename, 'r') do |properties_file|\n properties_file.read.each_line do |line|\n line.strip!\n if (line[0] != ?# and line[0] != ?=)\n i = line.index('=')\n if (i)\n properties[line[0..i - 1].strip] = line[i + 1..-1].strip\n else\n properties[line] = ''\n end\n end\n end\n end\n return properties\n end", "def parse(filenames)\r\n properties = {}\r\n filenames.each do |filename|\r\n if (File.exists?(filename))\r\n file = File.open(filename, \"r\")\r\n file.read.each_line do |line|\r\n line.strip!\r\n if (line[0] != ?# and line[0] != ?=)\r\n i = line.index(\"=\")\r\n if (i)\r\n key = line[0..i - 1].strip\r\n value = line[i + 1..-1].strip\r\n properties[key] = value.strip\r\n else\r\n key = line\r\n value = \"\"\r\n properties[key] = value\r\n end\r\n end\r\n end\r\n file.close\r\n file = nil\r\n end\r\n end\r\n properties\r\nend", "def dockerfile_metadata(file = nil)\n meta_data = @properties.merge(:repo_name => nil,\n :image_name => nil,\n :file_name => nil,\n :VERSION => nil,\n :maintainer_name => nil,\n :maintainer_email => nil)\n return meta_data unless File.exist?(file)\n meta_data[:file_name] = File.expand_path(file)\n File.open(file, 'r').each do |line|\n [{ :property => :VERSION, :token => '.*#\\sDOCKER-VERSION' },\n { :property => :maintainer_name, :token => '^MAINTAINER',\n :exp => '(.*),\\s(.*)', :field => 1 },\n { :property => :maintainer_email, :token => '^MAINTAINER',\n :exp => '(.*),\\s(.*)', :field => 2 },\n { :property => :image_name, :token => '.*#\\sDOCKER-NAME',\n :exp => '(.*)/(.*)', :field => 2 },\n { :property => :repo_name, :token => '.*#\\sDOCKER-NAME' }].each do |p|\n match_val = match_token(p[:token], line, p)\n meta_data[p[:property]] = match_val unless match_val.nil?\n 
end\n end\n meta_data\n end", "def read_metadata\n @metadata =\n if !disabled? && File.file?(metadata_file)\n content = File.binread(metadata_file)\n\n begin\n Marshal.load(content)\n rescue TypeError\n SafeYAML.load(content)\n rescue ArgumentError => e\n Jekyll.logger.warn(\"Failed to load #{metadata_file}: #{e}\")\n {}\n end\n else\n {}\n end\n end", "def extract_metadata!(data = {})\n data.tap do |d|\n file = File.open(@file)\n d[:filesize] = file.size\n file.close\n\n d[:content_type] = MIME::Types.type_for(@file).first.to_s\n end\n end", "def initialize(file)\n @file = file\n @properties = {}\n File.open(file).each_line do |line|\n @properties[Regexp.last_match(1).strip] = Regexp.last_match(2) if line =~ %r{([^=]*)=(.*)//(.*)} || line =~ /([^=]*)=(.*)/\n end\n end", "def read_meta(node)\n t = decompress_revision(node)\n return {} unless has_metadata?(t)\n \n mt = t[metadata_start..(metadata_end(t) - 1)]\n mt.split(\"\\n\").inject({}) do |hash, line|\n k, v = line.split(\": \", 2)\n hash[k] = v\n hash\n end\n end", "def properties(path, ctype=DEFAULT_CTYPE)\n node = metadata(path, ctype, :properties)[:properties]\n node ? node.contents : @metadata_tree.default_data(:properties)\n end", "def metadata_properties_for(package)\n metadata = {\n \"omnibus.project\" => package.metadata[:name],\n \"omnibus.platform\" => package.metadata[:platform],\n \"omnibus.platform_version\" => package.metadata[:platform_version],\n \"omnibus.architecture\" => package.metadata[:arch],\n \"omnibus.version\" => package.metadata[:version],\n \"omnibus.iteration\" => package.metadata[:iteration],\n \"omnibus.license\" => package.metadata[:license],\n \"omnibus.md5\" => package.metadata[:md5],\n \"omnibus.sha1\" => package.metadata[:sha1],\n \"omnibus.sha256\" => package.metadata[:sha256],\n \"omnibus.sha512\" => package.metadata[:sha512],\n \"md5\" => package.metadata[:md5],\n \"sha1\" => package.metadata[:sha1],\n \"sha256\" => package.metadata[:sha256],\n \"sha512\" => package.metadata[:sha512],\n }.tap do |h|\n if build_record?\n h[\"build.name\"] = package.metadata[:name]\n h[\"build.number\"] = package.metadata[:version]\n end\n end\n\n metadata\n end", "def extractMetadata()\n Logging.LogScriptInfo \"Extract metadata from #{@logFile}...\"\n\n # Get the meta datas from the json report\n metas = { }\n metas['build_date'] = @jsonData['build_date']\n metas['build_time'] = @jsonData['build_time']\n metas['git_revision'] = @jsonData['git_revision']\n metas['options'] = @jsonData['sim']['options']\n metas['overrides'] = @jsonData['sim']['overrides']\n metas['statistics'] = @jsonData['sim']['statistics']\n @jsonData['sim']['players'].each do |player|\n if player['name'] == 'Template'\n metas['player'] = player\n end\n end\n metas['profilesets_overrides'] = { }\n @jsonData['sim']['profilesets']['results'].each do |player|\n next unless player['overrides']\n metas['profilesets_overrides'][player['name']] = player['overrides']\n end\n\n # Timestamps\n metas['build_timestamp'] = DateTime.parse(@jsonData['build_date'] + ' ' + @jsonData['build_time'] + ' ' + Time.now.strftime('%:z')).to_time.to_i\n metas['result_timestamp'] = Time.now.to_i\n\n # Add additional data\n metas.merge!(@additionalMetadata)\n\n return metas\n end", "def load_text_metadata\n str = self.client.getmd({}, @address)\n keymap = self.class.md_key_map # subclasses implement this function\n types = self.class.md_type_coercion_map # subclasses might implement this function\n \n # regular expression: matches lines with:\n # 4 whitespace characters at 
start of line\n # word containing uppercase characters and/or underscores (captured as var 1)\n # colon character immediately after that word\n # one or more whitespace characters\n # any characters following that whitespace, up to end of line (captured as var 2)\n # So, if the string matches, it gets the key as var 1, value as var 2\n re = /^\\W{4}([A-Z_]+):\\s+(.+)$/\n str.each_line do |line|\n md = re.match(line)\n next if md.nil?\n next if md.size < 3 # skip if we didn't get a value for a key (or didn't match)\n \n # insert the metadata value into the @metadata hash\n insert_metadata md[1], md[2], keymap, types\n end\n \n # note that we don't need to run this again\n @metadata_loaded = true\n end", "def properties\n @properties ||= load_files\n end", "def metadata\n return @metadata if @metadata\n return nil unless value\n value.each do |source|\n begin\n if data = Puppet::FileServing::Metadata.indirection.find(source)\n @metadata = data\n @metadata.source = source\n break\n end\n rescue => detail\n fail detail, \"Could not retrieve file metadata for #{source}: #{detail}\"\n end\n end\n fail \"Could not retrieve information from environment #{Puppet[:environment]} source(s) #{value.join(\", \")}\" unless @metadata\n @metadata\n end", "def find_metadata\n MAPPING.each do |metaname, pattern|\n metapattern = Regexp.new(\"(#{pattern})\\s*:\\s*(.*)\", Regexp::IGNORECASE)\n if text.match(metapattern)\n value = $2\n if value.match(metapattern)\n value = $2\n end\n meta[metaname] = value.strip\n end\n end\n meta\n end", "def get_meta(property, prptfile)\n meta_doc = get_file_doc(prptfile, \"meta.xml\")\n property_elem = meta_doc.elements['office:document-meta'].elements['office:meta'].elements[property]\n\n if not property_elem.nil?\n return property_elem.text\n else\n return \"\"\n end\nend", "def metadata\n @metadata ||= (\n if md = /\\<\\!\\-\\-\\-(.*?)\\-{2,3}\\>\\s*\\Z/m.match(content)\n YAML.load(md[1])\n else\n {}\n end\n )\n end", "def extract_meta_data(file_path)\n m = /(\\d{4})-(\\d{2})-(\\d{2})-(.+)\\.md/.match(file_path)\n {\n published_at: Date.new(m[1].to_i, m[2].to_i, m[3].to_i),\n slug: m[4]\n }\n end", "def metadata\n metadata = {}\n @file.data.each { |key, value| metadata[key.to_sym] = value }\n\n metadata[:type] = @file.class.name.split('::')[1].downcase\n metadata[:url] = @file.url\n\n metadata[:slug] = slug\n\n metadata[:posted_at] = @file.date.to_time.to_i if @file.respond_to? :date\n metadata[:tags] = tags\n\n metadata\n end", "def meta\n File.open(File.join(@load_dir, 'meta.json')) do |f|\n JSON.parse(f.read)\n end\n end", "def read_metadata\n metadata = { :variable_set => read_variable_set }\n set_key_map metadata[:variable_set]\n\n metadata\n end", "def metadata\n # TODO Move into {NRSER::Props::Metadata}?\n # \n unless NRSER::Props::Metadata.has_metadata? 
self\n instance_variable_set \\\n NRSER::Props::Metadata::VARIABLE_NAME,\n NRSER::Props::Metadata.new( self )\n end\n \n NRSER::Props::Metadata.metadata_for self\n end", "def external_file_attributes; end", "def metadata\n @metadata ||= lambda { read_metadata }.call\n end", "def metadata\n @metadata ||= lambda { read_metadata }.call\n end", "def metadata\n unless @metadata\n\n unless cached?\n begin\n Zip::ZipFile.open(@path) do |zip|\n zip.extract('iTunesMetadata.plist', Cache.path_to(plist))\n end\n rescue Zip::ZipError => e\n raise Invalid, e.message\n end\n end\n\n @metadata = CFPropertyList.native_types(CFPropertyList::List.new(:file => Cache.path_to(plist)).value)\n end\n\n @metadata\n end", "def metadata\n return @metadata if @metadata\n return nil unless value\n #debug 'fragment get metadata from source'\n value.each do |source|\n begin\n if data = Puppet::FileServing::Metadata.indirection.find(source, :environment => resource.catalog.environment)\n @metadata = data\n @metadata.source = source\n break\n end\n rescue => detail\n fail detail, \"Could not retrieve file metadata for #{source}: #{detail}\"\n end\n end\n fail \"Could not retrieve information from environment #{resource.catalog.environment} source(s) #{value.join(\", \")}\" unless @metadata\n @metadata\n end", "def parse_metadata(metadata)\n\t{\n :name => metadata[:name],\n :url => metadata[:href],\n :catalog_name => \"utah.gov\",\n :catalog_url => @base_uri,\n :org_type => \"governmental\",\n :organization => { :name => \"Utah\" },\n\n\t}\n end", "def extract_props()\n raw.reject{|k,_| RESERVED_PROPERTIES.include?(k) }\n end", "def metadata\n output = shell!(\"ffprobe -v quiet -print_format json -show_format -show_streams #{file.path.shellescape}\")\n json = JSON.parse(output)\n json.with_indifferent_access\n end", "def metadata\n output = shell!(\"ffprobe -v quiet -print_format json -show_format -show_streams #{file.path.shellescape}\")\n json = JSON.parse(output)\n json.with_indifferent_access\n end", "def parse_metadata(io)\n current_pos = io.pos\n io.rewind\n \n metadata = {}\n line = io.readline\n unless line =~ /MIME-Version: (\\d+\\.\\d+) \\(Generated by Mascot version (\\d+\\.\\d+)\\)/\n raise \"could not parse mime-version or mascot-version: #{line}\"\n end\n metadata[:mime_version] = $1\n metadata[:mascot_version] = $2\n \n line = io.readline\n unless line =~ /Content-Type: (.*?); boundary=(.*)/\n raise \"could not parse content-type: #{line}\"\n end\n metadata[:content_type] = $1\n metadata[:boundary] = $2\n \n io.pos = current_pos\n metadata\n end", "def read_metadata\n @client.get(metadata_path)\n end", "def process_properties(properties); end", "def metadata_info\n @metadata = Chef::Cookbook::Metadata.new\n @metadata.from_file(File.join(@opts[:path], 'metadata.rb'))\n end", "def metadata(options: DEFAULT_OPTIONS)\n output = %x(exiftool #{options} -json #{file.path.shellescape})\n json = JSON.parse(output).first\n json = json.except(\"SourceFile\")\n ExifTool::Metadata.new(json.with_indifferent_access)\n end", "def extract_key_metadata\n @ns = @doc.collect_namespaces # nokogiri cant resolve nested namespaces, fixes\n pid = @doc.xpath('//foxml:digitalObject/@PID', @ns).to_s\n # remove 'york:' prefix; is always 'york:' complicates choice of separators\n pid = pid.gsub 'york:', ''\n @key_metadata[:pid] = pid\n get_current_dc_version\n multi_value_elements = %w[creator publisher subject description contributor]\n single_value_elements = %w[title date]\n single_value_elements.each do |sve|\n 
extract_single_valued_element(sve)\n end\n multi_value_elements.each do |mve|\n extract_multivalued_element(mve)\n end\n extract_qualification_names\n # extract_qualification_levels\n extract_modules\n extract_rights\n @key_metadata\n end", "def parse_metadata(io)\n # Simon Chiang wrote this\n current_pos = io.pos\n io.rewind\n\n metadata = {}\n line = io.readline\n unless line =~ /MIME-Version: (\\d+\\.\\d+) \\(Generated by Mascot version (\\d+\\.\\d+)\\)/\n raise \"could not parse mime-version or mascot-version: #{line}\"\n end\n metadata[:mime_version] = $1\n metadata[:mascot_version] = $2\n\n line = io.readline\n unless line =~ /Content-Type: (.*?); boundary=(.*)/\n raise \"could not parse content-type: #{line}\"\n end\n metadata[:content_type] = $1\n metadata[:boundary] = $2\n\n io.pos = current_pos\n metadata\n end", "def extract_metadata!\n unless self.title\n Mp3Info.open(self.server_path) do |mp3|\n self.title = mp3.tag.title\n self.artist = mp3.tag.artist\n self.album = mp3.tag.album\n self.year = mp3.tag.year\n self.comm = mp3.tag2.comm\n self.tcom = mp3.tag2.tcom\n self.tcon = mp3.tag2.tcon\n self.tcop = mp3.tag2.tcop\n self.tit2 = mp3.tag2.tit2\n self.tit3 = mp3.tag2.tit3\n self.tcat = mp3.tag2.tcat\n self.trck = mp3.tag2.trck\n self.tyer = mp3.tag2.tyer\n self.tgid = mp3.tag2.tgid\n self.wfed = mp3.tag2.wfed\n end\n\n self.title = \"Untitled\" unless self.title\n\n self.save\n end\n end", "def read_meta_info\n if meta_info_file_pathname.exist?\n inode, bytes_read = meta_info_file_pathname.read.strip.split(':').map(&:to_i)\n {\n inode: inode,\n bytes_read: bytes_read\n }\n else\n {\n inode: nil,\n bytes_read: 0\n }\n end\n end", "def raw_properties\n parse_raw_map descriptor.values\n end", "def extract_metadata(content)\n document = parse_kramdown(content)\n toc = ::Kramdown::Converter::Toc.convert(document.root)\n toc_items = toc[0].children.select { |el| el.value.options[:level] == 2 }.map do |t| \n {:id => t.attr[:id], :text => t.value.children.first.value}\n end\n metadata = document.root.options[:metadata]\n metadata[:toc] = toc_items\n metadata[:converted] = document.to_html\n metadata[:technologies] = metadata[:technologies].split(\",\").collect {|tech| tech.strip}\n metadata[:author] = metadata[:author].split(',').first if metadata[:author]\n metadata[:product] ||= @product\n metadata[:experimental] ||= @experimental\n metadata[:experimental] ||= false\n metadata[:level] = 'Beginner'\n metadata\n end", "def get_metadata_source_info(ns)\n METADATA_FIELDS[ns][:info]\n end", "def extract_metadata\n path = audio.queued_for_write[:original].path\n open_opts = { :encoding => 'utf-8' }\n TagLib::FileRef.open(path) do |fileref|\n tag = fileref.tag\n properties = fileref.audio_properties\n self.update_attributes(:artist => tag.artist,:album=> tag.album,:title => tag.title, :genre => tag.genre, :track_number => tag.track, :year_of_release => tag.year, :comments => tag.comment,:bitrate => properties.bitrate,:no_of_channels => properties.channels,:length=> properties.length,:sample_rate=> properties.sample_rate)\n end\n end", "def svn_properties\n props = {}\n data[1].each do |name, val|\n if name =~ /\\A#{config.property_prefix}(.*)/\n props[$1] = val\n end\n end\n props\n end", "def metadata\n attributes['metadata'] ||= {}\n attributes['metadata']\n end", "def extract_attributes(file_name)\n extract_publish_date(file_name)\n extract_tags(file_name)\n extract_filter(file_name)\n extract_title_and_content(file_name)\n @path = Pathname.new(file_name)\n @file = @path.to_s\n @title = 
@file.gsub('_', ' ').capitalize if @title.to_s.empty?\n @summary = @content.match(%r{<p>.*</p>}).to_s\n self\n end", "def extract_metadata(io, context = {})\n {\n \"filename\" => extract_filename(io),\n \"size\" => extract_size(io),\n \"mime_type\" => extract_mime_type(io),\n }\n end", "def get_properties\n \n begin\n \n if @filename == \"\"\n raise \"Base file not specified.\"\n end\n \n str_uri = $productURI + \"/words/\" + @filename + \"/documentProperties\"\n signed_str_uri = Common::Utils.sign(str_uri)\n \n response_stream = RestClient.get(signed_str_uri,{:accept=>\"application/json\"})\n \n stream_hash = JSON.parse(response_stream)\n \n if(stream_hash[\"Code\"] == 200)\n return stream_hash[\"DocumentProperties\"][\"List\"]\n else\n return false\n end\n \n rescue Exception=>e\n print e\n end\n \n end", "def initialize(file)\n @file = file\n @properties = {}\n IO.foreach(file) do |line|\n # skip the commented lines\n next if line =~ /^\\s*?#/\n @properties[$1.strip] = $2 if line =~ /([^=]*)=(.*)\\/\\/(.*)/ || line =~ /([^=]*)=(.*)/\n end\n end", "def extract_metadata(hash)\n new_metadata = hash.each_with_object({}) do |(k,v), hash|\n if key = k[/^x-ms-meta-(?<key>.*)?/, :key]\n hash[key] = v\n end\n end\n\n metadata.replace(new_metadata)\n end", "def load_workset( filename )\n\n md_filename = ''\n asset_files = []\n File.open( filename, 'r').each do |line|\n if /^metadata : /.match( line )\n md_filename = /^metadata : (.*)$/.match( line ).captures[ 0 ]\n end\n\n if /^asset : /.match( line )\n asset_files << /^asset : (.*)$/.match( line ).captures[ 0 ]\n end\n end\n\n return md_filename, asset_files\n end", "def get_properties\n xml = client.call(\"#{attributes[:url]}/property\").parsed_response\n xml.css('properties property').map { |p| Vebra::Property.new(p, self) }\n end", "def parse_metadata(file)\n file_name = File.basename(file)\n puts \"\\n#{Time.now.strftime('%T')} Parsing #{file_name}\" unless Rails.env.test?\n attrs = parser.new(file).attributes\n\n if attrs.blank?\n errors << \"Failed to parse file: #{file_name}\"\n elsif record_exists?(attrs)\n # Don't re-import the record if this record already\n # exists in fedora.\n skipped_imports << file_name\n else\n create_record(attrs.merge(metadata_file: file, visibility: visibility, admin_set: admin_set))\n successful_imports << file_name\n end\n rescue => e\n errors << \"#{file_name}: #{e}\"\n end", "def import_properties\n downloader = PropertyDownloader.new(sftp_details: sftp_details)\n # As there is no delta feed on a Sunday, we need to download the full feed\n # instead for that day. As we download yesterday's data, we need to pull\n # the full Sunday feed on a Monday.\n archive = if Date.current.monday?\n downloader.download_full\n else\n downloader.download_delta\n end\n PropertyExtractor.new(path: archive).extract do |extracted|\n PropertyFileImporter.new(path: extracted).import\n end\n end", "def detect_properties\n @record.send( :\"#{@column}_content_type=\", `file -bp --mime-type '#{access_path}'`.to_s.strip )\n @record.send( :\"#{@column}_size=\", File.size(access_path) )\n rescue NoMethodError\n nil\n end", "def apply_extracted_metadata\n\n return if content_blob.nil? 
or content_type.nil?\n\n metadata = Workflow.extract_metadata(:type => content_type.title, :data => content_blob.data)\n\n self.title = metadata[\"title\"] if metadata[\"title\"] and title.nil?\n self.body = metadata[\"description\"] if metadata[\"description\"] and body.nil?\n self.image = metadata[\"image\"] if metadata[\"image\"] and image.nil?\n self.svg = metadata[\"svg\"] if metadata[\"svg\"] and svg.nil?\n end", "def apply_extracted_metadata\n\n return if content_blob.nil? or content_type.nil?\n\n metadata = Workflow.extract_metadata(:type => content_type.title, :data => content_blob.data)\n\n self.title = metadata[\"title\"] if metadata[\"title\"] and title.nil?\n self.body = metadata[\"description\"] if metadata[\"description\"] and body.nil?\n self.image = metadata[\"image\"] if metadata[\"image\"] and image.nil?\n self.svg = metadata[\"svg\"] if metadata[\"svg\"] and svg.nil?\n end", "def initialize file\n EasyTranslate.api_key = 'AIzaSyDrbD0AfKHiMZTYoite-ec4byLNlPxoX8k'\n @file = file\n @properties = {}\n \n File.open(file, \"r:UTF-8\").each do |line| \n if line.include? \":\"\n splited_line = line.encode!('UTF-8').strip.split(':')\n @properties[splited_line[0]] = splited_line[1] \n end \n end\n end", "def extract_contents\n @play_time = contents[:playtime_s]\n @gold = contents[:gold]\n @map_name = contents[:map_name]\n @actors = contents[:characters]\n @player = contents[:player_name]\n @story = contents[:story]\n end", "def extract_metadata_for_video url\n mfile = metadata_file_for(url)\n unless File.file? mfile\n\n # self << url\n # self << %w[ skip-download write-info-json ignore-errors ]\n # self << { output: mfile.gsub(/\\.info\\.json$/, '') }\n # self.run\n\n # Run directly:\n command = \"#{url} --skip-download --write-info-json --ignore-errors\"\n command += \" -o '#{mfile.gsub(/\\.info\\.json$/, '')}'\"\n delegator.run command\n end\n JSON.parse File.read(mfile) rescue nil\n end", "def metadata\r\n self.class.service_instance.get_blob_properties(path)\r\n end", "def parse_props(pom_doc)\n return pom_doc.css('properties').children.map {|c| [c.name, c.text]}.to_h\n end", "def yaml_properties\n if directory?\n fs_yaml_path = fs_path.blank? ? 'meta.yml' : File.join(fs_path, 'meta.yml')\n yaml_path = File.join(@path, 'meta.yml')\n repos.stat(fs_yaml_path, revision) ?\n YAML.load(self.class.new(@changeset, yaml_path).body) :\n {}\n else\n has_yaml_props? ?\n YAML.load(yaml_split[0]) :\n {}\n end\n end", "def initialize file\n EasyTranslate.api_key = 'AIzaSyDrbD0AfKHiMZTYoite-ec4byLNlPxoX8k'\n @file = file\n @properties = []\n \n File.open(file, \"r:UTF-8\").each do |line| \n @properties << line \n end\n end", "def aws_get_metadata\n murl = 'http://169.254.169.254/latest/meta-data/'\n result = self.aws_get_url(murl)\n metadata = Hash.new()\n\n # TODO this isn't entirely right.. 
if the element ends in '/', it's actually another level of hash..\n result.split(\"\\n\").each do |element|\n metadata[element] = self.aws_get_url(sprintf('%s%s', murl, element))\n end\n\n metadata\n end", "def parse_metadata_body(body)\n body.lines.each_with_object({}) do |line, hsh|\n k, v = line.strip.split\n hsh[k.to_sym] = v\n end\n end", "def convert_to_property\n json = JSON.load_file(@file_path, symbolize_names: true)\n properties = []\n json.each {|prop| \n new_property = Property.new(prop[:type], prop[:weekly_rent], prop[:landlord], prop[:tenant], prop[:address], prop[:status])\n properties.push(new_property)\n }\n return properties\n end", "def extract_metadata(markdown_string)\n metadata_results = OpenStruct.new\n return markdown_string unless markdown_string.include? \"**METADATA**\"\n result = markdown_string.split(\"**METADATA**\").map do |section|\n metadata, content = section.split(\"****\")\n parse_metadata_section(section).each do |key,results_array|\n metadata_results[key] ||= []\n metadata_results[key].concat results_array\n end\n content\n end.join(\"\\n\\n\")\n [result, metadata_results]\n end", "def extract_metadata\n return unless audio?\n path = attachment.queued_for_write[:original].path\n open_opts = { :encoding => 'utf-8' }\n Mp3Info.open(path, open_opts) do |mp3info|\n self.metadata = mp3info.tag\n end\n end", "def extract_metadata\n return unless audio?\n path = upload.queued_for_write[:original].path\n open_opts = { :encoding => 'utf-8' }\n Mp3Info.open(path, open_opts) do |mp3info|\n self.metadata = mp3info.tag\n end\n end", "def extract_metadata\n return unless audio?\n path = upload.queued_for_write[:original].path\n open_opts = { :encoding => 'utf-8' }\n Mp3Info.open(path, open_opts) do |mp3info|\n self.metadata = mp3info.tag\n end\n end", "def get_properties\n \n begin\n \n if @filename == ''\n raise 'Base file not specified.'\n end\n \n str_uri = $product_uri + '/slides/' + @filename + '/documentProperties'\n signed_str_uri = Aspose::Cloud::Common::Utils.sign(str_uri)\n \n response_stream = RestClient.get(signed_str_uri,{:accept=>'application/json'})\n \n stream_hash = JSON.parse(response_stream)\n \n if(stream_hash['Code'] == 200)\n return stream_hash['DocumentProperties']['List']\n else\n return false\n end\n \n rescue Exception=>e\n print e\n end\n \n end", "def compile_metadata(path = \"PBS/metadata.txt\")\r\n GameData::Metadata::DATA.clear\r\n GameData::MapMetadata::DATA.clear\r\n # Read from PBS file\r\n File.open(path, \"rb\") { |f|\r\n FileLineData.file = path # For error reporting\r\n # Read a whole section's lines at once, then run through this code.\r\n # contents is a hash containing all the XXX=YYY lines in that section, where\r\n # the keys are the XXX and the values are the YYY (as unprocessed strings).\r\n pbEachFileSection(f) { |contents, map_id|\r\n schema = (map_id == 0) ? 
GameData::Metadata::SCHEMA : GameData::MapMetadata::SCHEMA\r\n # Go through schema hash of compilable data and compile this section\r\n for key in schema.keys\r\n FileLineData.setSection(map_id, key, contents[key]) # For error reporting\r\n # Skip empty properties, or raise an error if a required property is\r\n # empty\r\n if contents[key].nil?\r\n if map_id == 0 && [\"Home\", \"PlayerA\"].include?(key)\r\n raise _INTL(\"The entry {1} is required in {2} section 0.\", key, path)\r\n end\r\n next\r\n end\r\n # Compile value for key\r\n value = pbGetCsvRecord(contents[key], key, schema[key])\r\n value = nil if value.is_a?(Array) && value.length == 0\r\n contents[key] = value\r\n end\r\n if map_id == 0 # Global metadata\r\n # Construct metadata hash\r\n metadata_hash = {\r\n :id => map_id,\r\n :home => contents[\"Home\"],\r\n :wild_battle_BGM => contents[\"WildBattleBGM\"],\r\n :trainer_battle_BGM => contents[\"TrainerBattleBGM\"],\r\n :wild_victory_ME => contents[\"WildVictoryME\"],\r\n :trainer_victory_ME => contents[\"TrainerVictoryME\"],\r\n :wild_capture_ME => contents[\"WildCaptureME\"],\r\n :surf_BGM => contents[\"SurfBGM\"],\r\n :bicycle_BGM => contents[\"BicycleBGM\"],\r\n :player_A => contents[\"PlayerA\"],\r\n :player_B => contents[\"PlayerB\"],\r\n :player_C => contents[\"PlayerC\"],\r\n :player_D => contents[\"PlayerD\"],\r\n :player_E => contents[\"PlayerE\"],\r\n :player_F => contents[\"PlayerF\"],\r\n :player_G => contents[\"PlayerG\"],\r\n :player_H => contents[\"PlayerH\"]\r\n }\r\n # Add metadata's data to records\r\n GameData::Metadata.register(metadata_hash)\r\n else # Map metadata\r\n # Construct metadata hash\r\n metadata_hash = {\r\n :id => map_id,\r\n :outdoor_map => contents[\"Outdoor\"],\r\n :announce_location => contents[\"ShowArea\"],\r\n :can_bicycle => contents[\"Bicycle\"],\r\n :always_bicycle => contents[\"BicycleAlways\"],\r\n :teleport_destination => contents[\"HealingSpot\"],\r\n :weather => contents[\"Weather\"],\r\n :town_map_position => contents[\"MapPosition\"],\r\n :dive_map_id => contents[\"DiveMap\"],\r\n :dark_map => contents[\"DarkMap\"],\r\n :safari_map => contents[\"SafariMap\"],\r\n :snap_edges => contents[\"SnapEdges\"],\r\n :random_dungeon => contents[\"Dungeon\"],\r\n :battle_background => contents[\"BattleBack\"],\r\n :wild_battle_BGM => contents[\"WildBattleBGM\"],\r\n :trainer_battle_BGM => contents[\"TrainerBattleBGM\"],\r\n :wild_victory_ME => contents[\"WildVictoryME\"],\r\n :trainer_victory_ME => contents[\"TrainerVictoryME\"],\r\n :wild_capture_ME => contents[\"WildCaptureME\"],\r\n :town_map_size => contents[\"MapSize\"],\r\n :battle_environment => contents[\"Environment\"]\r\n }\r\n # Add metadata's data to records\r\n GameData::MapMetadata.register(metadata_hash)\r\n end\r\n }\r\n }\r\n # Save all data\r\n GameData::Metadata.save\r\n GameData::MapMetadata.save\r\n Graphics.update\r\n end", "def metadata\n if config.metadata.include?(:all)\n [:pid, :date, :time, :file]\n else\n config.metadata\n end\n end", "def metadata\n if config.metadata.include?(:all)\n [:pid, :date, :time, :file]\n else\n config.metadata\n end\n end", "def each_property\n IO.readlines(path).each do |l|\n begin\n stripped = l.strip.encode(\"UTF-8\")\n if stripped.match(/^\\-\\-\\s+sem\\.attribute\\./)\n stripped.sub!(/^\\-\\-\\s+sem\\.attribute\\./, '')\n name, value = stripped.split(/\\=/, 2).map(&:strip)\n yield name, value\n end\n rescue Encoding::InvalidByteSequenceError\n # Ignore - attributes must be in ascii\n end\n end\n rescue 
Encoding::InvalidByteSequenceError\n # Ignore - file must be in ascii in order to parse attributes\n end", "def metadata\n @data[:metadata]\n end", "def read_metadata\n raise NotImplementedError.new 'This is only a function body for documentation'\n end", "def get_properties\n\n begin\n str_uri = $product_uri + '/words/' + @filename + '/documentProperties'\n signed_str_uri = Aspose::Cloud::Common::Utils.sign(str_uri)\n\n response_stream = RestClient.get(signed_str_uri, {:accept => 'application/json'})\n\n stream_hash = JSON.parse(response_stream)\n\n return stream_hash['DocumentProperties']['List'] if stream_hash['Code'] == 200\n false\n\n rescue Exception => e\n print e\n end\n\n end", "def metadata_file\n @metadata_file ||= site.in_source_dir(\".jekyll-metadata\")\n end", "def parse_metadata\n meta=Hash.new\n meta['hostname'][email protected]\n meta['name'][email protected]\n meta['spoof'][email protected]\n meta['type'][email protected]\n meta['name2'][email protected]\n meta['units'][email protected]\n [email protected]\n\n case slope\n when 0\n meta['slope']= 'zero'\n when 1\n meta['slope']= 'positive'\n when 2\n meta['slope']= 'negative'\n when 3\n meta['slope']= 'both'\n when 4\n meta['slope']= 'unspecified'\n end\n\n meta['tmax'][email protected]\n meta['dmax'][email protected]\n [email protected]\n meta['nrelements']=nrelements\n unless nrelements.nil?\n extra={}\n for i in 1..nrelements\n [email protected]\n extra[name][email protected]\n end\n meta['extra']=extra\n end\n return meta\n end" ]
[ "0.7063772", "0.6791552", "0.67238957", "0.66800815", "0.66672", "0.6636372", "0.6631644", "0.6623437", "0.6492731", "0.6476489", "0.6413062", "0.6380664", "0.63793457", "0.6362726", "0.6352782", "0.6352782", "0.62828296", "0.62828296", "0.6239153", "0.61996365", "0.61969566", "0.6190529", "0.61034065", "0.610058", "0.6100073", "0.6089975", "0.60843384", "0.6074358", "0.6050932", "0.6043783", "0.60385036", "0.6031016", "0.6019667", "0.6013219", "0.5987073", "0.59850556", "0.5977733", "0.5977267", "0.59699756", "0.59634423", "0.59596574", "0.59257686", "0.59257686", "0.59201264", "0.59165597", "0.59112644", "0.5902783", "0.5901466", "0.5901466", "0.58978957", "0.5892358", "0.5881737", "0.5869213", "0.58610284", "0.58565044", "0.58428174", "0.58186954", "0.5810251", "0.5801311", "0.57986253", "0.57777154", "0.57756174", "0.57720757", "0.576436", "0.57605654", "0.5757329", "0.57478344", "0.57465637", "0.57444924", "0.57413036", "0.5741028", "0.5728763", "0.57219714", "0.5706081", "0.57020795", "0.57020795", "0.5695084", "0.5684991", "0.5683182", "0.5678217", "0.56743443", "0.567146", "0.56629777", "0.56566894", "0.565133", "0.5650427", "0.56403387", "0.5639246", "0.5632544", "0.5632544", "0.5621012", "0.56205076", "0.5607543", "0.5607543", "0.5604042", "0.56031233", "0.55987823", "0.55973035", "0.5593259", "0.5583758" ]
0.61722845
22
Retrieves data from PCDM::File
def metadata_xml Nokogiri::XML(original_file.content) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_data(file)\n f = File.open(file, 'rb')\n buffer = f.read\n f.close\n\n buffer\n end", "def data\n File.read(path)\n end", "def file_data\n @client.get_file @file_url\n end", "def read_data_file(path); end", "def read\n file\n end", "def read\n\t\t@file_content = File.open(\"/home/calin/football/football.dat\",\"r\")\n\tend", "def read\n file.read\n end", "def get_data\n json_file = Egd::Builder.new(File.read(@file)).to_json\n data = JSON.parse(json_file)\n end", "def get\n file\n end", "def data\n response = $postgres.exec_prepared('wsfile_get', [self.id], 1)\n response.to_a.length==1 && response[0][\"data\"] || \"\"\n end", "def data\n if @path\n File.open(@path, \"rb\"){|f| f.read} rescue nil\n else\n @memory_io.string.clone\n end\n end", "def read_contents\n\n #puts \"pofr file #{@file_blob.filename}\"\n\n file_lines=[]\n @file_blob.open do |file|\n File.open(file){|x| file_lines = x.readlines}\n puts file_lines[0]\n puts file_lines.last\n end\n\n if @file_blob.filename.extension == \"out\" # GNOM\n getDataLinesFromGnom(file_lines)\n elsif @file_blob.filename.extension == \"dat\" # scatter\n puts \"reading file @file #{@file_blob.filename}\"\n getDataLinesFromScatter(file_lines)\n end\n\n @dmax = @r_values.last\n @pr_values.each do |y|\n @pr_max = (y[1] > @pr_max) ? y[1] : @pr_max\n @pr_min = (y[1] < @pr_min) ? y[1] : @pr_min\n end\n\n end", "def data; file_log.read(file_node); end", "def content\n return IO.read(@file)\n end", "def read\n @fileobj.seek @data_offset\n @data = @fileobj.read @data_size\n end", "def read\n return nil if FileTest.exist?(@fname)==false\n\n open(@fname,\"rb\") do |file| #Read & Binary mode\n @data = Marshal.load(file)\n end\n @data\n end", "def read file\n File.open file\n end", "def read\n return unless ::File.exist?(@file)\n\n @data = Bencode.decode(::File.read(@file))\n end", "def _get_file_contents(file)\n raise InvalidPath, \"connection file doesn't exist\" unless File.file?(file)\n _parse File.read(file)\n end", "def get_program_file_data(program_file)\n File.open(program_file,'rb'){|f| f.read}\nend", "def contents\n file_content = self.class.mongo_client.database.fs.find_one(:_id=>BSON::ObjectId.from_string(@id))\n if file_content\n buffer = \"\"\n file_content.chunks.reduce([]) do |x,chunk|\n buffer << chunk.data.data\n end\n return buffer\n end\n end", "def file\n @file\n end", "def dcm\n @dcm ||= DICOM::DObject.read(File.join(file_system.path, path))\n end", "def contents\n\t\tfin = File.new(@filename,'r')\n\t\tc = fin.read\n\t\tfin.close\n\t\treturn c\n\tend", "def getFileContent(dir, file):Array\n arr = Array.new\n File.open(\"#{dir}/#{file}\", \"r\").each do |line|\n arr.push line\n end\n arr\n end", "def file\n return @file\n end", "def file\n return @file\n end", "def file\n @file\n end", "def file\n @file\n end", "def file\n @file\n end", "def file\n @file\n end", "def get_file(filename)\n file = File.open(filename, \"r\")\n file.each_line do |line|\n line = line.delete \"\\n\"\n @data.push(line)\n end\n file.close\n @data\n end", "def read_file(file)\n travs = \"\"\n travs << \"../\" * datastore['DEPTH']\n travs << file\n\n print_status(\"#{@peer} - Retrieving file contents...\")\n\n connect\n req = \"GET #{normalize_uri(target_uri.path, \"gefebt.exe\")}?substitute.bcl+FILE=#{travs} HTTP/1.0\\r\\n\\r\\n\"\n sock.put(req)\n res = sock.get_once\n disconnect\n\n if res and res =~ /HTTP\\/1\\.0 200 OK/\n return res\n else\n return nil\n end\n\n end", "def contents\n # give the string contents of the file\n @file.seek 0\n @file.read 
@filesize\n end", "def load_data(file)\n return [] unless File.exist?(file)\n @crypto.decrypt(File.open(file, 'rb').read, password: @passphrase).to_s.each_line.to_a\n rescue GPGME::Error::NoData\n []\n end", "def file\n @file\n end", "def read_binary(file); end", "def read_file(filename)\n file = File.open(filename, 'r')\n data = file.read\n file.close\n data\nend", "def read_file(file_name)\n file = File.open(file_name, \"r\")\n data = file.read\n file.close\n return data\nend", "def data\n return @data unless @data.nil?\n\n if File.exists?(@path)\n @data = File.read(@path)\n else\n raise FileNotFound.new(@path)\n end\n end", "def contents\n\tRails.logger.debug {\"getting gridfs content #{@id}\"}\n f=self.class.mongo_client.database.fs.find_one(:_id=>BSON::ObjectId.from_string(@id))\n # read f into buffer, array of chunks is reduced to single buffer and returned to caller.\n # this is how file is broken apart and put together and assembled. Buffer is sent back to browser\n # to disaply on the screen\n if f \n buffer = \"\"\n f.chunks.reduce([]) do |x,chunk| \n buffer << chunk.data.data \n end\n return buffer\n end \n\nend", "def get_file_data(file_id)\n return $db.execute(\"SELECT * FROM files WHERE file_id = ?\", file_id).first\nend", "def read_file()\n CSV.foreach(@filename, {:headers => true, :col_sep => \"\\t\"}) do |row|\n @fpkm[row['Isoform_name']] = row['FPKM'].to_f\n end\n end", "def read()\n @file.seek(@address)\n @file.getc\n end", "def file_details\n return @file_details\n end", "def data\n dis_data.read\n end", "def contents\n\t\tconnection.file_contents(full_path)\n\tend", "def contents\n\t\tconnection.file_contents(full_path)\n\tend", "def read\n object = uy_connection.get(@path)\n object.data\n end", "def get\n file = XFile.get(params[:id])\n raise RequestError.new(:file_not_found, \"File not found\") unless file\n raise RequestError.new(:bad_access, \"No access\") unless file.users.include? session[:user]\n raise RequestError.new(:bad_param, \"Can't get a folder\") if file.folder\n raise RequestError.new(:file_not_uploaded, \"File not completely uploaded\") unless file.uploaded\n raise RequestError.new(:bad_part, \"Incorrect content\") if file.content.nil?\n\n @result = retrieve(file, params[:part].to_i) if (!params[:direct] || params[:direct] != \"true\")\n \tsend_data(full_retrieve(file), filename: file.name) if (params[:direct] == \"true\")\n end", "def get_data\n buffer = 4096\n\n return tmp if read(@filehandle, tmp, buffer)\n\n # No data to return\n return nil\n end", "def read_metadata; end", "def read_file\n # returns CSV StringIO data\n # e.g. 
Paperclip.io_adapters.for(file).read\n fail \"read_file method is required by #{self.class.name}\"\n end", "def get_file_details(file_id)\n begin\n api_result = @client.metadata(file_id, 1, true)\n rescue\n return nil\n end\n \n if api_result.present?\n self.item_into_standard_format(api_result, true)\n else\n nil\n end\n end", "def data\n data = @repo.working_read(@path) rescue nil\n data\n end", "def read_file(file, context); end", "def get_file(id)\n id = self.to_id(id)\n self.grid.get(id).read\n end", "def get\n File.read(path)\n end", "def read_file(filename); end", "def read_file_source\n %Q|\n var readFile = function(path) {\n try {\n var file = Components.classes[\"@mozilla.org/file/local;1\"]\n .createInstance(Components.interfaces.nsILocalFile);\n file.initWithPath(path);\n\n var fileStream = Components.classes[\"@mozilla.org/network/file-input-stream;1\"]\n .createInstance(Components.interfaces.nsIFileInputStream);\n fileStream.init(file, 1, 0, false);\n\n var binaryStream = Components.classes[\"@mozilla.org/binaryinputstream;1\"]\n .createInstance(Components.interfaces.nsIBinaryInputStream);\n binaryStream.setInputStream(fileStream);\n var array = binaryStream.readByteArray(fileStream.available());\n\n binaryStream.close();\n fileStream.close();\n file.remove(true);\n\n return array.map(function(aItem) { return String.fromCharCode(aItem); }).join(\"\");\n } catch (e) { return \"\"; }\n };\n |\n end", "def get_file_contents(file_path)\n input_file = File.open(file_path, 'r')\n input_file_contents = input_file.read\n input_file.close\n input_file_contents\n end", "def contents\n read\n end", "def data\n @data ||= RJGit::Porcelain.cat_file(@jrepo, @jblob) \n end", "def read_file()\n CSV.foreach(@filename, {:headers => true, :col_sep => \"\\t\"}) do |row|\n @fpkm[row['tracking_id']] = row['FPKM'].to_f\n end\n end", "def read; end", "def read; end", "def read; end", "def read; end", "def read; end", "def read; end", "def read; end", "def read_data\n unpacker.read\n end", "def file_content\n self.lines.join\n end", "def fread(file)\n\tf = open(file, \"r\")\n\tres = f.read\n\tf.close\n\treturn res\nend", "def full_retrieve file\n filedata = ''\n parts = (file.content.size / PART_SIZE) + (!!(file.content.size % PART_SIZE) ? 
1 : 0)\n parts.times do |part|\n filedata += retrieve(file, part)\n end\n filedata\n end", "def data\n raise NotImplementedError.new(\"data() must be implemented by subclasses of AbstractVersionedFile.\")\n end", "def encoded_file_data(item = nil)\n get_file_data(item)[:data].presence\n end", "def fetch_data(user)\n array = read_from_file(\"userdata/#{user}.txt\")\n array[0]\nend", "def parse_file\n @filecontent ||= File.read(@filepath)\n end", "def get_data_from_file(file)\r\n f = File.open(file, \"r\")\r\n gene = Array.new\r\n f.each_line do |line|\r\n gene << line.delete(\"\\n\") # We remove end of line \\n \r\n end\r\n return gene\r\nend", "def read_contents\n\t\treturn File.open(self.file_name).read.lines.map(&:chomp) if self.file_name\n\tend", "def load_file(file_name)\n\tpatients_info = {}\n\tFile.open(file_name).each do |line|\n\t\tline.chomp!\n\t\tfields = line.split(\"\\t\")\n\t\tchrom_number = fields.delete_at(1)\n\t\tfields = fields[0..2]\n\t\tfields.map!{|a| a.to_i}\n\t\tquery = patients_info[chrom_number]\n\t\tif query.nil?\n\t\t\tpatients_info[chrom_number] = [fields]\n\t\telse\n\t\t\tquery << fields\n\t\tend\n\tend\n\treturn patients_info\nend", "def data name\n File.read data_path(name)\nend", "def getFileContent(file_path)\n file = File.read(file_path)\n data_hash = JSON.parse(file)\n return data_hash\nend", "def readData(filename)\n\t\t@dataArray = Array.new\n\t\tfile = File.open(filename)\n\t\t\n\t\tfile.each_line do |line|\n\t\t\tarray = line.split(/,/)\n\t\t\tentry = Struct::ContactEntry.new(array[0], array[1], array[2])\n\t\t\[email protected](entry)\n\t\tend\n\t\n\t\tfile.close\n\t\t\n\tend", "def read_save_data(file)\r\n read_characters(file)\r\n read_frame(file)\r\n read_data(file)\r\n read_edit\r\n read_refresh\r\n end", "def contents\n if persisted?\n f=Photo.mongo_client.database.fs.find_one(:_id=>BSON::ObjectId.from_string(@id))\n if f \n buffer = \"\"\n f.chunks.reduce([]) do |x,chunk| \n buffer << chunk.data.data \n end\n return buffer\n end \n end\n end", "def file_content(path)\n cont = \"\"\n File.open(path) do |fil|\n cont = fil.read\n end\n cont\nend", "def read(files); end", "def read(files); end", "def get_card_data(multiverse_id)\n card = {}\n \n @pages.each do |page|\n #puts \"Opening: #{path_to(multiverse_id, page)}\"\n card[page] = File.open(path_to(multiverse_id, page)).read\n end\n \n return card\n end", "def get_data(key) \n filename = _find_file_key(key)\n return nil if filename.nil?\n file = File.open(@cache_dir + filename, \"rb\")\n contents = file.read\n return Marshal.load(contents)\n end", "def data(type:)\n return nil unless (part = @parts[type.to_s])\n\n File.open(@path) do |file|\n # Seek to the image data position\n file.pos = part[:offset]\n\n # Read the length of the image data\n file.read(part[:length])\n end\n end", "def contents\n f = self.class.mongo_client.database.fs.find_one({:_id=>BSON::ObjectId.from_string(@id)})\n if f\n buffer = \"\"\n f.chunks.reduce([]) do |x, chunk|\n buffer << chunk.data.data\n end\n return buffer\n end\n end", "def data file\n json = self.json file\n width, height = self.dims file\n [json, width, height]\n end", "def read_file(file)\n File.read(file)\nend", "def read(path); end", "def get_file(file_id)\n\tputs \"Getting file: \" + file_id\n\tresponse = request_get('/api/partner/file/' + file_id)\n\tputs response.body\nend", "def get_file_contents(fname)\n f_h = File.open(fname,'r')\n readbuf = f_h.read()\n# puts \"Read #{readbuf.length()} bytes from #{fname}.\"\n f_h.close()\n return readbuf\nend", 
"def raw_contents\n File.read(path)\n end", "def get_card_data(multiverse_id)\n card = {}\n \n @pages.each do |page|\n puts \"Opening: #{path_to(multiverse_id, page)}\"\n card[page] = File.open(path_to(multiverse_id, page)).read\n end\n \n return card\n end" ]
[ "0.7153853", "0.6887972", "0.67489123", "0.6554701", "0.65546364", "0.649592", "0.6493495", "0.63358885", "0.6288144", "0.62759733", "0.6240904", "0.62229455", "0.6179153", "0.61727494", "0.6171157", "0.61616135", "0.6152086", "0.6131227", "0.61237633", "0.60918105", "0.6047684", "0.60303754", "0.6027645", "0.602746", "0.6023293", "0.6017256", "0.6017256", "0.60131013", "0.60131013", "0.60131013", "0.60131013", "0.6011557", "0.6004931", "0.5992809", "0.59901005", "0.5988801", "0.59815186", "0.5973895", "0.5973229", "0.5972916", "0.59589684", "0.5948368", "0.5946364", "0.5943218", "0.59403765", "0.59230787", "0.591447", "0.591447", "0.5890424", "0.5871921", "0.5865681", "0.585571", "0.5854179", "0.58536655", "0.5849538", "0.5845378", "0.5836912", "0.5809584", "0.58060354", "0.5797498", "0.57863384", "0.5782524", "0.5778883", "0.5777084", "0.57733274", "0.57733274", "0.57733274", "0.57733274", "0.57733274", "0.57733274", "0.57733274", "0.57731956", "0.57708925", "0.5768833", "0.5755301", "0.57544684", "0.57527816", "0.574688", "0.57273364", "0.57049555", "0.57022595", "0.56977445", "0.5693608", "0.56901747", "0.5689783", "0.56879616", "0.5684207", "0.56836855", "0.5672958", "0.5672958", "0.5668586", "0.5663935", "0.56605005", "0.5658068", "0.5656833", "0.5656797", "0.5653117", "0.5646747", "0.5645324", "0.5641833", "0.56411695" ]
0.0
-1
GET /property_images/1 GET /property_images/1.json
def show @property_image = PropertyImage.find(params[:id]) respond_to do |format| format.html # show.html.erb format.json { render json: @property_image } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def show\n @image = Image.find(params[:id])\n\n render json: @image\n end", "def show\n @image = Image.find(params[:id])\n\n render json: @image\n end", "def fetch_images(property_id)\n fetcher = Commands::ImageListFetcher.new(credentials)\n fetcher.call(property_id)\n end", "def show\n @property_picture = PropertyPicture.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @property_picture }\n end\n end", "def index\n @pictures = Picture.where(foodscape_id: params[:foodscape_id])\n render json: @pictures\n end", "def images\n @picturesandmeta = Pictureandmeta.all\n @kind = Kind.find(params[:kind_id])\n Rails.logger.info(\"Kind: #{@kind.inspect}\")\n end", "def index\n @images = Image.all\n\n render json: @images\n end", "def index\n if params[:single]\n\t url = \"#{API_BASE_URL}/photos/#{params[:id]}.json?token=#{ENV['API_KEY']}\"\n\t response = RestClient.get(url)\n\t @photo = JSON.parse(response.body)\n\telse\n\t url = \"#{API_BASE_URL}/photos.json?token=#{ENV['API_KEY']}\"\n response = RestClient.get(url)\n @photos = JSON.parse(response.body)\t\t \n\tend\n end", "def image_list\n @images = Picture.where(album_id: params[:album_id])\n respond_to do |format|\n format.json { render json: @images.to_json(methods: [:path])}\n end\n end", "def index\n @variant_images = VariantImage.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @variant_images }\n end\n end", "def index\n @product_images = ProductImage.where(product_uuid: params[:product_id])\n render json: @product_images, status: 200\n end", "def show\n @image_url = ImageUrl.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_url }\n end\n end", "def new\n @property_image = PropertyImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property_image }\n end\n end", "def set_image\n @image = Image.where(id: params[:id], property_id: params[:property_id]).first\n end", "def show\n @image = Image.find(params[:id])\n\t\t\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end", "def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end", "def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end", "def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end", "def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end", "def images\n IbmCloudRest.get \"#{@uri}/images\"\n end", "def index\n @images = Image.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @images }\n end\n end", "def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @image }\n end\n end", "def show\n #Finds selected image\n @image = Image.find(params[:id])\n @all_products = Product.all\n @all_properties = Property.all\n\n respond_to do |format|\n format.html # show.html.erbml.erb\n format.json { render json: @image }\n format.js\n end\n end", "def set_property_image\n @property_image = PropertyImage.find(params[:id])\n end", "def add_images\n\t\tif 
@current_user.present?\n\t\t\t@property = Property.find(params[:property_id])\n\t\t\tif @property.present?\n\t\t\t\t# if @property.images.present?\n\t\t\t\t# \[email protected]_all\n\t\t\t\t# end\n\t\t\t\tparams[:images].each { |image|\n\t i = @property.images.create(image: image)\n\t if i.save\n\t else\n\t \trender_json({\"status\" => \"Fail\", \"message\" => i.errors.full_messages.first}.to_json)\n\t \treturn\n\t end\n\t }\n\t @property.images.first.update_attributes(is_starred: true)\n\t render :file => 'api/v1/property/add_image'\n\t else\n\t \trender_json({\"status\" => \"Fail\", \"message\" => \"No property found.\"}.to_json)\n\t end\n\t\tend\n\tend", "def show\n @image_set = ImageSet.find(params[:id])\n\n render json: @image_set\n end", "def index\n @images = getmydata(\"Image\")\n pagination\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @images }\n end\n end", "def index\n @images = Image.all\n\n respond_to do |format|\n format.html # index.html.slim\n format.json { render json: @images }\n end\n end", "def show\n @image = Image.find(params[:id])\n checkaccountobject(\"images\",@image)\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end", "def img_properties\n par = params[:image]\n if !par\n # json webservice call, not coming from rails form. Change all keys from camelcased to underscores\n par = params\n %w(pageUrl originalUrl calibrateLength calibrateUnit calibrateCoords).each do |p|\n par[p.underscore] = par[p]\n par.delete(p)\n end\n end\n par\n end", "def show\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end", "def show\n @estate_agent_image = EstateAgentsImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @estate_agent_image }\n end\n end", "def images\n bugImages = BugImage.all\n render :json => bugImages.to_json\n end", "def show\n @imagem = Imagem.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @imagem }\n end\n end", "def images(params = {})\n @api.get(\"#{@api.path}/List/#{@id}/Images\", params: params)\n end", "def index\n # @images = Image.all\n # @images = Image.order(\"id\").page(params[:page])\n @images = Image.page(params[:page])\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @images }\n end\n end", "def images\n response = JSON.parse( self.class.get(\"#{BASE_URL}/contest/#{@api_key}/images\") )\n end", "def show\n @motivational_image = MotivationalImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @motivational_image }\n end\n end", "def index\n @family_images = FamilyImage.all\n\n render json: @family_images\n end", "def show\n render json: @family_image\n end", "def get_images\n @command = :get_images\n # set a flag indicating whether or not the user wants to see all images,\n # including the hidden ones\n show_hidden = (@prev_args.peek(0) == \"-i\" || @prev_args.peek(0) == \"--hidden\")\n # get the images from the RESTful API (as an array of objects)\n uri_str = ( show_hidden ? 
\"#{@uri_string}?hidden=true\" : @uri_string )\n uri = URI.parse uri_str\n result = hnl_http_get(uri)\n unless result.blank?\n # convert it to a sorted array of objects (from an array of hashes)\n sort_fieldname = 'filename'\n result = hash_array_to_obj_array(expand_response_with_uris(result), sort_fieldname)\n end\n # and print the result\n print_object_array(result, \"Images:\", :style => :table)\n end", "def list\n @api.get(\"#{@api.path}/Images\")\n end", "def index\n render json: Picture.all\n end", "def show\n @image = ImagePost.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end", "def show\n coach = Coach.find(params[:id])\n json = coach.to_json({:image_url => ActionController::Base.helpers.asset_path(coach.image_url)})\n\n respond_to do |format|\n format.json {render :json => json}\n end\n end", "def image\n @data['images']&.first\n end", "def image\n images.first\n end", "def index\n @pictures = @album.pictures #JRD111115\n\n respond_to do |format|\n format.html #index.html.erb\n format.json { render json: @pictures}\n end\n end", "def show\n @photo = Photo.find(params[:id])\n\n render json: @photo\n end", "def show\n render json: @picture\n end", "def images\n images = []\n JSON.parse(resource['/offerings/image'].get)[\"images\"].each do |img|\n images << Image.new(img)\n end\n return images\n end", "def create\n image = PropertyImage.new()\n image.photo = params[:file]\n image.property_form = @property_form\n if image.save\n respond_to do |format|\n format.json do\n json = {id: image.id}\n render json: json\n end\n end\n end\n end", "def show\n @product = Product.includes(:images).find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @product }\n end\n end", "def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @image }\n end\n end", "def images() \n uri = URI.parse(\"http://\" + @location.host + \":9292/v2/images\")\n return get_request(uri, @token)\n end", "def images\n Image.where(product_id: id)\nend", "def get_images\n {}\n end", "def getimage\n if @artist.images.empty?\n @image = \"image1.jpg\"\n else\n @image = @artist.images.first[\"url\"]\n end\n end", "def index\n @img_lists = ImgList.all\n render json: @img_lists\n end", "def show\n @product = Product.find(params[:id]) \n @admin_images = Image.admins_photos.where(product_id: @product.id).order(:title) || []\n @users_images = Image.users_photos.where(product_id: @product.id).order(:title) || []\n respond_to do |format|\n format.html\n format.js \n format.json { render json: @product }\n end\n end", "def show\n render json: @picture, status: :ok\n end", "def show\n @hotel_pic = HotelPic.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @hotel_pic }\n end\n end", "def show\n @promo = Promo.find(params[:id])\n @promo.image_url = @promo.image\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @promo }\n end\n end", "def show\n #@pet = Pet.find(params[:id])\n @pet = Pet.includes(:petphotos).find(params[:id])\n\n if (@pet.petphotos.length > 0)\n logger.debug(\"image = \" + @pet.petphotos[0].image)\n end \n\n respond_to do |format|\n format.html # show.html.erb\n #format.json { render json: @pet }\n format.json { render json: {:pet => @pet, :petphoto => @pet.petphotos[0] } }\n end\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def 
index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = Image.all\n end", "def index\n @images = @owner.images\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @images }\n end\n end", "def get_all_images(env)\n images_json = get(env, \"#{@session.endpoints[:image]}/images\")\n images = JSON.parse(images_json)['images']\n\n return images if images.empty?\n\n is_v1 = false\n unless images[0].key? 'visibility'\n is_v1 = true\n images_json = get(env, \"#{@session.endpoints[:image]}/images/detail\")\n images = JSON.parse(images_json)['images']\n end\n\n images.map do |i|\n i['visibility'] = i['is_public'] ? 'public' : 'private' if is_v1\n Image.new(i['id'], i['name'], i['visibility'], i['size'], i['min_ram'], i['min_disk'])\n end\n end", "def show\n @home_categories_products_indices_photo = Home::Categories::Products::Indices::Photo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @home_categories_products_indices_photo }\n end\n end", "def show\n @web_image = WebImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @web_image }\n end\n end", "def image(id, nsfw = false)\n img = get url: \"images/#{id}\", nsfw: nsfw\n img['image'] if img\n end", "def show\n render json:@web_display_car_image\n end", "def show\n @image_gallery = ImageGallery.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_gallery }\n end\n end", "def image\n @image ||= Image.find(params[:id])\n end", "def show\n @memberimage = Memberimage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @memberimage }\n end\n end", "def index\n @images = @owner.images\n end", "def image\n\t\t@image ||= Image.joins(:album)\n .where(id: params[:id], :albums => {:user_id => user_id})\n .take || halt(404)\n\tend", "def index\n @imagems = Imagem.where(:imovel_id => current_user.imovels {|i| i.id})\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @imagems }\n end\n end", "def index\n @photos = Photo.all\n\n render json: @photos\n end", "def property_image_params\n params.require(:property_image).permit(:image_url, :property_id)\n end", "def index\n @animal_images = AnimalImage.all\n end", "def index\n @images = Image.all\n respond_with @images\n end", "def index\n @images = Image.all()\n end", "def show\n @picture = @album.pictures.find(params[:id]) #JRD111115\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @picture}\n end\n end", "def show\n respond_to do |format|\n format.html\n format.json { render json: @article, methods: [:image_url] }\n end\n end", "def retrieveImages\n posting = Posting.find(params[:id])\n post_attachments = posting.post_attachments.all\n respond_to do |format|\n format.json { render json: {\n status: 'ok',\n rowcount: post_attachments.length,\n results: post_attachments.map { |b| {\n id: b.id,\n 
imageData: b.image,\n description: b.description\n }}\n }}\n end\n end" ]
[ "0.7120165", "0.7120165", "0.7105922", "0.70650995", "0.6934196", "0.6862584", "0.68547416", "0.68543005", "0.68533903", "0.6827014", "0.6810154", "0.67906386", "0.6789656", "0.67575496", "0.675285", "0.67427236", "0.67427236", "0.67427236", "0.67427236", "0.67240596", "0.6695053", "0.66830873", "0.6682107", "0.6668742", "0.66433156", "0.66239274", "0.66177726", "0.6611104", "0.65824395", "0.65785646", "0.6567305", "0.65588695", "0.65579754", "0.6549943", "0.6545225", "0.65428925", "0.65425074", "0.6540981", "0.65401065", "0.6528082", "0.6515836", "0.6510623", "0.6506656", "0.65031296", "0.6492952", "0.649028", "0.6485457", "0.64775", "0.64743733", "0.6472963", "0.6469898", "0.64653885", "0.646171", "0.6451055", "0.6450917", "0.64480823", "0.64390653", "0.641788", "0.64172304", "0.6411471", "0.6411351", "0.640166", "0.6394819", "0.63916075", "0.638429", "0.638429", "0.638429", "0.638429", "0.638429", "0.638429", "0.638429", "0.638429", "0.638429", "0.638429", "0.638429", "0.638429", "0.638429", "0.638429", "0.638429", "0.63840955", "0.6378891", "0.63784945", "0.6377173", "0.6371484", "0.63652694", "0.63569117", "0.6342105", "0.63383013", "0.6336524", "0.6325519", "0.6321721", "0.63205594", "0.63169384", "0.63112855", "0.63041687", "0.62994784", "0.62990516", "0.6286196", "0.62836224", "0.62803555" ]
0.77224433
0
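Each record in this dump pairs a query with its positive document, a metadata block naming the (query, document, negatives) triplet objective, a list of hard-negative documents, a parallel negative_scores list, and finally the positive's document_score and document_rank (0 for the record above). A minimal Ruby sketch of how one such record could be expanded into scored training triples; the field names and JSON layout are assumed from the record structure visible here, not taken from any official loader:

# Illustrative only: the field names ("query", "document", "negatives",
# "negative_scores", "document_score") are assumptions based on the record
# layout shown above; adjust them to the actual serialization before use.
require 'json'

def expand_record(json_line)
  record = JSON.parse(json_line)

  query      = record['query']
  positive   = record['document']
  pos_score  = record['document_score'].to_f
  negatives  = record['negatives'] || []
  neg_scores = (record['negative_scores'] || []).map(&:to_f)

  # Pair each hard negative with its score and keep the margin to the positive,
  # which is what margin-based contrastive losses typically consume.
  negatives.zip(neg_scores).map do |neg_text, neg_score|
    {
      query:    query,
      positive: positive,
      negative: neg_text,
      margin:   pos_score - (neg_score || 0.0)
    }
  end
end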
GET /property_images/new
GET /property_images/new.json
def new
  @property_image = PropertyImage.new

  respond_to do |format|
    format.html # new.html.erb
    format.json { render json: @property_image }
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new\n @property_picture = PropertyPicture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property_picture }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n @image_url = ImageUrl.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_url }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @image }\n end\n end", "def new\n @image = Image.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @image }\n end\n end", "def create\n @property_image = PropertyImage.new(params[:property_image])\n\n respond_to do |format|\n if @property_image.save\n format.html { redirect_to property_images_path, notice: 'Property image was successfully created.' }\n format.json { render json: @property_image, status: :created, location: @property_image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @property_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n image = PropertyImage.new()\n image.photo = params[:file]\n image.property_form = @property_form\n if image.save\n respond_to do |format|\n format.json do\n json = {id: image.id}\n render json: json\n end\n end\n end\n end", "def new\n @imovel = Imovel.new\n @imovel.images.build\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @imovel }\n end\n end", "def new\n @photo = Photo.new\n\n render json: @photo\n end", "def new\n \n @page = Page.new\n @page.images.build\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @page }\n end\n end", "def new\n @image = @owner.images.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @image }\n end\n end", "def new\n @motivational_image = MotivationalImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @motivational_image }\n end\n end", "def new\n @image_section = ImageSection.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_section }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo 
}\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @pic = Pic.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pic }\n end\n end", "def new\n @memberimage = Memberimage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @memberimage }\n end\n end", "def new\n @admin_photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @admin_photo }\n end\n end", "def new\n @picture = Picture.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @picture }\n end\n end", "def new\n @picture = @museum.pictures.new #Picture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @picture }\n end\n end", "def new\n @picture = Picture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @picture }\n end\n end", "def new\n @picture = Picture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @picture }\n end\n end", "def new\n @picture = Picture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @picture }\n end\n end", "def new\n @image_member = ImageMember.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_member }\n end\n end", "def new\n @estate_agent_image = EstateAgentsImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @estate_agent_image }\n end\n end", "def new\n @photo = @allbum.photos.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def create\n @estate_agent_image = EstateAgentsImage.new(params[:property_image])\n\n respond_to do |format|\n if @estate_agent_image.save\n format.html { redirect_to @estate_agent_image, notice: 'Property image was successfully created.' 
}\n format.json { render json: @estate_agent_image, status: :created, location: @estate_agent_image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @estate_agent_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @photo }\n end\n end", "def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @photo }\n end\n end", "def new\n @product = Product.new\n @product.photos.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @product }\n end\n end", "def new\n @category_image = CategoryImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @category_image }\n end\n end", "def new\n @pictures_of_cat = PicturesOfCat.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pictures_of_cat }\n end\n end", "def new\n @image = Image.new\n @image.user=@current_user\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n @image = @user.images.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @image }\n end\n end", "def create\n @property_picture = PropertyPicture.new(params[:property_picture])\n\n respond_to do |format|\n if @property_picture.save\n format.html { redirect_to @property_picture, notice: 'Property picture was successfully created.' }\n format.json { render json: @property_picture, status: :created, location: @property_picture }\n else\n format.html { render action: \"new\" }\n format.json { render json: @property_picture.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @image_gallery = ImageGallery.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_gallery }\n end\n end", "def new\n @image_datum = ImageDatum.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_datum }\n end\n end", "def new\n # maybe I need the following instead ?\n #@image = current_user.images.new\n @image = Image.new \n\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n # maybe I need the following instead ?\n #@image = current_user.images.new\n @image = Image.new \n\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end", "def new\n @image_upload = ImageUpload.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_upload }\n end\n end", "def new\n @collage = Collage.new\n @images = Image.order('created_at DESC')\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @collage }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @image }\n end\n end", "def new\n @picture = Picture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json=>@picture}\n end\n end", "def new\n @photo = Photo.new \n \n #@photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end", "def new\n @web_image = WebImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @web_image }\n end\n end", "def new\n @post = 
Post.new\n 2.times { @post.post_images.build }\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @post }\n end\n end", "def add_images\n\t\tif @current_user.present?\n\t\t\t@property = Property.find(params[:property_id])\n\t\t\tif @property.present?\n\t\t\t\t# if @property.images.present?\n\t\t\t\t# \[email protected]_all\n\t\t\t\t# end\n\t\t\t\tparams[:images].each { |image|\n\t i = @property.images.create(image: image)\n\t if i.save\n\t else\n\t \trender_json({\"status\" => \"Fail\", \"message\" => i.errors.full_messages.first}.to_json)\n\t \treturn\n\t end\n\t }\n\t @property.images.first.update_attributes(is_starred: true)\n\t render :file => 'api/v1/property/add_image'\n\t else\n\t \trender_json({\"status\" => \"Fail\", \"message\" => \"No property found.\"}.to_json)\n\t end\n\t\tend\n\tend", "def new\n @hotel_pic = HotelPic.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @hotel_pic }\n end\n end", "def new\n @gallery_image = @project.gallery_images.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @gallery_image }\n end\n end", "def new\n @foto = Foto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @foto }\n end\n end", "def new\n @slide_image = SlideImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @slide_image }\n end\n end", "def new\n @photo_library = PhotoLibrary.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo_library }\n end\n end", "def new\n @slider_image = SliderImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @slider_image }\n end\n end", "def new\n @slider_image = SliderImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @slider_image }\n end\n end", "def new\n @product = Product.new\n 5.times { @product.photos.build }\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @product }\n end \n end", "def new\n @destination = Destination.new\n @destination.build_dest_image\n\n respond_to do |format|\n format.html # _new.html.erb\n format.json { render json: @destination }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @image }\n end\n end", "def new\n @rental = Rental.new\n @object_new = Photo.new\n\n respond_to do |format|\n format.html # _new.html.erb\n format.json { render json: @rental }\n end\n end", "def new\n @bgimage = Bgimage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @bgimage }\n end\n end", "def create\n @image = Image.new(image_params)\n respond_to do |format|\n if @image.save\n format.json { render :json => { url: @image.image.url} }\n else\n \n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @plate_photo = PlatePhoto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @plate_photo }\n end\n end", "def new\n @project_photo = ProjectPhoto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @project_photo }\n end\n end", "def new\n @bwimage = Bwimage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bwimage }\n end\n end", "def new\n @image = Image.new\n \n respond_to do |format|\n format.html # 
new.html.erb\n format.xml { render :xml => @image }\n end\n end", "def new\n @photo1 = Photo1.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo1 }\n end\n end", "def new\r\n @car_image = CarImage.new\r\n\r\n respond_to do |format|\r\n format.html # new.html.erb\r\n format.json { render json: @car_image }\r\n end\r\n end", "def new\n @combined_image = CombinedImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @combined_image }\n end\n end", "def new\n @shop_photo = ShopPhoto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @shop_photo }\n end\n end", "def new\n @property = Property.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property }\n end\n end", "def new\n @property = Property.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property }\n end\n end", "def new\n @property = Property.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property }\n end\n end", "def new\n @property = Property.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property }\n end\n end", "def new\n @property = Property.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property }\n end\n end", "def new\n @property = Property.new\n\n\t\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.js\n format.xml { render :xml => @image }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.js\n format.xml { render :xml => @image }\n end\n end", "def new\n @team_photo = TeamPhoto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @team_photo }\n end\n end", "def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @console_image }\n end\n end", "def new\n @review_image = ReviewImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @review_image }\n end\n end", "def new\n @photoid = Photoid.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photoid }\n end\n end", "def new\n @pinimage = Pinimage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pinimage }\n end\n end", "def new\n @garment_image = GarmentImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @garment_image }\n end\n end", "def new\n @project = Project.find(params[:project_id])\n @picture = @project.pictures.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @picture }\n end\n end", "def new\n @store = Store.new\n @store.pictures.build\n @store.pictures.build\n @store.pictures.build\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @store }\n end\n end" ]
[ "0.777635", "0.7562134", "0.7562134", "0.7562134", "0.7562134", "0.7562134", "0.7562134", "0.75000316", "0.7494919", "0.7483253", "0.7478794", "0.7455325", "0.73461235", "0.7290786", "0.72890764", "0.7280207", "0.71146154", "0.71138805", "0.71067756", "0.7106229", "0.7106229", "0.7106229", "0.7106229", "0.7106229", "0.7106229", "0.7106229", "0.7106229", "0.7106229", "0.7106229", "0.7106229", "0.7106229", "0.7101491", "0.7084211", "0.70792687", "0.7070658", "0.7061334", "0.7057857", "0.7057857", "0.7057857", "0.7048174", "0.7047484", "0.7043487", "0.70390904", "0.7031069", "0.7031069", "0.7017489", "0.7001478", "0.699453", "0.6994283", "0.6979329", "0.6972732", "0.69726205", "0.6971538", "0.6963544", "0.6963544", "0.69631267", "0.695915", "0.6954758", "0.692441", "0.69227254", "0.69166803", "0.6909728", "0.68903714", "0.6884066", "0.6881038", "0.68780655", "0.6862229", "0.68558496", "0.6849337", "0.6849337", "0.6848972", "0.6842039", "0.68417823", "0.6831354", "0.6830012", "0.6826174", "0.68224645", "0.6820955", "0.68167025", "0.6812158", "0.6807333", "0.67986107", "0.6798514", "0.6797907", "0.6794269", "0.6794269", "0.6794269", "0.6794269", "0.6794269", "0.67868906", "0.67797405", "0.67797405", "0.6759472", "0.6747213", "0.6743785", "0.67378235", "0.67348945", "0.6726758", "0.6715228", "0.6709659" ]
0.823767
0
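The query/document pair just above records the GET /property_images/new and /property_images/new.json routes of a Rails scaffold controller. A hedged client-side sketch of exercising the JSON variant; the host and port are placeholders and assume such an app is running locally with these routes mounted:

# Illustrative only: http://localhost:3000 and the unauthenticated request are
# assumptions. The controller's format.json branch renders the unsaved
# PropertyImage, so a successful call returns its blank attributes as JSON.
require 'net/http'
require 'json'

uri = URI('http://localhost:3000/property_images/new.json')
response = Net::HTTP.get_response(uri)
puts JSON.parse(response.body) if response.is_a?(Net::HTTPSuccess)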
POST /property_images
POST /property_images.json
def create
  @property_image = PropertyImage.new(params[:property_image])

  respond_to do |format|
    if @property_image.save
      format.html { redirect_to property_images_path, notice: 'Property image was successfully created.' }
      format.json { render json: @property_image, status: :created, location: @property_image }
    else
      format.html { render action: "new" }
      format.json { render json: @property_image.errors, status: :unprocessable_entity }
    end
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create\n image = PropertyImage.new()\n image.photo = params[:file]\n image.property_form = @property_form\n if image.save\n respond_to do |format|\n format.json do\n json = {id: image.id}\n render json: json\n end\n end\n end\n end", "def add_images\n\t\tif @current_user.present?\n\t\t\t@property = Property.find(params[:property_id])\n\t\t\tif @property.present?\n\t\t\t\t# if @property.images.present?\n\t\t\t\t# \[email protected]_all\n\t\t\t\t# end\n\t\t\t\tparams[:images].each { |image|\n\t i = @property.images.create(image: image)\n\t if i.save\n\t else\n\t \trender_json({\"status\" => \"Fail\", \"message\" => i.errors.full_messages.first}.to_json)\n\t \treturn\n\t end\n\t }\n\t @property.images.first.update_attributes(is_starred: true)\n\t render :file => 'api/v1/property/add_image'\n\t else\n\t \trender_json({\"status\" => \"Fail\", \"message\" => \"No property found.\"}.to_json)\n\t end\n\t\tend\n\tend", "def create\n @estate_agent_image = EstateAgentsImage.new(params[:property_image])\n\n respond_to do |format|\n if @estate_agent_image.save\n format.html { redirect_to @estate_agent_image, notice: 'Property image was successfully created.' }\n format.json { render json: @estate_agent_image, status: :created, location: @estate_agent_image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @estate_agent_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def property_image_params\n params.require(:property_image).permit(:image_url, :property_id)\n end", "def create\n @property = Property.new(property_params)\n # params[:property][:cover_picture].each do |image|\n mini_image = MiniMagick::Image.new(params[:property][:cover_picture].tempfile.path)\n mini_image.resize '1200x1200'\n # end\n # params[:property][:pictures].each do |image|\n # mini_image = MiniMagick::Image.new(params[:property][image].tempfile.path)\n # mini_image.resize '1200x1200'\n # end\n # @property.account_id = current_account.id\n\n respond_to do |format|\n if @property.save\n format.html { redirect_to @property, notice: 'Property was successfully created.' }\n format.json { render :show, status: :created, location: @property }\n else\n format.html { render :new }\n format.json { render json: @property.errors, status: :unprocessable_entity }\n end\n end\n end", "def image_params\n params.require(:image).permit(:name, :url, :property_id)\n end", "def post_property_photos(photos)\n post_photos_for_property_id(photos.first.propertyID, photos)\n end", "def create\n @apartment = current_user.apartments.new(apartment_params)\n\n respond_to do |format|\n if @apartment.save\n if params[:images]\n # The magic is here ;)\n params[:images].each { |image|\n @apartment.pictures.create(image: image)\n }\n end\n format.html { redirect_to @apartment, notice: 'Propiedad creada correctamente.' 
}\n format.json { render :show, status: :created, location: @apartment }\n else\n format.html { render :new }\n format.json { render json: @apartment.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(image_params)\n respond_to do |format|\n if @image.save\n format.json { render :json => { url: @image.image.url} }\n else\n \n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @property_image = PropertyImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property_image }\n end\n end", "def create\n @property_picture = PropertyPicture.new(params[:property_picture])\n\n respond_to do |format|\n if @property_picture.save\n format.html { redirect_to @property_picture, notice: 'Property picture was successfully created.' }\n format.json { render json: @property_picture, status: :created, location: @property_picture }\n else\n format.html { render action: \"new\" }\n format.json { render json: @property_picture.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @smartphone = Smartphone.new(smartphone_params)\n\n respond_to do |format|\n if @smartphone.save\n if params[:images]\n params[:images].each do |val|\n @smartphone.pictures.create(image: val)\n end\n end\n\n format.html { redirect_to @smartphone, notice: 'Smartphone was successfully created.' }\n format.json { render :show, status: :created, location: @smartphone }\n else\n format.html { render :new }\n format.json { render json: @smartphone.errors, status: :unprocessable_entity }\n end\n end\n end", "def property_params\n params.require(:property).permit(:title, :description_short, :description_long, :city, :state, :country, :latitude, :longitude, :postcode, :image, :remote_image_url, :image_cache, :remove_image, :image_url, :property_pictures_attributes => [:id, :avatar_url, :name, :_destroy, :avatar_url_cache])\n end", "def set_property_image\n @property_image = PropertyImage.find(params[:id])\n end", "def upload_image\n @image = Image.create(image_path: params[:upload][:image])\n p @image\n render json: @image\n end", "def img_properties\n par = params[:image]\n if !par\n # json webservice call, not coming from rails form. Change all keys from camelcased to underscores\n par = params\n %w(pageUrl originalUrl calibrateLength calibrateUnit calibrateCoords).each do |p|\n par[p.underscore] = par[p]\n par.delete(p)\n end\n end\n par\n end", "def create\n @post = Post.new(post_params)\n\n\n if @post.save && params[:images]\n params[:images].each { |image|\n @post.images.create(image: image)\n }\n end\n\n redirect_to @post\n end", "def create\n @rent = Rent.new(rent_params)\n @rentpar = rent_params\n respond_to do |format|\n if @rent.save\n\n if params[:image]\n puts params[:image]\n params[:image].each { |image|\n @rent.rent_images.create(rent_id: @rent.id, image:image)\n }\n \n end\n\n format.html { redirect_to @rent, notice: 'Rent was successfully created.' 
}\n format.json { render :show, status: :created, location: @rent }\n else\n format.html { render :new }\n format.json { render json: @rent.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\n post = Post.new(post_params)\n post.location = Location.create(name: params[:post][:location][:name], lat: params[:post][:location][:lat], long: params[:post][:location][:long])\n if post.save\n params[:post][:images].each do |i|\n img = Image.find(i[:id])\n img.update(active: 1, post_id: post.id)\n end\n\n render json: {\n status: \"success\",\n data: post.as_json(\n include: [\n {\n user:\n {\n only: [:id, :name, :avatar]\n }\n },\n :location,\n {\n images: {\n only: [:id, :src]\n }\n },\n :rates\n ])}, status: :ok\n\n else\n render json: post.errors, status: 404\n end\n end", "def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.json { render json: @image, status: :created, location: [:admin, @image] }\n else\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @apartment.update(apartment_params)\n if params[:images]\n # The magic is here ;)\n params[:images].each { |image|\n if (image!=nil)\n @apartment.pictures.create(image: image)\n \n end\n }\n end\n format.html { redirect_to @apartment, notice: 'La propiedad se actualizo correctamente.' }\n format.json { render :show, status: :ok, location: @apartment }\n else\n format.html { render :edit }\n format.json { render json: @apartment.errors, status: :unprocessable_entity }\n end\n end\n end", "def create \n @image ||= Image.new(image_params)\n if @image.save\n render json: {\"url\" => @image.image_url(:resized), \"success\" => true}\n else\n render json: @image.errors, status: :unprocessable_entity\n end\n end", "def create\n @property = Property.new(property_params)\n\n respond_to do |format|\n if @property.save\n # Get features parameter\n features = params[:features]\n\n # Verify whether features array comes in the parameters list\n if features.present?\n # Intantiate & create features by property\n features_property_create = FeaturesPropertyCreate.new(@property)\n features_property_create.create(features, params[:quantities])\n end\n\n # Get photos parameter\n photos = params[:photos]\n\n # Verify whether photos array comes in the parameters list\n if photos.present?\n # Intantiate & create photos by property\n photo_create = PhotoCreate.new(@property)\n photo_create.create(photos)\n end\n\n format.html { redirect_to @property, notice: 'Property was successfully created.' }\n format.json { render :show, status: :created, location: @property }\n else\n format.html { render :new }\n format.json { render json: @property.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_gallery_images_property(property, subscriber = nil)\n buttons = []\n buttons.push(create_url_button_hash(\"👀 Voir sur #{property.source}\", property.link))\n if property.contact_number != nil && property.contact_number != \"N/C\"\n property.provider == \"Particulier\" ? 
caption = \"☎️ Appeler le particulier\" : caption = \"Appeler l'agence\"\n buttons.push(create_call_button_hash(caption, property.contact_number))\n end\n webhook_fav = ENV[\"BASE_URL\"] + \"api/v1/saved_properties/\"\n body_fav = { research_id: subscriber.research.id, property_id: property.id }\n buttons.push(create_dynamic_button_hash(\"❤️ Ajouter favoris\", webhook_fav, \"POST\", body_fav))\n\n elements = []\n photo_counter = 1\n property.images.count <= 10 ? total_pic = property.images.count : total_pic = 10\n property.images.each do |img|\n elements.push(create_message_element_hash(\"📷 Photo #{photo_counter}/#{total_pic}\", property.manychat_show_description_with_title, img, buttons))\n elements.length === 10 ? break : nil\n photo_counter += 1\n end\n puts elements\n message_array = []\n message_array.push(create_message_card_hash(\"cards\", elements, \"square\"))\n\n return message_array\n end", "def set_image\n @image = Image.where(id: params[:id], property_id: params[:property_id]).first\n end", "def create\n image = Image.create(image_params)\n\n if image.new_record?\n render json: { errors: image.errors.messages }, status: 422\n else\n render json: image, status: 201\n end\n end", "def create\n @listing = current_admin.listings.build(listing_params)\n\n respond_to do |format|\n if @listing.save\n\n if params[:pictures]\n #===== The magic is here ;)\n params[:pictures].each { |image|\n @listing.pictures.create(file: image)\n }\n end\n\n format.html { redirect_to admin_listings_url, notice: 'Listing was successfully created.' }\n format.json { render :show, status: :created, location: @listing }\n else\n format.html { render :new }\n format.json { render json: @listing.errors, status: :unprocessable_entity }\n end\n end\n end", "def add_image\n obtain_product_image_params\n pi = ProductImage.new(picture: @image_params)\n @product.product_images << pi\n render json: @product.simple_info, status: :ok\n rescue => e\n render json: { error: e }, status: :bad_request\n end", "def property_attachment_params\n params.require(:property_attachment).permit(:property_id, :avatar)\n end", "def property_params\n params.require(:property).permit(:type_of_offer,:price, :brunk_type, :brunk, :parking_lot, :property_type, :runner_id, :country_id, :department, :city, :address, :latitude, :length, :prince, :stratum, :area, :blueprints, :number_bedrooms, :number_bathrooms, :levels, :state, :state_favorite, :url_video, :images, property_images_attributes: [:id, :property_id, :runner_id, :file, :_destroy])\n end", "def create\n @image = Spree::Image.new(params[:image])\n\n respond_to do |format|\n if @upload.save\n format.html {\n render :json => [@image.to_jq_upload].to_json,\n :content_type => 'text/html',\n :layout => false\n }\n format.json { render json: {files: [@image.to_jq_upload]}, status: :created, location: @image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @picture = Picture.create!(picture_params)\n render json: @picture, status: :created\n end", "def create\n @imageable = find_imageable\n @image = @imageable.images.build(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: t('.notice', image: @image) }\n format.json\n else\n format.html { render :new }\n format.json {\n render json: @image.errors[:attachment], status: :unprocessable_entity\n }\n end\n end\n end", "def to_jq_upload\n {\n \"name\" => read_attribute(:image),\n 
\"size\" => image.size,\n \"url\" => image.url,\n \"thumbnail_url\" => image.thumb.url,\n \"show_url\" => property_photo_path(property_id: property.id, id: id),\n }\n end", "def img_list_params\n params.require(:img_list).permit(:imgUrl, :property_id)\n end", "def create\n @image = Image.new(image_params)\n @image.name = params[:image][:picture].original_filename\n @image.picture = params[:image][:picture].read\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to images_path, success: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: images_path }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def property_picture_params\n params.require(:property_picture).permit(:name, :avatar_url, :property_id, :_destroy, :avatar_url_cache)\n end", "def images_attributes=(image_attributes)\n image_attributes.each do |attributes| \n post_images.build(attributes) \n end\n end", "def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to aquarium_images_url, notice: 'Image was successfully created.' }\n format.json { render json: @image, status: :created }\n else\n format.html { render action: 'new' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @property = Property.find(params[:id])\n\n if params[:type_is] == \"photo_gallery\"\n #Cover Image Save\n if params[\"property\"][\"prop_cover_img\"]\n @property_cover_img = @property.property_cover_image.present? ? @property.property_cover_image : @property.build_property_cover_image\n @property_cover_img.upload_image(params[\"property\"][\"prop_cover_img\"])\n end\n\n #Other images\n if params[\"property\"][\"prop_imgs\"]\n params[\"property\"][\"prop_imgs\"].each do |img|\n @property.property_images.new.upload_image(img) if img\n end\n end\n\n respond_to do |format|\n # flash[:success] = \"New Images Successfully Uploaded.</br><a href='#{properties_path(active_id: @property.id)}'>Show in List</a>\"\n format.html { redirect_to edit_property_path(@property.key, type_is: params[:type_is]) }\n format.json { render action: 'show', status: :created, location: @property }\n end\n else\n prior_property_name = @property.title\n @property.assign_attributes(property_params)\n if params[:use_current_rent] == false\n @property.lease_base_rent = @property.current_rent\n end\n\n if !(@property.owner_person_is.nil? 
|| @property.owner_person_is==0)\n if @property.owner_person_is == 1 && [email protected]_entity_id_indv.nil?\n @property.owner_entity_id = @property.owner_entity_id_indv\n end\n else\n @property.owner_entity_id = @property.owner_entity_id_indv = 0\n end\n\n if @property.rent_table_version.nil?\n @property.rent_table_version = 1\n else\n @property.rent_table_version = @property.rent_table_version + 1\n end\n\n respond_to do |format|\n if @property.save\n\n # finally remove the old upload\n # Cloudinary::Uploader.destroy(public_id) unless public_id.blank?\n \n if @property.can_create_rent_table?\n rent_table_version = @property.rent_table_version\n\n base_rent = @property.lease_base_rent\n duration = @property.lease_duration_in_years\n extension_percentage = @property.lease_rent_increase_percentage || 0\n base_percentage = @property.base_rent_increase_percentage || 0\n slab = @property.lease_rent_slab_in_years || 1\n\n rent = base_rent\n @property.rent_tables.create(version: rent_table_version, rent: base_rent, description: 'Base Annual Rent')\n @property.rent_tables.create(version: rent_table_version, rent: base_rent / 12.00, description: 'Base Monthly Rent (approx.)')\n @property.rent_tables.create(version: rent_table_version, rent: base_rent/ 365.00, description: 'Base Daily Rent (approx.)')\n prev_rent = rent\n rent_start = @property.rent_commencement_date || Time.now\n rent_start = Time.now if @property.rent_commencement_date_details == 'Date not certain'\n start_year = rent_start.year\n end_year = 0\n\n count = 0\n\n if @property.lease_is_pro_rated && @property.rent_commencement_date_details != 'Date not certain'\n d = rent_start\n\n # 0 fill in the pro rated fields for the property and save\n @property.pro_rated_month = d.month\n @property.pro_rated_month_name = Date::MONTHNAMES[d.month]\n @property.pro_rated_day = d.day\n @property.pro_rated_year = d.year\n @property.pro_rated_day_date = d.to_date\n @property.pro_rated_day_rent = (base_rent / 365.00)\n @property.pro_rated_month_rent = (base_rent / 12.00) - ((d.day - 1) * @property.pro_rated_day_rent)\n @property.save\n\n @property.rent_tables.create(version: rent_table_version, rent: @property.pro_rated_month_rent,\n description: \"Pro-rated Rent for :#{Date::MONTHNAMES[d.month]} #{d.year}\")\n\n # 1 - calculate the pro-rated rent for year 1\n rent_first_year = rent * (((Date.parse(\"31/12/#{d.year}\") - d.to_date).to_i) * 1.00/(d.year % 4 == 0 ? 
366.00 : 365.00))\n # 2 - add the rent table entry\n @property.rent_tables.create(version: rent_table_version, start_year: d.year, end_year: d.year, rent: rent_first_year)\n # 3 - decrease the duration by 1 and update rent\n duration = duration - 1\n start_year = d.year + 1\n count = slab - 1\n slab = slab - 1\n end\n\n\n while start_year <= rent_start.year + duration - 1\n if count == 0\n slab = @property.lease_rent_slab_in_years || 1\n else\n count = 0\n end\n end_year = start_year + slab - 1\n\n if end_year >= rent_start.year + duration - 1\n end_year = rent_start.year + duration - 1\n end\n\n prev_rent = rent\n @property.rent_tables.create(version: rent_table_version, start_year: start_year, end_year: end_year, rent: rent)\n\n start_year = end_year + 1\n if @property.rent_increase_in_base_term_status\n rent = rent + rent * base_percentage / 100\n end\n end\n\n if @property.number_of_option_period && @property.length_of_option_period &&\n @property.number_of_option_period > 0 && @property.length_of_option_period > 0\n duration = @property.number_of_option_period * @property.length_of_option_period\n start_year = end_year + 1\n rent_start = Date.parse(\"01/01/#{start_year}\")\n rent = prev_rent\n slab = @property.length_of_option_period\n count = 1\n while start_year <= rent_start.year + duration - 1\n end_year = start_year + slab - 1\n\n if end_year >= rent_start.year + duration - 1\n end_year = rent_start.year + duration - 1\n end\n\n prev_rent = rent\n @property.rent_tables.create(version: rent_table_version,\n start_year: start_year, end_year: end_year, rent: rent,\n is_option: true, option_slab: count)\n\n start_year = end_year + 1\n rent = rent + rent * extension_percentage / 100\n count = count + 1\n end\n end\n\n end\n\n flash[:success] = \"Congratulations, you have just made a change in the record for #{prior_property_name}\"\n\n if params[:lease_sub].blank?\n format.html { redirect_to edit_property_path(@property.key, type_is: params[:type_is]) }\n else\n format.html { redirect_to edit_property_path(@property.key, type_is: params[:type_is], lease_sub: params[:lease_sub]) }\n end\n format.js { render json: @property.to_json, status: :ok }\n format.json { render action: 'show', status: :created, location: @property }\n else\n format.html { render action: 'edit' }\n format.json { render json: @property.errors, status: :unprocessable_entity }\n end\n end\n end\n end", "def add_or_update_property_image\n if params[:attachment]\n image_type = (params[:prop_form_close] == \"true\" || params[:property_form_submit] == 'true') ? 'property_picture' : nil\n if (params[:prop_form_close] == \"true\" || params[:property_form_submit] == 'true')\n #image = PortfolioImage.find_by_attachable_id_and_attachable_type_and_is_property_picture(@property.id,\"RealEstateProperty\",true)\n else\n image = PortfolioImage.find_by_attachable_id_and_attachable_type(@property.id,\"RealEstateProperty\")\n end\n image != nil ? 
image.update_attributes(params[:attachment]) : PortfolioImage.create_portfolio_image(params[:attachment][:uploaded_data],@property.id,image_type)\n end\n end", "def create\n @results = []\n\n unless params[:files].nil?\n params[:files].each do |data|\n img = Image.new\n img.filename = data.original_filename\n img.data = data.read\n img.upload_id = params[:uuid]\n img.visitation_form_id = params[:formId]\n img.image_type = params[:imageType]\n img.content_type = data.content_type\n #img.temp_index = params[:birdIndex]\n img.bird_id = params[:birdId]\n\n if !img.save\n render :json => { :errors => img.errors.full_messages }, :status => 400 and return\n else\n @results << { name: img.filename, imageType: img.image_type, id: img.id }\n end\n end\n end\n\n render json: { files: @results }\n end", "def send_property_info_post_interaction(subscriber, property)\n first_call = handle_manychat_response(send_content(subscriber, create_gallery_images_property(property, subscriber)))\n end", "def create\n @objective_image = ObjectiveImage.new(objective_image_params)\n\n respond_to do |format|\n if @objective_image.save\n format.html { redirect_to @objective_image, notice: 'Objective image was successfully created.' }\n format.json { render :show, status: :created, location: @objective_image }\n else\n format.html { render :new }\n format.json { render json: @objective_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n par = img_properties\n logger.info(\"parameters: #{par.inspect}\")\n valid = true\n\n begin\n url = par[:original_url] || par[:url]\n\n @image = Image.new do |i|\n #specify all properties to copy explicitly\n i.user = @user\n # set optional parameters\n i.name = par[:name] if !par[:name].blank?\n i.ref = par[:ref] if !par[:ref].blank?\n i.original_url = url if url\n i.page_url = par[:page_url] if !par[:page_url].blank?\n i.source = par[:source] if !par[:source].blank?\n i.image_type = Image::TYPE_LS\n end\n\n # this may fail\n @image.remote_upload_url = url\n\n # set calibration data if specified\n if !par[:calibrate_length].blank?\n @image.calibrate_length = par[:calibrate_length].to_f\n @image.calibrate_unit = par[:calibrate_unit].to_i if !par[:calibrate_unit].blank?\n @image.calibrate_coords = par[:calibrate_coords] if !par[:calibrate_coords].blank?\n @image.ppi = calculate_ppi(@image)\n end\n\n rescue CarrierWave::DownloadError\n @image.errors.add(:remote_upload_url, \"^This url doesn't appear to be valid\")\n valid = false\n rescue CarrierWave::IntegrityError\n @image.errors.add(:remote_upload_url, \"^This url does not appear to point to a valid image\")\n valid = false\n rescue StandardError\n @image.errors.add(:remote_upload_url, \"There does not appear to be an image at this url\")\n valid = false\n end\n\n if valid && @image.save\n # update product if set\n @image.user_product = par[:product] if par[:product]\n\n render :json => @image.js_serialize, :callback => params[:callback]\n else\n render :json => { :error => 403, :messages => prepare_errors(@image), :callback => params[:callback]}, :status => 200\n end\n end", "def create\n @image_collection = ImageCollection.new(image_collection_params)\n\n respond_to do |format|\n if @image_collection.save\n image_collection_params[:attached_images].reject(&:empty?).each do |id|\n @image_collection.images << Image.find(id)\n end\n\n @image_collection.save\n\n format.html { redirect_to @image_collection, notice: 'Image collection was successfully created.' 
}\n format.json { render :show, status: :created, location: @image_collection }\n else\n format.html { render :new }\n format.json { render json: @image_collection.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @item = Item.new(item_params)\n\n respond_to do |format|\n if @item.save\n @item.images.delete_all\n unless params.require(:item)[:images_id].nil?\n params.require(:item)[:images_id].each do |id|\n image = Image.find_by_id(id)\n (@item.images << image) unless image.nil?\n end\n end\n format.html { redirect_to @item, notice: 'Item was successfully created.' }\n format.json { render action: 'show', status: :created, location: @item }\n else\n format.html { render action: 'new' }\n format.json { render json: @item.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @estate_agent_image = EstateAgentsImage.find(params[:id])\n\n respond_to do |format|\n if @estate_agent_image.update_attributes(params[:property_image])\n format.html { redirect_to @estate_agent_image, notice: 'Property image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @estate_agent_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def property_params\n params.require(:property).permit(:ad_type, :property_type, :country, :sponsored, :price, :bedroom, :bathroom, :area, :description, :city, { photos: [] })\n end", "def update\n @property_image = PropertyImage.find(params[:id])\n\n respond_to do |format|\n if @property_image.update_attributes(params[:property_image])\n format.html { redirect_to property_images_path, notice: 'Property image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @property_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @photo = Photo.new(photo_params)\n\n if @photo.save\n render json: @photo, status: :created\n else\n render json: @photo.errors, status: :unprocessable_entity\n end\n end", "def create\n @image = Image.create(image_params)\n @image.save\n\n end", "def create\n params[:variant_image][:image2] = params[:variant_image][:image] # For image replication across two s3 accounts\n @variant_image = VariantImage.new(params[:variant_image])\n\t\t@variant = @variant_image.variant\n @product = @variant.product \n\n respond_to do |format|\n if @variant_image.save\n format.html { redirect_to @variant, notice: 'Image added successfully.' }\n format.js { redirect_to @variant_image, notice: 'Image added successfully.' 
}\n format.json { render json: @variant_image, status: :created, location: @variant_image }\n else\n format.html { render action: \"new\" }\n format.js { render action: \"new\" }\n format.json { render json: @variant_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @picture = Picture.new(picture_params)\n\n if @picture.save\n @picture.update(foodscape_id: params[:foodscape_id])\n render json: @picture, status: :created\n else\n render json: @picture.errors, status: :unprocessable_entity\n end\n end", "def create\n @image = Image.new(image_params)\n\n # fetch tags from google vision API\n helpers.fetch_tags(image_params)\n\n @image.image_file.attach(image_params[:image_file])\n\n respond_to do |format|\n if @image.save()\n format.html { redirect_to @image, notice: \"Image was successfully created.\" }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @family_image = FamilyImage.new(family_image_params)\n\n if @family_image.save\n render json: @family_image, status: :created, location: @family_image\n else\n render json: @family_image.errors, status: :unprocessable_entity\n end\n end", "def create\n @image = Image.new(image_params)\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: \"Image was successfully created.\" }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(params[:image])\n type = @image.url_type\n #response.headers[\"Content-type\"] = \"text/plain\"\n if @image.save\n render :text => [ @image.to_jq_upload(type, \"image\") ].to_json.to_s\n else \n render :text => [ @image.to_jq_upload(type, \"image\").merge({ :error => \"custom_failure\" }) ].to_json.to_s\n end\n end", "def create\n @note = Note.new(note_params)\n @note.user = current_user\n\n respond_to do |format|\n if @note.save\n if params[:images]\n @note.image = params[:images].first\n @note.save!\n note_image_params.each do |ima|\n @note_image = NoteImage.new()\n @note_image.image = ima\n @note_image.note_id = @note.id\n @note_image.save!\n end\n # raise \"#{@note.to_json} #{@note.note_images.to_json}\"\n end\n format.html { redirect_to @note, notice: 'Note was successfully created.' }\n format.json { render :show, status: :created, location: @note }\n else\n format.html { render :new }\n format.json { render json: @note.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(params[:image])\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render json: @image, status: :created, location: @image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(params[:image])\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' 
}\n format.json { render json: @image, status: :created, location: @image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(params[:image])\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render json: @image, status: :created, location: @image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(params[:image])\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render json: @image, status: :created, location: @image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def get_images_data\t\t\t\t\t\t\n\t\t{ payload: { 'makes' => makes_dropdown, 'images' => images_array('none').compact}, success: true}\n\tend", "def create\n @experience = Experience.new(experience_params)\n @experience.trip_id = @trip.id\n @experience.nomad_id = current_nomad.id\n\nif @experience.save\n\n if params[:images] \n params[:images].each do |image|\n @experience.photos.create(image: image)\n end\n end\n\n @photos = @experience.photos\n redirect_to edit_trip_experience_path(@trip, @experience), notice: \"Saved...\"\n else\n render :new\n end\n end", "def create\n @admin_image = Image.new(admin_image_params)\n \n respond_to do |format|\n if @admin_image.save\n format.html { redirect_to admin_images_url, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @admin_image }\n else\n format.html { render :new }\n format.json { render json: @admin_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @animal_image = AnimalImage.new(animal_image_params)\n\n respond_to do |format|\n if @animal_image.save\n format.html { redirect_to @animal_image, notice: \"Animal image was successfully created.\" }\n format.json { render :show, status: :created, location: @animal_image }\n else\n format.html { render :new }\n format.json { render json: @animal_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def show\n @property_image = PropertyImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @property_image }\n end\n end", "def create\n @gallery = find_gallery\n @gallery.save! unless @gallery.persisted?\n @image = Image.new params[:image]\n @image.image_gallery_id = @gallery.id\n @images = @gallery.images\n \n respond_to do |format|\n if @image.save \n format.js\n format.json { render json: @image, status: :created, location: @image }\n else\n format.js\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' 
}\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @apartment.update(apartment_params)\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @apartment.photos.create!(:image => a, :apartment_id => @apartment.id)\n end\n end\n format.html { redirect_to @apartment, notice: 'Apartment was successfully updated.' }\n format.json { render :show, status: :ok, location: @apartment }\n else\n format.html { render :edit }\n format.json { render json: @apartment.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n #@experience_image = ExperienceImage.new(experience_image_params)\n logger.debug \"The params coming in are:#{params.inspect}\"\n @experience_image = ExperienceImage.new(experience_image_params)\n logger.debug \"The file created is: #{@experience_image.inspect}\"\n\n\n respond_to do |format|\n if @experience_image.save\n format.json{ render :json => @experience_image }\n else\n format.json { render json: @experience_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image_attrib = ImageAttrib.new(image_attrib_params)\n\n if @image_attrib.save\n render :show, status: :created, location: @image_attrib\n else\n render json: @image_attrib.errors, status: :unprocessable_entity\n end\n end", "def new\n @post = Post.new\n 2.times { @post.post_images.build }\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @post }\n end\n end", "def create\n @image = Image.new(image_params)\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' 
}\n format.json { render action: 'show', status: :created, location: @image }\n else\n format.html { render action: 'new' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @memberimage = Memberimage.new(params[:memberimage])\n\n respond_to do |format|\n if @memberimage.save\n format.html { redirect_to ideas_path, notice: 'Memberimage was successfully created.' }\n format.json { render json: @memberimage, status: :created, location: @memberimage }\n else\n format.html { render action: \"new\" }\n format.json { render json: @memberimage.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @property_picture = PropertyPicture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property_picture }\n end\n end", "def create\n @postimage = Postimage.new(postimage_params)\n\n respond_to do |format|\n if @postimage.save\n format.html { redirect_to @postimage, notice: 'Postimage was successfully created.' }\n format.json { render :show, status: :created, location: @postimage }\n \n else\n format.html { render :new }\n format.json { render json: @postimage.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render action: 'show', status: :created, location: @image }\n else\n format.html { render action: 'new' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render action: 'show', status: :created, location: @image }\n else\n format.html { render action: 'new' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @post_img = PostImg.new(post_img_params)\n\n respond_to do |format|\n if @post_img.save\n format.html { redirect_to @post_img, notice: 'Post img was successfully created.' 
}\n format.json { render :show, status: :created, location: @post_img }\n else\n format.html { render :new }\n format.json { render json: @post_img.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @post = Post.new(post_params)\n if @post.save\n # write out the address in the console\n puts @post.image\n render json: { image_url: @post.image }, status: 200\n else\n logger.info @post.errors.full_messages\n render json: { error: @post.errors.full_messages }, status: :not_acceptable\n end\n end", "def create\n @image = Image.new(image_params)\n respond_to do |format|\n if @image.save\n flash[:notice] = 'Image was successfully created.'\n format.html { redirect_to(@image) }\n format.json { render :json => @image, :status => :created, :location => @image }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @image.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @spot = Spot.new(spot_params)\n\n respond_to do |format|\n if @spot.save\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @spot.photos.create!(:image => a, :imageable_id => @spot.id)\n end\n end\n format.html { redirect_to [@country,@spot], notice: \"Spot was successfully created.\" }\n else\n format.html { render :new }\n end\n end\n end", "def create\n @spot = Spot.new(spot_params)\n\n respond_to do |format|\n if @spot.save\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @spot.photos.create!(:image => a, :imageable_id => @spot.id)\n end\n end\n format.html { redirect_to [@country,@spot], notice: \"Spot was successfully created.\" }\n else\n format.html { render :new }\n end\n end\n end", "def create\n @image = Image.new(image_params)\n respond_to do |format|\n if @image.save\n flash[:success] = 'Image was successfully created.'\n format.html { redirect_to new_admin_image_preview_url(image_id: @image.id) }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def image_params\n params.require(:inspection).permit(:image => [])\n end", "def create\n @photos = Photo.new(photos_params)\n if @photos.save\n render json: { id: @photos.id, url: @photos.gallery.url,\n size: @photos.gallery_file_size }\n else\n render json: { id: nil }\n end\n end", "def create\n @product_image = Product_image.new(product_image)\n\n respond_to do |format|\n if @product_image.save\n format.html { redirect_to @product_image, notice: 'Product image was successfully created.' }\n format.json { render :show, status: :created, location: @product_image }\n else\n format.html { render :new }\n format.json { render json: @product_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @sub_collection = SubCollection.new(sub_collection_params)\n\n respond_to do |format|\n if @sub_collection.save\n if not sub_collection_images_params[:sub_collection_images_attributes].nil?\n sub_collection_images_params[:sub_collection_images_attributes].each do |sci|\n @sub_collection.sub_collection_images.create(sub_collection_image: sci[:sub_collection_image])\n end\n end\n format.html { redirect_to @sub_collection, notice: 'Sub collection was successfully created.' 
}\n format.json { render :show, status: :created, location: @sub_collection }\n else\n @sub_collection.sub_collection_images.build\n format.html { render :new }\n format.json { render json: @sub_collection.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @photo = current_user.photos.new\n @greeting = [\"got something beautiful you'd like to share with the world?\", \"feeling good today? Spread the happiness one photo at a time!\", \"show the world some lovin'.\" ]\n\n\n # @photo.photo_images = photo_params[:photo_images].map do |img|\n # byebug\n # PhotoImage.new(image: img)\n # end\n # @photo = Photo.new(photo_params)\n # @photo.user_id = current_user.id\n respond_to do |format|\n if @photo.save\n # params[:photo]['images'].each do |img|\n @photo_image = PhotoImage.create(image: params[:photo][:photo_images], photo_id: @photo.id)\n # @photo_image = @photo.photo_images.create()\n \n format.html { redirect_to edit_photo_path(@photo), notice: 'Photo was successfully created.' }\n format.json { render :show, status: :created, location: @photo }\n else\n format.html { render :new }\n format.json { render json: @photo.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @gallery = find_gallery\n @gallery.save! unless @gallery.persisted?\n @image = Image.new image_params\n @image.image_gallery_id = @gallery.id\n\n respond_to do |format|\n if @image.save\n format.js\n format.json { render json: @image, status: :created, location: @image }\n else\n format.js\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\n @product = Product.new(product_params)\n @product.project_id=params[:id]\n respond_to do |format|\n if !params[:images]\n @product.errors.add(:images, ' can not be empty')\n format.html { render :new }\n format.json { render :show, status: :created, location: @product }\n elsif params[:images].length > 4\n @product.errors.add(:images, 'You Can not add more than 4 images')\n format.html { render :new }\n format.json { render :show, status: :created, location: @product } \n else\n\n if @product.save\n \n if params[:images]&&params[:images].length < 4\n\n \n #===== The magic is here ;)\n params[:images].each { |image|\n @product.product_pictures.create(image: image)\n\n }\n end\n format.html { redirect_to @product , notice: 'product was successfully created.' 
}\n format.json { render :show, status: :created, location: @product }\n else\n format.html { render :new }\n format.json { render json: @product.errors, status: :unprocessable_entity }\n end\n end\n end\nend", "def create\n create_params = product_image_params || {product_uuid: @product.uuid}\n @product_image = ProductImage.new(product_image_params)\n if @product_image.save\n render json: @product_image, status: 201\n else\n render_error 400, @product_image.errors.full_messages\n end\n end", "def create\n @image = Image.new(image_params) do |t|\n t.data = Base64.encode64(params[:image][:data].read)\n t.filename = params[:image][:data].original_filename\n t.mime_type = params[:image][:data].content_type\n end\n # @image.name = image_params['name']\n # @image.price = image_params['price']\n # @image.city = image_params['city']\n # @image.description1 = image_params['description1']\n # @image.description2 = image_params['description2'] \n @image.active = true\n @image.admin_ok = false\n @image.user_id = current_user.id\n @image.e_mail = current_user.email\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\n @product = Product.new(product_params)\n @product.images.attach(params[:product][:images]) if params[:product][:images]\n\n respond_to do |format|\n if @product.save\n format.html { redirect_to seller_product_path(current_seller.id, @product.id), notice: 'Product was successfully created.' }\n format.json { render :show, status: :created, location: @product }\n else\n format.html { render :new }\n format.json { render json: @product.errors, status: :unprocessable_entity }\n end\n end\n end" ]
[ "0.7552794", "0.7349602", "0.72869056", "0.7219757", "0.7153606", "0.69927925", "0.6989445", "0.6960517", "0.66880286", "0.6680542", "0.6634192", "0.6609078", "0.65908194", "0.6574689", "0.65677834", "0.6558739", "0.6531264", "0.64791864", "0.64762497", "0.6437422", "0.6414481", "0.64107007", "0.63740146", "0.6370683", "0.63674545", "0.63558835", "0.6345659", "0.6311461", "0.62680715", "0.62053865", "0.6198641", "0.6195357", "0.61857134", "0.6166846", "0.6164401", "0.6145506", "0.6139165", "0.6127757", "0.6119809", "0.6095304", "0.60934454", "0.6082616", "0.608175", "0.6072809", "0.60726243", "0.607075", "0.6065737", "0.6063661", "0.6056616", "0.60457754", "0.6044605", "0.6041967", "0.6038495", "0.603675", "0.60358214", "0.60202473", "0.6012061", "0.6011608", "0.6007789", "0.60060644", "0.60060644", "0.60060644", "0.60060644", "0.6004713", "0.6003961", "0.60034585", "0.59943455", "0.59906304", "0.5982895", "0.5979823", "0.5979823", "0.5979823", "0.5979823", "0.5979823", "0.59776694", "0.5971862", "0.59577787", "0.59576243", "0.59561", "0.5955065", "0.59509695", "0.59431756", "0.5938141", "0.5938141", "0.59369236", "0.59278375", "0.5927197", "0.5917949", "0.5917949", "0.5916064", "0.59135544", "0.5909941", "0.5907543", "0.58973336", "0.5891935", "0.58874375", "0.58862466", "0.5880683", "0.5880047", "0.5877637" ]
document_score: 0.7515556
document_rank: 1
PUT /property_images/1 PUT /property_images/1.json
def update
  @property_image = PropertyImage.find(params[:id])

  respond_to do |format|
    if @property_image.update_attributes(params[:property_image])
      format.html { redirect_to property_images_path, notice: 'Property image was successfully updated.' }
      format.json { head :no_content }
    else
      format.html { render action: "edit" }
      format.json { render json: @property_image.errors, status: :unprocessable_entity }
    end
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update\n @estate_agent_image = EstateAgentsImage.find(params[:id])\n\n respond_to do |format|\n if @estate_agent_image.update_attributes(params[:property_image])\n format.html { redirect_to @estate_agent_image, notice: 'Property image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @estate_agent_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def set_property_image\n @property_image = PropertyImage.find(params[:id])\n end", "def update\n @image = Image.find(params[:id])\n\n #we can allow updating an image name and description and unit, but not the image data. for that we need to create a new image\n update_params = image_params\n update_params.delete(\"image\")\n\n if @image.update(update_params)\n head :no_content\n else\n render json: @image.errors, status: :unprocessable_entity\n end\n end", "def set_image\n @image = Image.where(id: params[:id], property_id: params[:property_id]).first\n end", "def add_images\n\t\tif @current_user.present?\n\t\t\t@property = Property.find(params[:property_id])\n\t\t\tif @property.present?\n\t\t\t\t# if @property.images.present?\n\t\t\t\t# \[email protected]_all\n\t\t\t\t# end\n\t\t\t\tparams[:images].each { |image|\n\t i = @property.images.create(image: image)\n\t if i.save\n\t else\n\t \trender_json({\"status\" => \"Fail\", \"message\" => i.errors.full_messages.first}.to_json)\n\t \treturn\n\t end\n\t }\n\t @property.images.first.update_attributes(is_starred: true)\n\t render :file => 'api/v1/property/add_image'\n\t else\n\t \trender_json({\"status\" => \"Fail\", \"message\" => \"No property found.\"}.to_json)\n\t end\n\t\tend\n\tend", "def update\n @property = Property.find(params[:id])\n\n if params[:type_is] == \"photo_gallery\"\n #Cover Image Save\n if params[\"property\"][\"prop_cover_img\"]\n @property_cover_img = @property.property_cover_image.present? ? @property.property_cover_image : @property.build_property_cover_image\n @property_cover_img.upload_image(params[\"property\"][\"prop_cover_img\"])\n end\n\n #Other images\n if params[\"property\"][\"prop_imgs\"]\n params[\"property\"][\"prop_imgs\"].each do |img|\n @property.property_images.new.upload_image(img) if img\n end\n end\n\n respond_to do |format|\n # flash[:success] = \"New Images Successfully Uploaded.</br><a href='#{properties_path(active_id: @property.id)}'>Show in List</a>\"\n format.html { redirect_to edit_property_path(@property.key, type_is: params[:type_is]) }\n format.json { render action: 'show', status: :created, location: @property }\n end\n else\n prior_property_name = @property.title\n @property.assign_attributes(property_params)\n if params[:use_current_rent] == false\n @property.lease_base_rent = @property.current_rent\n end\n\n if !(@property.owner_person_is.nil? 
|| @property.owner_person_is==0)\n if @property.owner_person_is == 1 && [email protected]_entity_id_indv.nil?\n @property.owner_entity_id = @property.owner_entity_id_indv\n end\n else\n @property.owner_entity_id = @property.owner_entity_id_indv = 0\n end\n\n if @property.rent_table_version.nil?\n @property.rent_table_version = 1\n else\n @property.rent_table_version = @property.rent_table_version + 1\n end\n\n respond_to do |format|\n if @property.save\n\n # finally remove the old upload\n # Cloudinary::Uploader.destroy(public_id) unless public_id.blank?\n \n if @property.can_create_rent_table?\n rent_table_version = @property.rent_table_version\n\n base_rent = @property.lease_base_rent\n duration = @property.lease_duration_in_years\n extension_percentage = @property.lease_rent_increase_percentage || 0\n base_percentage = @property.base_rent_increase_percentage || 0\n slab = @property.lease_rent_slab_in_years || 1\n\n rent = base_rent\n @property.rent_tables.create(version: rent_table_version, rent: base_rent, description: 'Base Annual Rent')\n @property.rent_tables.create(version: rent_table_version, rent: base_rent / 12.00, description: 'Base Monthly Rent (approx.)')\n @property.rent_tables.create(version: rent_table_version, rent: base_rent/ 365.00, description: 'Base Daily Rent (approx.)')\n prev_rent = rent\n rent_start = @property.rent_commencement_date || Time.now\n rent_start = Time.now if @property.rent_commencement_date_details == 'Date not certain'\n start_year = rent_start.year\n end_year = 0\n\n count = 0\n\n if @property.lease_is_pro_rated && @property.rent_commencement_date_details != 'Date not certain'\n d = rent_start\n\n # 0 fill in the pro rated fields for the property and save\n @property.pro_rated_month = d.month\n @property.pro_rated_month_name = Date::MONTHNAMES[d.month]\n @property.pro_rated_day = d.day\n @property.pro_rated_year = d.year\n @property.pro_rated_day_date = d.to_date\n @property.pro_rated_day_rent = (base_rent / 365.00)\n @property.pro_rated_month_rent = (base_rent / 12.00) - ((d.day - 1) * @property.pro_rated_day_rent)\n @property.save\n\n @property.rent_tables.create(version: rent_table_version, rent: @property.pro_rated_month_rent,\n description: \"Pro-rated Rent for :#{Date::MONTHNAMES[d.month]} #{d.year}\")\n\n # 1 - calculate the pro-rated rent for year 1\n rent_first_year = rent * (((Date.parse(\"31/12/#{d.year}\") - d.to_date).to_i) * 1.00/(d.year % 4 == 0 ? 
366.00 : 365.00))\n # 2 - add the rent table entry\n @property.rent_tables.create(version: rent_table_version, start_year: d.year, end_year: d.year, rent: rent_first_year)\n # 3 - decrease the duration by 1 and update rent\n duration = duration - 1\n start_year = d.year + 1\n count = slab - 1\n slab = slab - 1\n end\n\n\n while start_year <= rent_start.year + duration - 1\n if count == 0\n slab = @property.lease_rent_slab_in_years || 1\n else\n count = 0\n end\n end_year = start_year + slab - 1\n\n if end_year >= rent_start.year + duration - 1\n end_year = rent_start.year + duration - 1\n end\n\n prev_rent = rent\n @property.rent_tables.create(version: rent_table_version, start_year: start_year, end_year: end_year, rent: rent)\n\n start_year = end_year + 1\n if @property.rent_increase_in_base_term_status\n rent = rent + rent * base_percentage / 100\n end\n end\n\n if @property.number_of_option_period && @property.length_of_option_period &&\n @property.number_of_option_period > 0 && @property.length_of_option_period > 0\n duration = @property.number_of_option_period * @property.length_of_option_period\n start_year = end_year + 1\n rent_start = Date.parse(\"01/01/#{start_year}\")\n rent = prev_rent\n slab = @property.length_of_option_period\n count = 1\n while start_year <= rent_start.year + duration - 1\n end_year = start_year + slab - 1\n\n if end_year >= rent_start.year + duration - 1\n end_year = rent_start.year + duration - 1\n end\n\n prev_rent = rent\n @property.rent_tables.create(version: rent_table_version,\n start_year: start_year, end_year: end_year, rent: rent,\n is_option: true, option_slab: count)\n\n start_year = end_year + 1\n rent = rent + rent * extension_percentage / 100\n count = count + 1\n end\n end\n\n end\n\n flash[:success] = \"Congratulations, you have just made a change in the record for #{prior_property_name}\"\n\n if params[:lease_sub].blank?\n format.html { redirect_to edit_property_path(@property.key, type_is: params[:type_is]) }\n else\n format.html { redirect_to edit_property_path(@property.key, type_is: params[:type_is], lease_sub: params[:lease_sub]) }\n end\n format.js { render json: @property.to_json, status: :ok }\n format.json { render action: 'show', status: :created, location: @property }\n else\n format.html { render action: 'edit' }\n format.json { render json: @property.errors, status: :unprocessable_entity }\n end\n end\n end\n end", "def update\n params[:image].delete :created_at\n params[:image].delete :updated_at\n params[:image].delete :id\n @image = Image.find(params[:id])\n if @image.update_attributes(params[:image])\n render json: @image\n else\n render json: @image.errors, status: :unprocessable_entity\n end\n end", "def update\n respond_to do |format|\n if @apartment.update(apartment_params)\n if params[:images]\n # The magic is here ;)\n params[:images].each { |image|\n if (image!=nil)\n @apartment.pictures.create(image: image)\n \n end\n }\n end\n format.html { redirect_to @apartment, notice: 'La propiedad se actualizo correctamente.' 
}\n format.json { render :show, status: :ok, location: @apartment }\n else\n format.html { render :edit }\n format.json { render json: @apartment.errors, status: :unprocessable_entity }\n end\n end\n end", "def property_image_params\n params.require(:property_image).permit(:image_url, :property_id)\n end", "def image_params\n params.require(:image).permit(:name, :url, :property_id)\n end", "def update\n @property = Property.find(params[:id])\n unless params[:property][:photos_attributes].nil?\n params[:property][:photos_attributes].each_key { |key|\n if params[:property][:photos_attributes][key.to_sym][:remove_file] == \"1\"\n @photo = Photo.find(params[:property][:photos_attributes][key.to_sym][:id])\n @photo.remove_file!\n @photo.destroy\n params[:property][:photos_attributes].delete(key.to_sym)\n end\n }\n end\n\n respond_to do |format|\n if @property.update_attributes(params[:property])\n format.html { redirect_to [:admin, @property], notice: 'Inmueble actualizado.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @property.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @apartment.update(apartment_params)\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @apartment.photos.create!(:image => a, :apartment_id => @apartment.id)\n end\n end\n format.html { redirect_to @apartment, notice: 'Apartment was successfully updated.' }\n format.json { render :show, status: :ok, location: @apartment }\n else\n format.html { render :edit }\n format.json { render json: @apartment.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n image = PropertyImage.new()\n image.photo = params[:file]\n image.property_form = @property_form\n if image.save\n respond_to do |format|\n format.json do\n json = {id: image.id}\n render json: json\n end\n end\n end\n end", "def update\n @property_picture = PropertyPicture.find(params[:id])\n\n respond_to do |format|\n if @property_picture.update_attributes(params[:property_picture])\n format.html { redirect_to @property_picture, notice: 'Property picture was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @property_picture.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @image = Image.find(params[:id])\n checkaccountobject(\"images\",@image)\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n par = img_properties\n logger.info(\"parameters: #{par.inspect}\")\n valid = true\n\n # find by user associated to app key, not by user from request parameter!\n @image = Image.editable_by(@user).find_by_guid(params[:id])\n\n return not_found if !@image\n\n # set these variables back to nil if they were in the request but blank\n if par[:ref]\n @image.ref = par[:ref].blank? ? nil : par[:ref]\n end\n if par[:name]\n @image.name = par[:name].blank? ? nil : par[:name]\n end\n if par[:page_url]\n @image.page_url = par[:page_url].blank? ? 
nil : par[:page_url]\n end\n @image.private = par[:private] if par[:private]\n\n # update calibration data if specified\n if !par[:calibrate_length].blank?\n @image.calibrate_length = par[:calibrate_length].to_f\n @image.calibrate_unit = par[:calibrate_unit].to_i if !par[:calibrate_unit].blank?\n @image.calibrate_coords = par[:calibrate_coords] if !par[:calibrate_coords].blank?\n @image.ppi = calculate_ppi(@image)\n end\n\n orig_url = par[:original_url] || par[:url]\n begin\n # this may fail\n if !orig_url.blank? && orig_url != @image.original_url\n # url was updated\n @image.remote_upload_url = orig_url\n @image.original_url = orig_url\n end\n rescue CarrierWave::DownloadError\n @image.errors.add(:remote_upload_url, \"^This url doesn't appear to be valid\")\n valid = false\n rescue CarrierWave::IntegrityError\n @image.errors.add(:remote_upload_url, \"^This url does not appear to point to a valid image\")\n valid = false\n rescue StandardError\n @image.errors.add(:remote_upload_url, \"There does not appear to be an image at this url\")\n valid = false\n end\n\n if valid && @image.save\n # update product if set\n @image.user_product = par[:product] if par[:product]\n\n image_data = @image.js_serialize\n # if the user hit the 'save and next' button, include the guid of the next image in the response.\n # The client side will redirect to the edit page for that image. \n if params[:commit] == 'save and next'\n image = Image.find_most_recent_uncalibrated(current_user.id)\n image_data['nextImage'] = image.guid if image\n end\n render :json => image_data, :callback => params[:callback]\n else\n render :json => { :error => 403, :messages => prepare_errors(@image), :callback => params[:callback] }, :status => 200\n end\n end", "def update\n respond_to do |format|\n if @smartphone.update(smartphone_params)\n if params[:images]\n params[:images].each do |val|\n temp = @smartphone.pictures.find_by(image_file_name: val.original_filename)\n if temp\n temp.update_attributes(:image => val)\n else\n @smartphone.pictures.create(image: val)\n end\n end\n end\n format.html { redirect_to @smartphone, notice: 'Smartphone was successfully updated.' }\n format.json { render :show, status: :ok, location: @smartphone }\n else\n format.html { render :edit }\n format.json { render json: @smartphone.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @image.update(image_params)\n redirect_to '/images'\n end", "def update\n if @product_image.update_attributes(product_image_params)\n render json: @product_image, status: 200\n else\n render_error 400, @product_image.errors.full_messages\n end\n end", "def update\n respond_to do |format|\n if @property.update(property_params)\n # Remove all features by property objects\n @property.features_properties.delete_all\n\n # Get features parameter\n features = params[:features]\n\n # Verify whether features array comes in the parameters list\n if features.present?\n # Intantiate & create features by property\n features_property_create = FeaturesPropertyCreate.new(@property)\n features_property_create.create(features, params[:quantities])\n end\n\n # Remove all photos by property objects\n #@property.photos.delete_all\n\n # Get photos parameter\n photos = params[:photos]\n\n # Verify whether photos array comes in the parameters list\n if photos.present?\n # Intantiate & create photos by property\n photo_create = PhotoCreate.new(@property)\n photo_create.create(photos)\n end\n\n format.html { redirect_to @property, notice: 'Property was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @property }\n else\n format.html { render :edit }\n format.json { render json: @property.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @property_image = PropertyImage.new(params[:property_image])\n\n respond_to do |format|\n if @property_image.save\n format.html { redirect_to property_images_path, notice: 'Property image was successfully created.' }\n format.json { render json: @property_image, status: :created, location: @property_image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @property_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @food.build_image(params['image']) do |t|\n if params['food']['image']['data']\n t.data = Base64.encode64(params['food']['image']['data'].read)\n t.filename = params['food']['image']['data'].original_filename\n t.mime_type = params['food']['image']['data'].content_type\n end\n end \n @food.name = @food.name.capitalize\n respond_to do |format|\n if @food.update(food_params)\n format.html { redirect_to @food, notice: 'Food was successfully updated.' }\n format.json { render :show, status: :ok, location: @food }\n else\n format.html { render :edit }\n format.json { render json: @food.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n new_images = params[:images]||[]\n\n @intention = Intention.find_by_uuid(params[:id])\n @intention.image_urls = IntentionsHelper.strip_duplicate_images(new_images)\n @intention.save\n\n points = VISBD_INTENTION_IMAGE_POINTS*new_images.count\n track_event(current_action_item(Intention::COMPONENT_TYPE)||current_enrollment, Intention::VISUALIZED_EVENT, target:@intention, points:points)\n\n render nothing: true\n end", "def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, :notice => 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @image.errors, :status => :unprocessable_entity }\n end\n end\n end", "def add_or_update_property_image\n if params[:attachment]\n image_type = (params[:prop_form_close] == \"true\" || params[:property_form_submit] == 'true') ? 'property_picture' : nil\n if (params[:prop_form_close] == \"true\" || params[:property_form_submit] == 'true')\n #image = PortfolioImage.find_by_attachable_id_and_attachable_type_and_is_property_picture(@property.id,\"RealEstateProperty\",true)\n else\n image = PortfolioImage.find_by_attachable_id_and_attachable_type(@property.id,\"RealEstateProperty\")\n end\n image != nil ? image.update_attributes(params[:attachment]) : PortfolioImage.create_portfolio_image(params[:attachment][:uploaded_data],@property.id,image_type)\n end\n end", "def update\n @variant_image = VariantImage.find(params[:id])\n\n respond_to do |format|\n if @variant_image.update_attributes(params[:variant_image])\n format.html { redirect_to @variant_image, notice: 'Variant image was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @variant_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, notice: 'Image was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @image = Image.find(params[:id])\n type = @image.url_type\n if @image.update_attributes!(params[:image])\n if request.xhr?\n render :text => [ @image.to_jq_upload(type, \"image\") ].to_json.to_s\n else\n redirect_to console_images_path\n end\n else \n if request.xhr?\n render :text => [ @image.to_jq_upload(type, \"image\").merge({ :error => \"custom_failure\" }) ].to_json.to_s\n else\n redirect_to edit_console_image_path(@image)\n end\n end\n end", "def update\n respond_to do |format|\n if @spot.update(spot_params)\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @spot.photos.create!(:image => a, :imageable_id => @spot.id)\n end\n end\n\n format.html { redirect_to [ @country, @spot ] , notice: 'Spot was successfully updated.' }\n else\n format.html { render :edit }\n end\n end\n end", "def update\n respond_to do |format|\n if @spot.update(spot_params)\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @spot.photos.create!(:image => a, :imageable_id => @spot.id)\n end\n end\n\n format.html { redirect_to [ @country, @spot ] , notice: 'Spot was successfully updated.' }\n else\n format.html { render :edit }\n end\n end\n end", "def update\n respond_to do |format|\n if @item.update(item_params)\n @item.images.delete_all\n unless params.require(:item)[:images_id].nil?\n params.require(:item)[:images_id].each do |id|\n image = Image.find_by_id(id)\n (@item.images << image) unless image.nil?\n end\n end\n format.html { redirect_to @item, notice: 'Item was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @item.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @rent.update(rent_params)\n\n if params[:image]\n puts params[:image]\n params[:image].each { |image|\n @rent.rent_images.create(rent_id: @rent.id, image:image)\n }\n \n end\n \n format.html { redirect_to @rent, notice: 'Rent was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @rent }\n else\n format.html { render :edit }\n format.json { render json: @rent.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n images_arr = params[:images]\n params[:list_item].delete(:image)\n @list_item = ListItem.find(params[:id])\n @image = Image.new(images_arr)\n @list_item.images << @image\n\n respond_to do |format|\n if @list_item.update_attributes(params[:list_item])\n format.html { redirect_to user_url(current_user), notice: t('list_items.updated') }\n format.json { render :json => @list_item }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @list_item.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @place.update(place_params)\n if params[:images]\n params[:images].each do |image|\n @place.photos.create(image: image)\n end\n end\n format.html do\n redirect_to @place, notice: 'Place was successfully updated.'\n end\n format.json { render :show, status: :ok, location: @place }\n else\n format.html { render :edit }\n format.json { render json: @place.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\n params = image_params\n params[\"tags\"] = params[\"tags\"].delete_suffix(',')\n\n respond_to do |format|\n if @image.update(params)\n format.html { redirect_to @image, notice: \"Image was successfully updated.\" }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @property = Property.new(property_params)\n # params[:property][:cover_picture].each do |image|\n mini_image = MiniMagick::Image.new(params[:property][:cover_picture].tempfile.path)\n mini_image.resize '1200x1200'\n # end\n # params[:property][:pictures].each do |image|\n # mini_image = MiniMagick::Image.new(params[:property][image].tempfile.path)\n # mini_image.resize '1200x1200'\n # end\n # @property.account_id = current_account.id\n\n respond_to do |format|\n if @property.save\n format.html { redirect_to @property, notice: 'Property was successfully created.' }\n format.json { render :show, status: :created, location: @property }\n else\n format.html { render :new }\n format.json { render json: @property.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n # respond_to do |format|\n # if @property.update(property_params)\n # format.html { redirect_to @property, notice: 'Property was successfully updated.' 
}\n # format.json { render :show, status: :ok, location: @property }\n # else\n # format.html { render :edit }\n # format.json { render json: @property.errors, status: :unprocessable_entity }\n # end\n # end\n property = Property.find params[:id]\n # raise\n if params[\"property\"][\"photo\"].nil?\n property.update property_params\n else\n cloudinary = Cloudinary::Uploader.upload( params[ \"property\" ][ \"photo\" ] )\n # link = cloudinary[\"url\"]\n # raise\n # property.update :photo => link\n # property.update :photo => cloudinary[\"url\"] # JOSH FIX\n property.photo = cloudinary[\"url\"] #John Original Code\n\n\n end\n # raise\n property.update property_params #John Original Code\n redirect_to property\n\n end", "def set_property_picture\n @property_picture = PropertyPicture.find(params[:id])\n end", "def update\n respond_to do |format|\n if image.update(image_params)\n format.html { redirect_to image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: image }\n else\n format.html { render :edit }\n format.json { render json: image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n if @experience.update(experience_params)\n\n if params[:images] \n params[:images].each do |image|\n @experience.photos.create(image: image)\n end\n end\n\n redirect_to edit_trip_experience_path(@trip,@experience), notice: \"Updated...\"\n else\n render :edit\n end\n end", "def update\n @pictures = Picture.all.order(created_at: :desc)\n @picture.update(picture_params)\n render json: @pictures\n # head :no_content\n end", "def post_property_photos(photos)\n post_photos_for_property_id(photos.first.propertyID, photos)\n end", "def show\n @property_image = PropertyImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @property_image }\n end\n end", "def update\n @family_image = FamilyImage.find(params[:id])\n\n if @family_image.update(family_image_params)\n head :no_content\n else\n render json: @family_image.errors, status: :unprocessable_entity\n end\n end", "def create\n @estate_agent_image = EstateAgentsImage.new(params[:property_image])\n\n respond_to do |format|\n if @estate_agent_image.save\n format.html { redirect_to @estate_agent_image, notice: 'Property image was successfully created.' }\n format.json { render json: @estate_agent_image, status: :created, location: @estate_agent_image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @estate_agent_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @objective_image.update(objective_image_params)\n format.html { redirect_to @objective_image, notice: 'Objective image was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @objective_image }\n else\n format.html { render :edit }\n format.json { render json: @objective_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @event = Event.find(params[:id])\n\n\t\tparams[:event][:pictures_attributes].each do |key, value|\n\t\t\[email protected] << Picture.create(value)\n\t\tend\n\n respond_to do |format|\t\t\t\n if @event.save\n format.html { redirect_to \"/#{@event.url}\" }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end", "def destroy\n @property_image = PropertyImage.find(params[:id])\n @property_image.destroy\n\n respond_to do |format|\n format.html { redirect_to property_images_url }\n format.json { head :no_content }\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @album = Album.find(params[:id])\n \n respond_to do |format|\n if @album.update_attributes(params[:album])\n @album.images.clear\n @album.images << Image.find([params[:images]].flatten)\n @album.save!\n format.html { redirect_to(albums_path, :notice => 'Album was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @album.errors, :status => :unprocessable_entity }\n end\n end\n end", "def image hash = {}\n hash = { :id => hash} if hash.is_a? String\n home hash.update :action => 'image', :trailing_slash => false\n end", "def update!(**args)\n @image = args[:image] if args.key?(:image)\n end", "def update\n\n respond_to do |format|\n if @listing.update_attributes(listing_params)\n if params[:pictures]\n #===== The magic is here ;)\n params[:pictures].each { |image| \n @listing.pictures.create(file: image)\n } \n end\n format.html { redirect_to admin_listings_url, notice: 'Listing was successfully updated.' }\n format.json { render :show, status: :ok, location: @listing }\n else\n format.html { render :edit }\n format.json { render json: @listing.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to mypictures_path}\n format.json { head :no_content }\n flash[:success] = \"Image updated\"\n\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @image = @album.images.find(params[:id])\n @image.update(image_params)\n redirect_to album_path(@image.album.id)\n end", "def update\n respond_to do |format|\n if @car.update(car_params)\n unless params[:photos].blank?\n params[:photos]['image'].each do |p|\n @photo = @car.photos.create!(:image => p, :car_id => @car.id)\n end\n end\n format.html { redirect_to @car, notice: 'Car was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @car }\n else\n format.html { render :edit }\n format.json { render json: @car.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n \n @apartment = Apartment.find params[:id]\n @building_id = params[:building_id]\n @apartment.update_attributes!(apartment_params)\n if params[:pictures]\n params[:pictures].each{ |picture|\n @apartment.images.create(picture: picture)\n }\n end\n flash[:notice] = \"Apartment Number #{@apartment.apartment_number} was successfully updated.\"\n redirect_to building_apartment_path(@building_id, @apartment)\n end", "def update\n respond_to do |format|\n if @image.update_attributes(image_params)\n flash[:notice] = 'Image was successfully updated.'\n format.html { redirect_to(@image) }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @image.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n if(@check)\n @evento.update(event_params.except(:token))\n if @evento.save\n #first we delete all the current images if there are\n if params[:event_image_data]\n @evento.event_images.each do |image|\n image.destroy\n end\n #then we will create new ones\n params[:event_image_data].each do |file|\n @evento.event_images.create!(:image => file)\n end\n end\n render json: @evento, status: :ok, location: @evento\n else\n render json: @evento.errors, status: :unprocessable_entity\n end\n end\n end", "def update\r\n respond_to do |format|\r\n if @image.update(image_params)\r\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\r\n format.json { render :show, status: :ok, location: @image }\r\n else\r\n format.html { render :edit }\r\n format.json { render json: @image.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end", "def update\n @s3_image = S3Image.find(params[:id])\n\n respond_to do |format|\n if @s3_image.update_attributes(params[:s3_image])\n format.html { redirect_to @s3_image, notice: 'S3 image was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @s3_image.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @image.update(image_params)\n format.html {redirect_to @image, notice: 'Image was successfully updated.'}\n format.json {render :show, status: :ok, location: @image}\n else\n format.html {render :edit}\n format.json {render json: @image.errors, status: :unprocessable_entity}\n end\n end\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def set_image\n @image = Image.find(params[:id])\n end", "def update\n @image = Image.find(params[:id])\n if @image.update(image_params)\n render json: {status: \"success\", data: {image:@image}}, status: :ok\n else\n render json: @comment.errors, status: 404\n end\n end", "def update\n respond_to do |format|\n if @img.update(img_params)\n format.html { redirect_to @img, notice: \"Img was successfully updated.\" }\n format.json { render :show, status: :ok, location: @img }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @img.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n flash[:notice] = 'Image Successfully updated' if @image.update_attributes! params[:image]\n respond_with @owner, @image\n end", "def update!(**args)\n @images = args[:images] unless args[:images].nil?\n end", "def set_image\r\n @image = Image.find(params[:id])\r\n end", "def update\n if !params[:event][:picture].nil?\n bucket = ENV['S3_BUCKET']\n pic = params[:event][:picture]\n content_type = pic.content_type.split('/').last\n\n if content_type == 'jpeg'\n content_type = 'jpg'\n end\n\n s3 = Aws::S3::Client.new(region: 'us-west-2')\n md5 = Digest::MD5.file(params[:event][:picture].tempfile).hexdigest\n filename = md5 + '.' + content_type\n s3.put_object(bucket: bucket, key: 'pictures/' + filename, body: pic,\n acl: 'public-read', content_type: pic.content_type)\n link = 'https://s3-us-west-2.amazonaws.com/' + bucket + '/pictures/' + filename\n old_pic = @event.pictures.first\n old_pic.delete\n @event.pictures.new(name: filename, url: link)\n end\n\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end" ]
[ "0.73272014", "0.7085052", "0.7002534", "0.6977821", "0.69507855", "0.69049263", "0.69049156", "0.6879201", "0.6794895", "0.6707284", "0.67003095", "0.66444147", "0.6618711", "0.6615083", "0.661299", "0.65631896", "0.6555667", "0.65504545", "0.642577", "0.64194363", "0.640991", "0.63784826", "0.6331182", "0.6325697", "0.632499", "0.6314875", "0.63131756", "0.63131756", "0.63131756", "0.63131756", "0.6306805", "0.63012886", "0.63012886", "0.630097", "0.6294995", "0.6276", "0.62705463", "0.627044", "0.6258519", "0.6257431", "0.6248925", "0.62269133", "0.62228805", "0.6222845", "0.6199349", "0.6198747", "0.6196509", "0.6187823", "0.61874646", "0.61748534", "0.6166113", "0.61610365", "0.61610365", "0.61610365", "0.61610365", "0.61610365", "0.61610365", "0.61610365", "0.61610365", "0.61610365", "0.61610365", "0.61610365", "0.61609244", "0.6152215", "0.61423", "0.6140946", "0.61338365", "0.6127916", "0.61212903", "0.61184084", "0.61184084", "0.61184084", "0.61184084", "0.61184084", "0.61184084", "0.61184084", "0.61134636", "0.61079276", "0.6067602", "0.6059253", "0.605377", "0.60498184", "0.60453063", "0.60453063", "0.60453063", "0.60453063", "0.60453063", "0.60453063", "0.60453063", "0.60453063", "0.60453063", "0.60453063", "0.60453063", "0.60453063", "0.6034535", "0.60232764", "0.6023053", "0.601646", "0.60145676", "0.6002244" ]
0.7433387
0
DELETE /property_images/1 DELETE /property_images/1.json
def destroy @property_image = PropertyImage.find(params[:id]) @property_image.destroy respond_to do |format| format.html { redirect_to property_images_url } format.json { head :no_content } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n render json: {status: \"success\"}, status: :ok\n end", "def destroy\n #Finds selected image\n @image = Image.find(params[:id])\n #destroy image\n @image.destroy\n respond_to do |format|\n format.html { redirect_to '/admin' }\n format.json { head :ok }\n end\n end", "def destroy\n @image.destroy\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n \n imagen = @image.filename\n \n #function in manage_images.rb\n remove_image_file(imagen)\n \n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :ok }\n end\n end", "def destroy\n @property_picture = PropertyPicture.find(params[:id])\n @property_picture.destroy\n\n respond_to do |format|\n format.html { redirect_to property_pictures_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @estate_agent_image = EstateAgentsImage.find(params[:id])\n @estate_agent_image.destroy\n\n respond_to do |format|\n format.html { redirect_to estate_agent_image_images_url }\n format.json { head :no_content }\n end\n end", "def delete\n item = FormImage.last\n id = item[:id]\n item.destroy\n render json: {id: id}\n end", "def destroy\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to(images_url) }\n format.json { head :ok }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n 
end", "def destroy\n @photo = Photo.find(params[:id])\n File.delete(Rails.root.join(\"app\",'assets','images',@photo.path))\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to photos_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @property.photos.delete_all\n @property.features_properties.delete_all\n @property.destroy\n respond_to do |format|\n format.html { redirect_to properties_url, notice: 'Property was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def deleteEntityImage( entity_id, gen_id)\n params = Hash.new\n params['entity_id'] = entity_id\n params['gen_id'] = gen_id\n return doCurl(\"delete\",\"/entity/image\",params)\n end", "def destroy\n @sample_photo.destroy\n render json: {message: 'Foto Excluida'} , status: :ok\n end", "def destroy\n @objective_image.destroy\n respond_to do |format|\n format.html { redirect_to objective_images_url, notice: 'Objective image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was deleted successfully.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image_path = ImagePath.find(params[:id])\n @image_path.destroy\n\n respond_to do |format|\n format.html { redirect_to(image_paths_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @image_url = ImageUrl.find(params[:id])\n @image_url.destroy\n\n respond_to do |format|\n format.html { redirect_to image_urls_url }\n format.json { head :no_content }\n end\n end", "def destroy\n Image.find(params[:id]).destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n redirect_to console_images_path\n end", "def destroy\n @image_section = ImageSection.find(params[:id])\n @image_section.destroy\n\n respond_to do |format|\n format.html { redirect_to image_sections_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @property_attachment.destroy\n respond_to do |format|\n format.html { redirect_to property_attachments_url, notice: \"Property attachment was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end", "def destroy\n @photo1 = Photo1.find(params[:id])\n @photo1.destroy\n\n respond_to do |format|\n format.html { redirect_to photo1s_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n head :no_content\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_images_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @setup_image.destroy\n respond_to do |format|\n format.html { redirect_to :back }\n format.json { head :no_content }\n end\n end", "def destroy\n @imagedemo.destroy\n respond_to do |format|\n format.html { redirect_to imagedemos_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @column_image.destroy\n respond_to do |format|\n format.html { redirect_to column_images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @item.image.destroy\n @item.destroy\n respond_to do |format|\n format.html { redirect_to items_url, notice: 'Item was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n @photo = Photo.find(params[:id])\n\n # Destroy s3 objects\n aws_s3_delete(@photo.key)\n Sebitmin::Application.config.thumbnail_sizes.each do |thumbnail_size|\n aws_s3_delete(@photo[\"thumbnail_key_#{thumbnail_size}\"])\n end\n\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to \"/\" }\n format.json { head :no_content }\n end\n end", "def destroy\n ImagesIndex.delete params[:id]\n respond_to do |format|\n format.html { redirect_to(\"/images_indices\") }\n format.xml { head :ok }\n end\n end", "def destroy\n @photo.destroy\n respond_to do |format|\n format.html { redirect_to @photo.item }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to photos_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n file_url = @image.url\n @image.destroy\n\n File.delete(\"public/uploads/#{file_url}\")\n\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: \"Image was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end", "def destroy\n @motivational_image = MotivationalImage.find(params[:id])\n @motivational_image.destroy\n\n respond_to do |format|\n format.html { redirect_to motivational_images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n image = Image.find(params[:id])\n if image.user_id == current_user.id\n image.destroy\n render json:{}, status:201\n end\n end", "def destroy\n @memberimage = Memberimage.find(params[:id])\n @memberimage.destroy\n\n respond_to do |format|\n format.html { redirect_to memberimages_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @admin_photo = Photo.find(params[:id])\n @admin_photo.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_photos_url }\n format.json { head :no_content }\n end\n end", "def destroy\n query = \"created_by = \\\"#{current_user.email}\\\"\"\n @photo = Photo.where(query).with_attached_images.find(params[:id])\n @photo.destroy\n respond_to do |format|\n format.html { redirect_to photos_path, notice: 'Destroyed successfully.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to(images_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to(images_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to(images_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @image.destroy()\n respond_to do |format|\n format.html { redirect_to images_url, notice: \"Image was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end", "def destroy\n @image_upload = ImageUpload.find(params[:id])\n @image_upload.destroy\n\n respond_to do |format|\n format.html { redirect_to image_uploads_url }\n format.json { head :no_content }\n end\n end", "def destroy\n output = \"oneimage delete #{resource[:name]} \", self.class.login\n `#{output}`\n end", "def destroy\n @s3_image = S3Image.find(params[:id])\n @s3_image.destroy\n\n respond_to do |format|\n format.html { redirect_to s3_images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @picture = Picture.find(params[:id])\n @picture.destroy\n render :json => true\n end", "def destroy\n @home_categories_products_indices_photo = Home::Categories::Products::Indices::Photo.find(params[:id])\n @home_categories_products_indices_photo.destroy\n\n respond_to do |format|\n format.html { redirect_to home_categories_products_indices_photos_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = ImagePost.find(params[:id])\n @image.destroy\n track_activity @image\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @experience_image.destroy\n respond_to do |format|\n format.html { redirect_to experience_images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @structure_photo.destroy\n render json: {message: 'Foto Excluida'} , status: :ok\n end", "def destroy\n @img = Img.find(params[:id])\n @img.destroy\n\n respond_to do |format|\n format.html 
{ redirect_to(imgs_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @photo = Photo.find(params[:id])\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to uploads_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @loc_image.destroy\n respond_to do |format|\n format.html { redirect_to loc_images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @featureimg.destroy\n respond_to do |format|\n format.html { redirect_to featureimgs_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image_reference.destroy\n respond_to do |format|\n format.html { redirect_to image_references_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html {redirect_to admin_path, notice: 'Image was successfully destroyed.'}\n format.json {head :no_content}\n end\n end", "def destroy\n @imagem = Imagem.find(params[:id])\n @imagem.destroy\n\n respond_to do |format|\n format.html { redirect_to imagems_url }\n format.json { head :ok }\n end\n end", "def image_destroy\n result = RestaurantManage.image_destroy(@restaurant, params[:pic_id])\n get_restaurant()\n render json: result\n end", "def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to root_path, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to mypictures_path }\n format.json { head :no_content }\n end\n end", "def destroy\n @bgimage = Bgimage.find(params[:id])\n @bgimage.destroy\n\n respond_to do |format|\n format.html { redirect_to bgimages_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @photo.destroy\n respond_to do |format|\n format.html { redirect_to uploads_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @image = Gui::Image.find(params[:id])\n @image.destroy\n redirect_to gui_panels_path\n\n # respond_to do |format|\n # format.html { redirect_to gui_images_url }\n # format.json { head :no_content }\n # end\n end", "def destroy\n @admin_image.destroy\n respond_to do |format|\n format.html { redirect_to admin_images_url, notice: 'Image was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n @img.destroy\n respond_to do |format|\n format.html { redirect_to imgs_url, notice: \"Img was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end", "def destroy\n @photo.photo.destroy\n @photo.destroy\n respond_to do |format|\n format.html { redirect_to photos_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @story_image.destroy\n respond_to do |format|\n format.html { redirect_to story_images_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @animal_image.destroy\n respond_to do |format|\n format.html\n format.js {}\n format.json { render json: { ok: true } }\n end\n end", "def destroy\n @photo = Photo.find(params[:id])\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to photos_url }\n format.json { head :ok }\n end\n end", "def destroy\n @photo = Photo.find(params[:id])\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to photos_url }\n format.json { head :ok }\n end\n end", "def destroy\n @photo = Photo.find(params[:id])\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to photos_url }\n format.json { head :ok }\n end\n end", "def destroy\n @photo = Photo.find(params[:id])\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to photos_url }\n format.json { head :ok }\n end\n end", "def destroy\n @photo = Photo.find(params[:id])\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to photos_url }\n format.json { head :ok }\n end\n end", "def destroy\n @photo = Photo.find(params[:id])\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to photos_url }\n format.json { head :ok }\n end\n end", "def destroy\n @image_to_part.destroy\n respond_to do |format|\n format.html { redirect_to image_to_parts_url }\n format.json { head :no_content }\n end\n end" ]
[ "0.73616624", "0.7265958", "0.72610325", "0.72505075", "0.72239304", "0.7183274", "0.7182659", "0.7166094", "0.7142178", "0.7142178", "0.7142178", "0.7142178", "0.7142178", "0.7142178", "0.7142178", "0.71404487", "0.71404487", "0.71404487", "0.71404487", "0.71404487", "0.71404487", "0.7104456", "0.71004134", "0.7068117", "0.7048467", "0.7001723", "0.6993732", "0.69848645", "0.69727284", "0.6965066", "0.69424903", "0.69375205", "0.69340855", "0.69326204", "0.6921982", "0.6912143", "0.69069785", "0.6897347", "0.68967766", "0.6893887", "0.6891818", "0.6888092", "0.68848485", "0.6879016", "0.6877931", "0.6877931", "0.6877931", "0.6877931", "0.6877931", "0.6877931", "0.6877931", "0.6877931", "0.6877931", "0.6877931", "0.6877931", "0.6877931", "0.6874075", "0.68702424", "0.68699616", "0.68689907", "0.68666685", "0.68573916", "0.68557554", "0.6855373", "0.6855373", "0.6855373", "0.6853959", "0.68518543", "0.6850179", "0.6849347", "0.68435174", "0.6840715", "0.683864", "0.68376625", "0.68353826", "0.6834953", "0.68337035", "0.6833637", "0.68326354", "0.68302864", "0.68264157", "0.68262726", "0.68256336", "0.6824091", "0.6824059", "0.6814583", "0.6810916", "0.68078446", "0.6801249", "0.6795342", "0.6793531", "0.6780575", "0.677592", "0.67724454", "0.67724454", "0.67724454", "0.67724454", "0.67724454", "0.67724454", "0.6771694" ]
0.79029274
0
1. Write a badge_maker method that will create and return this message, given a person's name. ex: badge_maker("Arel") => "Hello, my name is Arel." (spec: "should return a formatted badge")
def badge_maker(name)
  return "Hello, my name is #{name}." # or skip 'return', just using it here to be explicit
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def badge_maker(name)\n return \"Hello, my name is #{name}.\"\n end", "def badge_maker(name)\n\treturn \"Hello, my name is #{name}.\" \nend", "def badge_maker( name )\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker (name)\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker (name)\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker( name )\n\"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\";\nend", "def badge_maker(name)\n\t#\"Hello, my name is Arel.\"\n\t\"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n if name == \"Arel\"\n \"Hello, my name is #{name}.\"\n end\nend", "def badge_maker(name) #method that takes a name and returns \"Hello, my name is <name>\"\r\n \"Hello, my name is #{name}.\"\r\nend", "def badge_maker(speaker)\n return \"Hello, my name is #{speaker}.\"\nend", "def badge_maker(name)\n \"Hello, my name is #{name}.\"\n # \"Hello, my name is\" + name + \".\"\nend", "def badge_maker(name)\n # puts \"Hello, my name is #{name}.\"\n return \"Hello, my name is #{name}.\"\nend", "def badge_maker(name)\n badges = \"Hello, my name is #{name}.\"\n # puts \"Hello,my name is #{name}.\"\n #end\n end", "def batch_badge_creator(name)\n name.collect do |badge|\n \"Hello, my name is \" + badge + \".\"\n end\n\nend", "def batch_badge_creator(names)\n names.collect { |name| \"Hello, my name is #{name}.\"};\nend", "def make_badge( badge_text, deal )\n\n badge_text = badge_text.gsub('\"', \"\").gsub(\"'\", \"\").strip # take out single and double ticks\n badge_text = \"'\" << badge_text << \"'\" # enclose in single ticks\n badge_text = badge_text.gsub(\"$\", \"\\\\$\").gsub(\"/\", \"\") # take out forward and backslashes\n\n # img = MiniMagick::Image.open(\"public/images/yellow_starburst.jpg\")\n img = MiniMagick::Image.open(\"app/assets/images/yellow_starburst.jpg\")\n\n img.combine_options do |c|\n c.resize 
\"#{Victoria::Application::BADGE[0]}x#{Victoria::Application::BADGE[1]}\" # \"75x75\"\n c.gravity \"Center\"\n c.antialias\n c.font \"helvetica italic\"\n c.pointsize \"18\"\n c.draw \"text 0,0 #{badge_text}\" # 0,0 = x,y coords\n c.fill(\"#000000\")\n end\n # badge_path = \"badges/badge_#{deal.id}.jpg\"\n badge_path = \"badge_#{deal.id}.jpg\"\n deal.update_attribute(:badge_path, badge_path)\n # img.write(\"public/images/#{badge_path}\")\n img.write(\"app/assets/badges/#{badge_path}\")\n # returns \"app/assets/images/badges/badge_21.jpg JPEG 145x150 145x150+0+0 8-bit DirectClass 11.6KB 0.000u 0:00.010 \"\n end", "def batch_badge_creator(speakers)\n #empty array that needs strings pushed in\n badge_messages = []\n #iteration of speakers array to grab each name\n speakers.each do |guest|\n badge_messages.push(\"Hello, my name is #{guest}.\") # once a name is grabbed it creates a string using that name\n end\n badge_messages\nend", "def batch_badge_creator(name)\n badge_list = []\n name.each do |n|\n badge_list.push(\"Hello, my name is #{n}.\")\n end\n badge_list\nend", "def build_message(meal, names)\n if meal == 0\n message = \"Breakfast: \\n\" + names\n return message\n elsif meal == 1\n message = \"Dinner: \\n\" + names\n return message\n else\n message = \"ERROR, please message Conor for manual override.\"\n return message\n end\n end", "def batch_badge_creator(guests)\n guests.map { |name| \"Hello, my name is #{name}.\"}\n\nend", "def batch_badge_creator(names)\n names.each do |name|\n puts \"Hello, my name is #{name}.\"\n end\nend", "def batch_badge_creator(names)\n names.map {|n| badge_maker(n)}\nend", "def batch_badge_creator( names )\n print_output = []\n names.each do | name |\n print_output << badge_maker( name ) \n end\n print_output # return the list ready for printing\nend", "def batch_badge_creator(names)\n names.map{|name| badge_maker(name)}\nend", "def create_new_badge(badge)\n Badge.new(badge)\n end", "def batch_badge_creator(names)\n badges = []\n names.each do |name|\n badges << badge_maker(name)\n end\n return badges\nend", "def for_badge(badge); end", "def display_unique_badge(title, user) \n case title \n when \"Pioneer\"\n image_tag \"badges/OssemblePioneering.svg\", id: \"pioneer_badge\", class: \"image awarded_badge\", title: \"Pioneer: Was there when it all began and verified their account.\"\n when \"Ally\"\n image_tag \"badges/OssembleAllies.svg\", id: \"ally_badge\", class: \"image awarded_badge\", title: \"Ally: Has some allies and has made some friends!\"\n when \"Friend of the City\"\n image_tag \"badges/OssembleFriendoftheCity.svg\", id: \"friend_badge\", class: \"image awarded_badge\", title: \"Friend Of The City: Can contribute to another city.\"\n when \"Moderator\"\n image_tag \"badges/OssembleModerators.svg\", id: \"moderator_badge\", class: \"image awarded_badge\", title: \"Moderator: A Moderator of a city.\"\n else \n #fail safe\n end \n end", "def batch_badge_creator(array_of_names)\n badge_messages = []\n\n array_of_names.each do |name|\n badge_messages << badge_maker(name)\n end\n\n return badge_messages\nend", "def batch_badge_creator(array)\n badge_messages = []\n array.each {|name| badge_messages << badge_maker(name)}\n badge_messages\nend", "def batch_badge_creator(attendees)\nbadges = []\nattendees.each do |name|\n badges.push(\"Hello, my name is #{name}.\")\nend\nreturn badges\nend", "def get_message(birth_number)\n \n case birth_number\n when 1\n message = \"You're #{birth_number}. \\nThe Sun!\"\n when 2\n message = \"You're #{birth_number}. 
\\nThe Moon!\"\n when 3\n message = \"You're #{birth_number}. \\nThe Jupiter!\"\n when 4\n message = \"You're #{birth_number}. \\nThe Uranus!\"\n when 5\n message = \"You're #{birth_number}. \\nThe Mercury!\"\n when 6\n message = \"You're #{birth_number}. \\nThe Venus!\"\n when 7\n message = \"You're #{birth_number}. \\nThe Neptune!\"\n when 8\n message = \"You're #{birth_number}. \\nThe Saturn!\"\n when 9\n message = \"You're #{birth_number}. \\nThe Mars!\"\n else\n message = \"Oh my!, were you even born?\"\n end\nend", "def batch_badge_creator(attendees)\n attendees.collect do |name|\n badge_maker(name)\n# \"Hello, my name is #{name}.\" # don't think I need this\n end\nend", "def batch_badge_creator(speaker_list)\n badge_messages = []\n speaker_list.each {|name| badge_messages << \"Hello, my name is #{name}.\"}\n badge_messages\nend", "def batch_badge_creator(name_list)\n badge_batch = []\n name_list.each do |name|\n badge_batch.push(badge_maker(name))\n end\n return badge_batch\nend", "def introduce_yourself\n \"Hi, my name is #{name} and I am the boss. YAY!\"\n end", "def batch_badge_creator(attendees)\n badge_messages = []\n attendees.each do |name|\n badge_messages << badge_maker(name)\n end\n badge_messages\nend", "def batch_badge_creator(names)\n names.collect { |array_value| badge_maker(array_value) }\nend", "def find_badge(name)\n badge_name = name.delete(\" \").downcase #delete whitespace\n return name unless badges.has_key?(badge_name)\n badges[badge_name] \n end", "def batch_badge_creator(adendees)\n badges = []\n adendees.each do |atendee|\n badges.push(\"Hello, my name is #{atendee}.\")\n end\n return badges\nend", "def batch_badge_creator(attendees)\n greet_attendees = []\n attendees.each do |name|\n greet_attendees.push(\"Hello, my name is #{name}.\")\n end\n return greet_attendees\nend", "def batch_badge_creator(array_of_names)\n array_of_badges = []\n array_of_names.each {|name| array_of_badges << \"Hello, my name is #{name}.\"}\n return array_of_badges\nend", "def badge(*args)\n badge_label(:badge, *args)\n end", "def batch_badge_creator (names_list)\nlist_badge_msgs=names_list.collect {|name| badge_maker(name)}\nend", "def get_message(birth_path_number)\n case birth_path_number\n when 1\n messsage = \"One is the leader. The number one indicates the ability to stand alone and is a strong vibration. Ruled by the Sun.\"\n when 2\n messsage = \"Two is the mediator and peace-lover. The number two indicates the desire for harmony. It is a gentle, considerate, and sensitive vibration. Ruled by the Moon.\"\n when 3\n messsage = \"Three is a sociable, friendly, and outgoing vibration. Kind, positive, and optimistic, Three's enjoy life and have a good sense of humor. Ruled by Jupiter.\"\n when 4\n messsage = \"Four is the worker. Practical, with a love of detail, Fours are trustworthy, hard-working, and helpful. Ruled by Uranus.\"\n when 5\n messsage = \"Five is the freedom lover. The number five is an intellectual vibration. These are 'idea' people with a love of variety and the ability to adapt to most situations. Ruled by Mercury.\"\n when 6\n messsage = \"Six is the peace lover. The number six is a loving, stable, and harmonious vibration. Ruled by Venus.\"\n when 7\n messsage = \"Seven is the deep thinker. The number seven is a spiritual vibration. These people are not very attached to material things, are introspective, and generally quiet. Ruled by Neptune.\"\n when 8\n messsage = \"Eight is the manager. Number Eight is a strong, successful, and material vibration. 
Ruled by Saturn.\"\n when 9\n messsage = \"Nine is the teacher. Number Nine is a tolerant, somewhat impractical, and sympathetic vibration. Ruled by Mars.\"\n else\n messsage = \"You are not of this Earth\"\n end\nend", "def batch_badge_creator(speakers)\n badge_messages = []\n for speaker in speakers do\n badge_messages << \"Hello, my name is #{speaker}.\" \n end\n return badge_messages\nend", "def award_badge(participant_id, badge_name)\n badge_id = Badge.get_id_from_name(badge_name: badge_name)\n AwardedBadge.where(participant_id: participant_id, badge_id: badge_id, approval_status: 0).first_or_create\n end", "def batch_badge_creator(attendees)\n#use map to return the names with the string as a new array\n attendees.map do |attendee|\n \"Hello, my name is #{attendee}.\"\n end\nend", "def badge_for(count)\n count == 0 ? \"\" : ' ' << content_tag(:b, count, :class => 'badge')\n end", "def badge; end", "def batch_badge_creator(array)\n #empty array\n list = []\n #iterate over the names in array\n array.each do |name|\n #add the badge messages to the list array\n list << badge_maker(name)\n end\n #return list\n list\n#end of batch_badge_creator method\nend", "def create_second_text_message(first_response)\n%(When I feel #{first_response}, I will also feel:\nCarefree\nPeaceful \nRelieved\nMellow\nRelaxed)\n end", "def batch_badge_creator(array)\n new_array = []\n array.each do |name|\n new_array << (\"Hello, my name is #{name}.\")\nend\n return new_array\nend", "def get_message(birth_num)\n case birth_num\n when 1\n message = \"Your numerology number is #{birth_num}.\\nOne is the leader. The number one indicates the ability to stand alone, and is a strong vibration. Ruled by the Sun.\"\n when 2\n message = \"Your numerology number is #{birth_num}.\\nThis is the mediator and peace-lover. The number two indicates the desire for harmony. It is a gentle, considerate, and sensitive vibration. Ruled by the Moon.\"\n when 3\n message = \"Your numerology number is #{birth_num}.\\nNumber Three is a sociable, friendly, and outgoing vibration. Kind, positive, and optimistic, Three's enjoy life and have a good sense of humor. Ruled by Jupiter.\"\n when 4\n message = \"Your numerology number is #{birth_num}.\\nThis is the worker. Practical, with a love of detail, Fours are trustworthy, hard-working, and helpful. Ruled by Uranus.\"\n when 5\n message = \"Your numerology number is #{birth_num}.\\nThis is the freedom lover. The number five is an intellectual vibration. These are 'idea' people with a love of variety and the ability to adapt to most situations. Ruled by Mercury.\"\n when 6\n message = \"Your numerology number is #{birth_num}.\\nThis is the peace lover. The number six is a loving, stable, and harmonious vibration. Ruled by Venus.\"\n when 7\n message = \"Your numerology number is #{birth_num}.\\nThis is the deep thinker. The number seven is a spiritual vibration. These people are not very attached to material things, are introspective, and generally quiet. Ruled by Neptune.\"\n when 8\n message = \"Your numerology number is #{birth_num}.\\nThis is the manager. Number Eight is a strong, successful, and material vibration. Ruled by Saturn.\"\n when 9\n message = \"Your numerology number is #{birth_num}.\\nThis is the teacher. Number Nine is a tolerant, somewhat impractical, and sympathetic vibration. Ruled by Mars.\"\n else\n message = \"I am pretty sure that you were not born on the leap year date, because you should still get a Numerology number. 
Either you entered your birthddate in the incorrect format or you are not originally from Earth, you should have a number between 1-9.\"\n end\nend", "def batch_badge_creator(attendees)\n attendees.map {|i| \"Hello, my name is #{i}.\"}\nend", "def get_message(birth_path_num)\n # use a case statement to display the correct meaning\n case birth_path_num\n when 1\n message = \"Your numerology number is #{birth_path_num}.\\nOne is the leader. The number one indicates the ability to stand alone, and is a strong vibration. Ruled by the Sun.\"\n when 2\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the mediator and peace-lover. The number two indicates the desire for harmony. It is a gentle, considerate, and sensitive vibration. Ruled by the Moon.\"\n when 3\n message = \"Your numerology number is #{birth_path_num}.\\nNumber Three is a sociable, friendly, and outgoing vibration. Kind, positive, and optimistic, Three's enjoy life and have a good sense of humor. Ruled by Jupiter.\"\n when 4\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the worker. Practical, with a love of detail, Fours are trustworthy, hard-working, and helpful. Ruled by Uranus.\"\n when 5\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the freedom lover. The number five is an intellectual vibration. These are 'idea' people with a love of variety and the ability to adapt to most situations. Ruled by Mercury.\"\n when 6\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the peace lover. The number six is a loving, stable, and harmonious vibration. Ruled by Venus.\"\n when 7\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the deep thinker. The number seven is a spiritual vibration. These people are not very attached to material things, are introspective, and generally quiet. Ruled by Neptune.\"\n when 8\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the manager. Number Eight is a strong, successful, and material vibration. Ruled by Saturn.\"\n when 9\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the teacher. Number Nine is a tolerant, somewhat impractical, and sympathetic vibration. Ruled by Mars.\"\n else\n message = \"Uh oh! Your birth path number is not 1-9!\"\n end\nend", "def get_message(birth_path_num)\n # use a case statement to display the correct meaning\n case birth_path_num\n when 1\n message = \"Your numerology number is #{birth_path_num}.\\nOne is the leader. The number one indicates the ability to stand alone, and is a strong vibration. Ruled by the Sun.\"\n when 2\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the mediator and peace-lover. The number two indicates the desire for harmony. It is a gentle, considerate, and sensitive vibration. Ruled by the Moon.\"\n when 3\n message = \"Your numerology number is #{birth_path_num}.\\nNumber Three is a sociable, friendly, and outgoing vibration. Kind, positive, and optimistic, Three's enjoy life and have a good sense of humor. Ruled by Jupiter.\"\n when 4\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the worker. Practical, with a love of detail, Fours are trustworthy, hard-working, and helpful. Ruled by Uranus.\"\n when 5\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the freedom lover. The number five is an intellectual vibration. These are 'idea' people with a love of variety and the ability to adapt to most situations. 
Ruled by Mercury.\"\n when 6\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the peace lover. The number six is a loving, stable, and harmonious vibration. Ruled by Venus.\"\n when 7\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the deep thinker. The number seven is a spiritual vibration. These people are not very attached to material things, are introspective, and generally quiet. Ruled by Neptune.\"\n when 8\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the manager. Number Eight is a strong, successful, and material vibration. Ruled by Saturn.\"\n when 9\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the teacher. Number Nine is a tolerant, somewhat impractical, and sympathetic vibration. Ruled by Mars.\"\n else\n message = \"Uh oh! Your birth path number is not 1-9!\"\n end\nend", "def badge\n group.name.sub(/\\sQuiz/, '')\n end", "def greeting(name)\n \"Hi my name is #{name}, nice to meet you!\"\nend", "def batch_badge_creator(attendees)\n badge_messages = Array.new\n # creates a badge message for each attendee and shovels to the badge_messages array\n attendees.each do |attendee|\n badge_messages << badge_maker(attendee)\n end\n # return list of badge messages\n badges\nend", "def batch_badge_creator( attendees )\n attendees.map do |attendee|\n \"Hello, my name is #{attendee}.\"\n end\nend", "def batch_badge_creator(atendees)\r\n badges = []\r\n atendees.each do |first_name|\r\n badges << badge_maker(first_name)\r\n end\r\n badges\r\n end", "def batch_badge_creator(speakers)\n speakers.map do |speaker|\n \"Hello, my name is #{speaker}.\"\n end\nend", "def greeting(name)\n message = \"Hello there #{name}!\"\nend", "def get_message(birth_path_num)\n case birth_path_num\n when 1\n message = \"Your numerology number is #{birth_path_num}.\\nOne is the leader. The number one indicates the ability to stand alone, and is a strong vibration. Ruled by the Sun.\"\n when 2\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the mediator and peace-lover. The number two indicates the desire for harmony. It is a gentle, considerate, and sensitive vibration. Ruled by the Moon.\"\n when 3\n message = \"Your numerology number is #{birth_path_num}.\\nNumber Three is a sociable, friendly, and outgoing vibration. Kind, positive, and optimistic, Three's enjoy life and have a good sense of humor. Ruled by Jupiter.\"\n when 4\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the worker. Practical, with a love of detail, Fours are trustworthy, hard-working, and helpful. Ruled by Uranus.\"\n when 5\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the freedom lover. The number five is an intellectual vibration. These are 'idea' people with a love of variety and the ability to adapt to most situations. Ruled by Mercury.\"\n when 6\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the peace lover. The number six is a loving, stable, and harmonious vibration. Ruled by Venus.\"\n when 7\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the deep thinker. The number seven is a spiritual vibration. These people are not very attached to material things, are introspective, and generally quiet. Ruled by Neptune.\"\n when 8\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the manager. Number Eight is a strong, successful, and material vibration. 
Ruled by Saturn.\"\n when 9\n message = \"Your numerology number is #{birth_path_num}.\\nThis is the teacher. Number Nine is a tolerant, somewhat impractical, and sympathetic vibration. Ruled by Mars.\"\n else\n message = \"Uh oh! Your birth path number is not 1-9!\"\n end #birth_path_num case\nend", "def create_first_text_message\n%(Today, I want to feel:\nPassionate\nEnergized\nConnected\nHopeful\nAligned\n)\n end", "def batch_badge_creator(array)\n badges = []\n array.each { |name| badges << badge_maker(name)}\n badges\nend", "def get_message(birthday_number)\n case birthday_number\n #5. Assign the return value to a variable\n when 1\n @message = \"One is the leader. The number one indicates the ability to stand alone, and is a strong vibration. Ruled by the Sun.\"\n when 2\n @message = \"This is the mediator and peace-lover. The number two indicates the desire for harmony. It is a gentle, considerate, and sensitive vibration. Ruled by the Moon.\"\n when 3\n @message = \"Number Three is a sociable, friendly, and outgoing vibration. Kind, positive, and optimistic, Three's enjoy life and have a good sense of humor. Ruled by Jupiter.\"\n when 4\n @message = \"This is the worker. Practical, with a love of detail, Fours are trustworthy, hard-working, and helpful. Ruled by Uranus.\"\n when 5\n @message = \"This is the freedom lover. The number five is an intellectual vibration. These are 'idea' people with a love of variety and the ability to adapt to most situations. Ruled by Mercury.\"\n when 6\n @message = \"This is the peace lover. The number six is a loving, stable, and harmonious vibration. Ruled by Venus.\"\n when 7\n @message = \"This is the deep thinker. The number seven is a spiritual vibration. These people are not very attached to material things, are introspective, and generally quiet. Ruled by Neptune.\"\n when 8\n @message = \"This is the manager. Number Eight is a strong, successful, and material vibration. Ruled by Saturn.\"\n when 9\n @message = \"This is the teacher. Number Nine is a tolerant, somewhat impractical, and sympathetic vibration. Ruled by Mars.\"\n else @message = \"I don't have any answers for you!\"\n end\nend", "def mk_sender\n [\"-- \", \"Med kærlig hilsen, \", \"Med venlig hilsen, \", \"MVH, \", \"Hilsen \"].sample + \"Nikolaj Lepka\\n\" +\n \"Telefon: 25 14 66 83\\n\" +\n \"Email: [email protected]\\n\" +\n \"Github: https://github.com/ElectricCoffee\\n\" +\n \"Twitter: https://twitter.com/Electric_Coffee\\n\\n\"\n end", "def greeting(name)\n return \"Hello #{name}! Nice to meet you!\"\nend", "def format(result)\n client.send(Badge.new(result).to_s, format: :html)\n end", "def batch_badge_creator(speakers) # because it takes array as an argument, it will go through each of the array contents\n speakers.collect { |x| badge_maker(x) } #technically, it returns an array of new messages, not really separate messages\nend", "def batch_badge_creator(attendees)\n new = []\n attendees.each do | name |\n new << badge_maker(name)\n end\nnew\nend", "def badge_name_is_valid\n if badge_name.present?\n unless self.badge.kind_of?(Badge)\n badge_names = self.authoritative_company.company_badges.map(&:short_name).to_sentence(two_words_connector: ' or ', last_word_connector: ' or ')\n errors.add(:badge, I18n.t('activerecord.errors.models.recognition.attributes.badge_id.invalid_name', badge_names: badge_names))\n end\n end\n end", "def batch_badge_creator(attendees)\n attendees.map do |attendee|\n \"Hello, my name is #{attendee}.\"\n end\nend" ]
[ "0.8725693", "0.86042297", "0.8558621", "0.8495704", "0.8476773", "0.8476773", "0.8476773", "0.8476773", "0.8476773", "0.8476773", "0.8476773", "0.8476773", "0.8457231", "0.8457231", "0.8417373", "0.8417373", "0.8417373", "0.84007484", "0.8386274", "0.8386274", "0.83823067", "0.83823067", "0.83823067", "0.83823067", "0.83823067", "0.83823067", "0.83823067", "0.83823067", "0.83823067", "0.83823067", "0.83312565", "0.8213455", "0.8182743", "0.8145745", "0.80272794", "0.7963996", "0.7750646", "0.74802727", "0.6535364", "0.6494556", "0.6492431", "0.6446272", "0.63797987", "0.6369967", "0.63506573", "0.62992007", "0.6255343", "0.62526053", "0.6241935", "0.622972", "0.61998254", "0.61692786", "0.6157154", "0.6146226", "0.6138731", "0.6096793", "0.6059164", "0.60292387", "0.60267216", "0.59907126", "0.59898645", "0.59831256", "0.59823173", "0.59600115", "0.59589744", "0.593566", "0.5922647", "0.5915607", "0.59137195", "0.5900144", "0.5897771", "0.5893552", "0.58811224", "0.58724916", "0.58720833", "0.58584535", "0.5853832", "0.5835554", "0.5814909", "0.58144104", "0.5812161", "0.5812161", "0.5804709", "0.5779017", "0.57641464", "0.5739149", "0.573092", "0.5711073", "0.56964517", "0.5691662", "0.568986", "0.5674033", "0.5665793", "0.5649876", "0.5648", "0.56461245", "0.56317073", "0.56290716", "0.5619078", "0.561894" ]
0.8172926
33
b. Write a batch_badge_creator method that takes a list of names as an argument and returns a list of badge messages. Hint: Use the variable you created in part a (the speakers array). collect iterates over the array and creates a new array containing modified values.
def batch_badge_creator(speakers) # because it takes array as an argument, it will go through each of the array contents
  speakers.collect { |x| badge_maker(x) } # technically, it returns an array of new messages, not really separate messages
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def batch_badge_creator(speakers)\n badge_messages = []\n for speaker in speakers do\n badge_messages << \"Hello, my name is #{speaker}.\" \n end\n return badge_messages\nend", "def batch_badge_creator(names)\n names.collect { |array_value| badge_maker(array_value) }\nend", "def batch_badge_creator(speakers)\n #empty array that needs strings pushed in\n badge_messages = []\n #iteration of speakers array to grab each name\n speakers.each do |guest|\n badge_messages.push(\"Hello, my name is #{guest}.\") # once a name is grabbed it creates a string using that name\n end\n badge_messages\nend", "def batch_badge_creator(array_of_names)\n badge_messages = []\n\n array_of_names.each do |name|\n badge_messages << badge_maker(name)\n end\n\n return badge_messages\nend", "def batch_badge_creator(speaker_list)\n badge_messages = []\n speaker_list.each {|name| badge_messages << \"Hello, my name is #{name}.\"}\n badge_messages\nend", "def batch_badge_creator (names_list)\nlist_badge_msgs=names_list.collect {|name| badge_maker(name)}\nend", "def batch_badge_creator(array_of_names)\n array_of_badges = []\n array_of_names.each {|name| array_of_badges << \"Hello, my name is #{name}.\"}\n return array_of_badges\nend", "def batch_badge_creator(speakers)\n speakers.map do |speaker|\n \"Hello, my name is #{speaker}.\"\n end\nend", "def batch_badge_creator(array)\n #empty array\n list = []\n #iterate over the names in array\n array.each do |name|\n #add the badge messages to the list array\n list << badge_maker(name)\n end\n #return list\n list\n#end of batch_badge_creator method\nend", "def batch_badge_creator(speaker_array)\n speaker_badge_array = []\n speaker_array.each do |speaker| speaker_badge_array << badge_maker(speaker)\n end\n speaker_badge_array\nend", "def batch_badge_creator(name_list)\n badge_batch = []\n name_list.each do |name|\n badge_batch.push(badge_maker(name))\n end\n return badge_batch\nend", "def batch_badge_creator(names)\n badges = []\n names.each do |name|\n badges << badge_maker(name)\n end\n return badges\nend", "def batch_badge_creator(names)\n names.collect { |name| \"Hello, my name is #{name}.\"};\nend", "def batch_badge_creator(names)\n names.map{|name| badge_maker(name)}\nend", "def batch_badge_creator(names_array)\n badges_array = []\n\n for name in names_array do\n badges_array.push(badge_maker(name))\n end\n\n badges_array\nend", "def batch_badge_creator(names)\n names.map {|n| badge_maker(n)}\nend", "def batch_badge_creator(array)\n badge_messages = []\n array.each {|name| badge_messages << badge_maker(name)}\n badge_messages\nend", "def batch_badge_creator(array)\n array.collect do |name|\n badge_maker(name)\n end\nend", "def batch_badge_creator(name)\n badge_list = []\n name.each do |n|\n badge_list.push(\"Hello, my name is #{n}.\")\n end\n badge_list\nend", "def batch_badge_creator(array)\n badges = []\n array.each { |name| badges << badge_maker(name)}\n badges\nend", "def batch_badge_creator( names )\n print_output = []\n names.each do | name |\n print_output << badge_maker( name ) \n end\n print_output # return the list ready for printing\nend", "def batch_badge_creator(attendees)\nbadges = []\nattendees.each do |name|\n badges.push(\"Hello, my name is #{name}.\")\nend\nreturn badges\nend", "def batch_badge_creator(attendees)\n greet_attendees = []\n attendees.each do |name|\n greet_attendees.push(\"Hello, my name is #{name}.\")\n end\n return greet_attendees\nend", "def batch_badge_creator(array)\n new_array = []\n array.each do |name|\n new_array << (\"Hello, my 
name is #{name}.\")\nend\n return new_array\nend", "def batch_badge_creator(name)\n name.collect do |badge|\n \"Hello, my name is \" + badge + \".\"\n end\n\nend", "def batch_badge_creator(names)\n names.each do |name|\n puts \"Hello, my name is #{name}.\"\n end\nend", "def batch_badge_creator(adendees)\n badges = []\n adendees.each do |atendee|\n badges.push(\"Hello, my name is #{atendee}.\")\n end\n return badges\nend", "def batch_badge_creator(attendees)\n badge_messages = []\n attendees.each do |name|\n badge_messages << badge_maker(name)\n end\n badge_messages\nend", "def batch_badge_creator(array)\n new_array = array.map { |e| \"Hello, my name is #{e}.\" }\nend", "def batch_badge_creator(array) \n badge_messages = []\n array.each do |object| \n badge_message = badge_maker(object)\n badge_messages.push(badge_message)\n end\n return badge_messages\nend", "def batch_badge_creator(attendees)\n#use map to return the names with the string as a new array\n attendees.map do |attendee|\n \"Hello, my name is #{attendee}.\"\n end\nend", "def batch_badge_creator(guests)\n guests.map { |name| \"Hello, my name is #{name}.\"}\n\nend", "def batch_badge_creator(attendees)\n attendees.collect do |name|\n badge_maker(name)\n# \"Hello, my name is #{name}.\" # don't think I need this\n end\nend", "def batch_badge_creator(names)\nnames.each do |names_array|\n puts names_array\nend\nend", "def batch_badge_creator(attendees)\n attendees.map {|i| \"Hello, my name is #{i}.\"}\nend", "def batch_badge_creator(attendees)\n attendees.collect do |name| \n badge_maker(name)\n end\nend", "def batch_badge_creator (attendees)\n attendees.collect do |name| badge_maker (name)\n end\nend", "def batch_badge_creator(participants)\n\tnew_list = []\n\tparticipants.each do |individual|\n\t\tnew_list << badge_maker(individual)\n\tend\n\treturn new_list\nend", "def batch_badge_creator(attendees)\n\tattendees.map{|name| badge_maker(name)}\nend", "def batch_badge_creator(attendees)\r\n batch_attendees = []\r\n attendees.each do | name |\r\n batch_attendees << badge_maker(name) # add the name to each iteration of the badge_maker method\r\n end\r\n return batch_attendees\r\nend", "def batch_badge_creator(attendees)\n new = []\n attendees.each do | name |\n new << badge_maker(name)\n end\nnew\nend", "def batch_badge_creator(attendees)\n badge_messages = Array.new\n # creates a badge message for each attendee and shovels to the badge_messages array\n attendees.each do |attendee|\n badge_messages << badge_maker(attendee)\n end\n # return list of badge messages\n badges\nend", "def batch_badge_creator(attendees)\n attendees.map do |name|\n badge_maker(name)\n end\nend", "def batch_badge_creator( attendees )\n attendees.map do |attendee|\n \"Hello, my name is #{attendee}.\"\n end\nend", "def batch_badge_creator(attendees)\n attendees.map do |attendee|\n \"Hello, my name is #{attendee}.\"\n end\nend", "def batch_badge_creator(atendees)\r\n badges = []\r\n atendees.each do |first_name|\r\n badges << badge_maker(first_name)\r\n end\r\n badges\r\n end", "def batch_badge_creator(attendee)\n # the below code would work more effectively but since I found it somewhere else I decided to use another method to prove to myself I could do it on my own. 
Since the collect method saves the results automatically to a new array, it doesnt require the extra step I needed to do with the .each method.\n # attendee.collect do |i|\n # badge_maker(i)\n # end\n\n badges =[]\n attendee.each {|i| badges.push badge_maker(i)}\n return badges\nend", "def batch_badge_creator(attendees)\n attendees.collect do |attendee|\n badge_maker(attendee)\n end\nend", "def batch_badge_creator(badges)\n array=Array.new\n badges. each do |individuals|\n array=array.push(badge_maker(individuals))\n end\n return array;\n\nend", "def printer (array)\nrooms = assign_rooms(array)\nnames = batch_badge_creator(array)\nrooms.each do |room1|\n puts room1\nend\nnames.each do |name1|\n puts name1\nend\nend", "def printer(array_speakers)\n batch_badge_creator(array_speakers).each {|badge| puts badge}\n assign_rooms(array_speakers).each {|room| puts room}\nend", "def printer(names_array)\n badges_array = batch_badge_creator(names_array)\n room_assignments_array = assign_rooms(names_array)\n\n num_attendees = names_array.count\n\n for i in (0...num_attendees) do\n puts badges_array[i]\n puts room_assignments_array[i]\n end\n\nend", "def printer(speakers)\n badges = batch_badge_creator(speakers)\n rooms = assign_rooms(speakers)\n\n badges.each_with_index {|badge,index| puts badge}\n rooms.each_with_index {|room,index| puts room}\n # badges.each_with_index {|badge,index| puts badge[index]}\n # rooms.each_with_index {|room,index| puts room[index]}\nend", "def printer(speakers)\n batch_badge_creator(speakers).each do |badge|\n puts badge\n end\n assign_rooms(speakers).each do |room|\n puts room\n end\nend", "def printer(names)\n batch_badge_creator(names).each { |name| puts name };\n assign_rooms(names).each {|name| puts name};\nend", "def assign_rooms(speakers) # same logic -- using array as an argument\n speakers.collect { |x| \"Hello, #{x}! You'll be assigned to room #{speakers.index(x)+1}!\" }\nend", "def printer(nameArr)\n b = batch_badge_creator(nameArr)\n d = assign_rooms(nameArr)\n i=0\n i1=0\n while i < b.size\n puts b[i]\n i+=1\n end\n\n while i1 < d.size\n puts d[i1]\n i1+=1\n end\nend", "def printer(array)\n badges = batch_badge_creator(array)\n rooms = assign_rooms(array)\n badges.each{|badge| puts badge}\n rooms.each{|room| puts room}\nend", "def assign_rooms(speakerArray)\n messages = []\n roomNumber = 1\n speakerArray.each do |speaker|\n messages << \"Hello, #{speaker}! You'll be assigned to room #{roomNumber}!\"\n roomNumber+= 1\n end\n messages\nend", "def assign_rooms(speakers)\n speaker_array = []\n speakers.each_with_index {|speaker, index| speaker_array << \"Hello, #{speaker}! You'll be assigned to room #{index + 1}!\"}\n speaker_array\nend", "def printer(array) \n batch_badge_creator(array).each do |result|\n puts result\n end\n assign_rooms(array).each do |result|\n puts result\n end\nend", "def printer(name)\n #iterate through the batch_badge_creator array and puts current element\n batch_badge_creator(name).each do |attendee|\n puts attendee\n end\n #iterate through the assign_rooms array and puts current element\n assign_rooms(name).each do |room|\n puts room\n end\nend", "def assign_rooms(speakers)\n new_arr = []\n speakers.each_with_index do |name, i |\n new_arr << \"Hello, #{name}! 
You'll be assigned to room #{i + 1}!\"\n end\n new_arr\nend", "def printer(array)\n batch_badge_creator(array).each do |line|\n puts line\n end\n assign_rooms(attendees).each do |room|\n puts room\n end\nend", "def printer(attendees)\n\n batch_badge_creator(attendees).each do |badge_string|\n puts badge_string\n end\n \n assign_rooms(attendees).each do |room_string|\n puts room_string\n end \n \nend", "def assign_rooms(array_speakers)\n room_assignments = []\n array_speakers.each_with_index {|speaker, room| room_assignments.push \"Hello, #{speaker}! You'll be assigned to room #{room+1}!\"}\n room_assignments\nend", "def printer(attendees)\nbatch_badge_creator(attendees).each_with_index do |badges|\n puts badges\nend\nassign_rooms(attendees).each_with_index do |room_assignments|\n puts room_assignments\nend\nend", "def printer (attendees)\n the_badges = batch_badge_creator (attendees)\n the_rooms = assign_rooms (attendees)\n the_badges.each do |x|\n puts \"#{x}\"\n end\n the_rooms.each do |x|\n puts \"#{x}\"\n end\nend", "def printer(attendees)\n badges = batch_badge_creator(attendees)\n rooms = assign_rooms(attendees)\n\n badges.zip(rooms)\n\n badges.zip(rooms).each do |badges, rooms|\n puts badges\n puts rooms\n end\n\nend", "def assign_rooms(speakers)\n speakers.each.with_index(1).collect {|speaker, index| \"Hello, #{speaker}! You'll be assigned to room #{index}!\"}\nend", "def assign_rooms(speakers)\n assigned_rooms = []\n speakers.each.with_index(1) do |speaker, room| \n assigned_rooms << \"Hello, #{speaker}! You'll be assigned to room #{room}!\"\n end\n return assigned_rooms\nend", "def assign_rooms (names_list)\n\t\nmsgs_with_room_no=names_list.collect.each_with_index{ |current_name,current_index| \"Hello, #{current_name}! You'll be assigned to room #{current_index+1}!\"\n }\n\t\nend", "def printer(parameter)\n\n badges = batch_badge_creator(parameter) #new variable holding return of #batch_badge_creator method\n rooms = assign_rooms(parameter) #new variable holding return of #assign_rooms method\n\n badges.each do |element| #this loops through badges array...\n puts element #and outputs each element\n end #end for each loop\n\n rooms.each do |element| #this loops through rooms array...\n puts element #and outputs each element\n end #end for each loop\nend", "def assign_rooms(guest_list)\n #each_with_index: keep track of each name provided and push the message to new array with the name and message\n room_assignments = []\n guest_list.each_with_index { |name, index| room_assignments << \"Hello, #{name}! You'll be assigned to room #{index+1}!\" }\n room_assignments\nend", "def new_badges\n achievements.map(&:new_badges).flatten\n end", "def assign_rooms(attendee)\n badges = []\n attendee.each.with_index(1) { |val,index| badges.push \"Hello, #{val}! 
You'll be assigned to room #{index}!\"}\n return badges\nend", "def printer(attendees)\n last_array = batch_badge_creator(attendees)\n last_array2 = assign_rooms(attendees)\n counter = 0\n\n last_array.each do |output1|\n puts last_array[counter]\n puts last_array2[counter]\n counter += 1\n end\n \nend", "def listings_to_badge\n return [] unless index_of_group_boundary_listing.present?\n\n cheapest_listings.take(index_of_group_boundary_listing + 1)\n end", "def printer(attendee)\n batch_badge_creator(attendee).each do |x| puts x #interates through the attendee array to out\"puts\" the list of badges created by \"batch_badge_creator\"\n end\n assign_rooms(attendee).each do |x| puts x #interates through the attendee array to out\"puts\" the room assignment list created by \"assign_rooms\"\n end\nend", "def greetings(names)\n\tnames.map { |name| \n\t\t\"Hello #{name}!\"\n\t}\nend", "def build_list_of_players\n [\n \"cliff\",\n \"anne\",\n \"harry\",\n \"sam\",\n \"devin\",\n \"ally\",\n \"bob\",\n \"jane\",\n \"jimmy\",\n \"dave\"\n ]\nend", "def create_badge_sprites\n @badges = Array.new(BADGE_COUNT) do |index|\n sprite = Sprite.new(@viewport).set_bitmap('tcard/badges', :interface)\n sprite.set_position(BADGE_ORIGIN_COORDINATE.first + (index % 2) * BADGE_OFFSET.first,\n BADGE_ORIGIN_COORDINATE.last + (index / 2) * BADGE_OFFSET.last)\n sprite.src_rect.set((index % 2) * BADGE_SIZE.first, (index / 2) * BADGE_SIZE.last, *BADGE_SIZE)\n sprite.visible = $trainer.has_badge?(index + 1)\n next(sprite)\n end\n end", "def assign_rooms(speakers)\n room_assignments = []\n speakers.each_with_index { |speaker,index| room_assignments << \"Hello, #{speaker}! You'll be assigned to room #{index+1}!\"}\n room_assignments\nend", "def pbPocketNames; return [\"\",\n _INTL(\"Items\"),\n _INTL(\"Medicine\"),\n _INTL(\"Jermo Balls\"),\n _INTL(\"TMs & HMs\"),\n _INTL(\"Bewwies\"),\n _INTL(\"Mail\"),\n _INTL(\"Battle Items\"),\n _INTL(\"Key Items\")\n]; end", "def printer(attendees)\n batch_badge_creator(attendees)\n assign_rooms(attendees)\n\n batch_badge_creator(attendees).each do |badge|\n puts badge\n end\n\n assign_rooms(attendees).each do |room_assignment|\n puts room_assignment\n end\nend", "def method_people(names, number)\n names_array = names.shuffle.map\n teams = (names.length.to_f / number).ceil\n new_array = Array.new(teams) { Array.new }\n count = 0\n names_array.map do |name|\n new_array[count] << name\n count == (teams - 1) ? (count = 0) : (count += 1)\n end\n new_array\n end", "def assign_rooms(array)\n new_array = []\n counter = 1\n array.each do |name|\n new_array.push(\"Hello, #{name}! 
You'll be assigned to room #{counter}!\")\n counter += 1\nend\n return new_array\nend", "def load_worker_array\n @worker_array = Worker.where(active: true).order(last_name: :asc).map { |worker_array| [worker_array.full_name, worker_array.badge_nbr]}\nend", "def printer(guests)\n batch_badge_creator(guests).map{|greeting| puts greeting}\n assign_rooms(guests).map{|room_assignment| puts room_assignment}\nend", "def badge_maker(name)\n return \"Hello, my name is #{name}.\"\n end", "def printer(attendees)\n batch_badge_creator(attendees).each do |badge|\n puts badge\n end\n assign_rooms(attendees).each do |room|\n puts room\n end\nend", "def badge_maker(speaker)\n return \"Hello, my name is #{speaker}.\"\nend", "def printer(attendees)\n batch_badge_creator(attendees).each do |x|\n puts x\n end\n assign_rooms(attendees).each do |x|\n puts x\n end\nend", "def generate_list(name, count); end", "def printer(attendees)\n badges = batch_badge_creator(attendees)\n badges.each do |badge|\n puts \"#{badge}\"\n end\n\n rooms = assign_rooms(attendees)\n rooms.each do |room|\n puts \"#{room}\"\n end\nend", "def mix_names(girls_array, boys_array)\n b_i = 0 \n boys_array.length.times do \n girls_array << boys_array[b_i]\n b_i += 1 \n end \n output = girls_array\n return output\nend", "def test_loop(badgedefs, user_id, course_points, user_points, exercises)\n to_award = []\n errors = []\n badgedefs.each do |badgedef|\n Rails.logger.debug(\"Evaluating BadgeDef #{badgedef.id}, user #{user_id}\")\n rezult = BadgeHelper.evaluate_badgedef(badgedef, user_id, course_points,\n user_points, exercises)\n if rezult[:ok]\n to_award.push(badgedef) if rezult[:give_badge]\n else\n # This clunky line has to be used because\n # [a, b, c].push([x, y, z]) doesn't give [a, b, c, x, y, z]\n # but rather [a, b, c, [x, y, z]].\n rezult[:errors].each { |e| errors.push(e) }\n end\n end\n { to_award: to_award, errors: errors }\n end", "def printer(attendees)\n batch_badge_creator(attendees).each do |badge|\n puts badge\n end\n assign_rooms(attendees).each do |assignment|\n puts assignment\nend\nend", "def badge_maker(name)\n\treturn \"Hello, my name is #{name}.\" \nend", "def assign_rooms(array_of_names)\n room_assignments = []\n\n array_of_names.each_with_index do |name, index|\n room_assignments << \"Hello, #{name}! You'll be assigned to room #{index + 1}!\"\n end\n\n room_assignments\nend" ]
[ "0.8458191", "0.8451556", "0.8446363", "0.83969724", "0.8392957", "0.83397466", "0.82760596", "0.8260213", "0.823035", "0.82086676", "0.8206306", "0.8194381", "0.8176962", "0.8166093", "0.8164338", "0.81437826", "0.81309587", "0.8077357", "0.80665547", "0.8006627", "0.78902286", "0.7866954", "0.78347903", "0.7795128", "0.7784452", "0.77200955", "0.76613915", "0.7632893", "0.7627892", "0.76181513", "0.7605288", "0.7585053", "0.75802875", "0.7545188", "0.74925554", "0.74884117", "0.7485032", "0.7475391", "0.7460365", "0.74589974", "0.7454854", "0.7424902", "0.7403501", "0.7395083", "0.73185766", "0.7181595", "0.71659005", "0.70358044", "0.6972835", "0.61700517", "0.60842973", "0.6036195", "0.59165496", "0.57342064", "0.5628956", "0.5593792", "0.55351377", "0.5533558", "0.5528181", "0.5517371", "0.5512895", "0.54976386", "0.5461371", "0.54435915", "0.5382995", "0.5361473", "0.5274865", "0.52259254", "0.5216041", "0.5212217", "0.5201088", "0.5189716", "0.5188933", "0.5124967", "0.51217866", "0.5101939", "0.5071295", "0.5070236", "0.5059552", "0.5053418", "0.5051361", "0.50490445", "0.5043199", "0.50407696", "0.5022017", "0.5011205", "0.5000127", "0.498576", "0.49773595", "0.49609107", "0.49495026", "0.49453193", "0.49443355", "0.49429452", "0.49416494", "0.49303782", "0.4917246", "0.4907154", "0.48941913", "0.48577213" ]
0.86165065
0
or use 'map', don't use collect like the below to see the transformed array: attendees.map do |attendee| badge_maker(attendee) end with each: attendees.each do |attendee| badge_maker(attendee) end this is basically using yield; the method takes in an array, and the enumerator will go through attendees one by one 3. You just realized that you also need to give each speaker a room assignment. Write a method called assign_rooms that takes the list of speakers and will assign each speaker to a room. Make sure that each room only has one speaker. You have rooms 1-7. Return a list of room assignments in the form of: "Hello, _____! You'll be assigned to room _____!"
def assign_rooms(speakers) # same logic -- using array as an argument speakers.collect { |x| "Hello, #{x}! You'll be assigned to room #{speakers.index(x)+1}!" } end
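A minimal alternative sketch (illustrative only; not the stored answer, and the speaker names below are made up): map.with_index(1) hands each speaker a 1-based room number directly, which avoids the repeated speakers.index(x) scan in the answer above and the wrong-room result that scan would give if two speakers shared a name.

def assign_rooms(speakers)
  # with_index(1) starts counting at 1, so the index doubles as the room number
  speakers.map.with_index(1) do |speaker, room|
    "Hello, #{speaker}! You'll be assigned to room #{room}!"
  end
end

assign_rooms(["Edsger", "Ada", "Charles"])
# => ["Hello, Edsger! You'll be assigned to room 1!",
#     "Hello, Ada! You'll be assigned to room 2!",
#     "Hello, Charles! You'll be assigned to room 3!"]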
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def assign_rooms(attendee)\n badges = []\n attendee.each.with_index(1) { |val,index| badges.push \"Hello, #{val}! You'll be assigned to room #{index}!\"}\n return badges\nend", "def assign_rooms(attendees)\n#use #each_with_index to create a room pair for each name. use map to return as new array\n attendees.each_with_index.map do |attendee, index|\n#index+1 to start index count from 1 instead of 0\n \"Hello, #{attendee}! You'll be assigned to room #{index+1}!\"\n end\nend", "def assign_rooms(attendees)\n attendees.map.each_with_index {|name, i| \"Hello, #{name}! You'll be assigned to room #{i + 1}!\"}\nend", "def assign_rooms(attendees)\n attendees.each_with_index.map do |attendee, index|\n \"Hello, #{attendee}! You'll be assigned to room #{index+1}!\"\n end\nend", "def assign_rooms(attendees)\n attendees.each_with_index.collect do |attendee, index|\n \"Hello, #{attendee}! You'll be assigned to room #{index+1}!\"\n end\nend", "def assign_rooms(attendees)\n attendees.each_with_index.map do | attendee, index |\n \"Hello, #{attendee}! You'll be assigned to room #{index+1}!\"\n end\nend", "def batch_badge_creator(attendees)\n#use map to return the names with the string as a new array\n attendees.map do |attendee|\n \"Hello, my name is #{attendee}.\"\n end\nend", "def assign_rooms(attendees)\n attendees.each_with_index.map do |x, index|\n \"Hello, #{x}! You'll be assigned to room #{index + 1}!\"\n end\nend", "def assign_rooms(attendees)\nroom_number = 0\n attendees.collect do |attendee|\n \"Hello, #{attendee}! You'll be assigned to room #{room_number += 1}!\"\n end\nend", "def assign_rooms(speakers)\n speakers.each.with_index(1).collect {|speaker, index| \"Hello, #{speaker}! You'll be assigned to room #{index}!\"}\nend", "def assign_rooms(attendees)\nattendees.each_with_index.map do |name, room_number|\n\"Hello, #{name}! You'll be assigned to room #{room_number+1}!\"\nend\nend", "def assign_rooms( attendees )\n newlist = []\n attendees.each_with_index do |attendee, index|\n newlist << \"Hello, #{attendee}! You'll be assigned to room #{index + 1}!\"\n end\n return newlist\nend", "def batch_badge_creator( attendees )\n attendees.map do |attendee|\n \"Hello, my name is #{attendee}.\"\n end\nend", "def assign_rooms(speakers)\n assigned_rooms = []\n speakers.each.with_index(1) do |speaker, room| \n assigned_rooms << \"Hello, #{speaker}! You'll be assigned to room #{room}!\"\n end\n return assigned_rooms\nend", "def batch_badge_creator(attendees)\n attendees.map do |attendee|\n \"Hello, my name is #{attendee}.\"\n end\nend", "def batch_badge_creator(attendees)\n attendees.map do |name|\n badge_maker(name)\n end\nend", "def batch_badge_creator(attendees)\n attendees.collect do |attendee|\n badge_maker(attendee)\n end\nend", "def assign_rooms(guests)\n guests.map{|guest| \"Hello, #{guest}! You'll be assigned to room #{guests.index(guest) + 1}!\"}\nend", "def batch_badge_creator(attendees)\n attendees.collect do |name| \n badge_maker(name)\n end\nend", "def batch_badge_creator (attendees)\n attendees.collect do |name| badge_maker (name)\n end\nend", "def batch_badge_creator(attendees)\n attendees.map {|i| \"Hello, my name is #{i}.\"}\nend", "def assign_rooms(speakers)\n speaker_array = []\n speakers.each_with_index {|speaker, index| speaker_array << \"Hello, #{speaker}! You'll be assigned to room #{index + 1}!\"}\n speaker_array\nend", "def assign_rooms(attendees)\n room_assignments = []\n attendees.each_with_index do |attendee, index|\n room_assignments << \"Hello, #{attendee}! 
You'll be assigned to room #{index + 1}!\"\n end\n room_assignments\nend", "def assign_rooms(array_speakers)\n room_assignments = []\n array_speakers.each_with_index {|speaker, room| room_assignments.push \"Hello, #{speaker}! You'll be assigned to room #{room+1}!\"}\n room_assignments\nend", "def assign_rooms(speakers)\n room_assignments = []\n speakers.each_with_index { |speaker,index| room_assignments << \"Hello, #{speaker}! You'll be assigned to room #{index+1}!\"}\n room_assignments\nend", "def batch_badge_creator(attendees)\n attendees.collect do |name|\n badge_maker(name)\n# \"Hello, my name is #{name}.\" # don't think I need this\n end\nend", "def batch_badge_creator(attendee)\n # the below code would work more effectively but since I found it somewhere else I decided to use another method to prove to myself I could do it on my own. Since the collect method saves the results automatically to a new array, it doesnt require the extra step I needed to do with the .each method.\n # attendee.collect do |i|\n # badge_maker(i)\n # end\n\n badges =[]\n attendee.each {|i| badges.push badge_maker(i)}\n return badges\nend", "def assign_rooms(attendees)\n attendees_room = []\n attendees.each_with_index do |name, index|\n attendees_room.push(\"Hello, #{name}! You'll be assigned to room #{index + 1}!\")\n end\nreturn attendees_room\nend", "def batch_badge_creator(attendees)\n\tattendees.map{|name| badge_maker(name)}\nend", "def printer( attendees )\n newlist = []\n attendees.each_with_index do |attendee, index|\n puts \"Hello, my name is #{attendee}.\"\n newlist << \"Hello, #{attendee}! You'll be assigned to room #{index + 1}!\"\n end\n newlist.map {|person| puts person }\nend", "def assign_rooms(speakerArray)\n messages = []\n roomNumber = 1\n speakerArray.each do |speaker|\n messages << \"Hello, #{speaker}! You'll be assigned to room #{roomNumber}!\"\n roomNumber+= 1\n end\n messages\nend", "def batch_badge_creator(speakers)\n speakers.map do |speaker|\n \"Hello, my name is #{speaker}.\"\n end\nend", "def assign_rooms(attendees)\n var1 = []\n attendees.each_with_index do |speaker, index|\n var1 << \"Hello, #{speaker}! You'll be assigned to room #{index +1}!\"\n end\n var1\nend", "def assign_rooms(attendees)\n attendees.map.with_index do |name, index|\n \"Hello, #{name}! 
You'll be assigned to room #{index + 1}!\"\n end\nend", "def printer(attendees)\n badges = batch_badge_creator(attendees)\n rooms = assign_rooms(attendees)\n\n badges.zip(rooms)\n\n badges.zip(rooms).each do |badges, rooms|\n puts badges\n puts rooms\n end\n\nend", "def batch_badge_creator(speakers) # because it takes array as an argument, it will go through each of the array contents\n speakers.collect { |x| badge_maker(x) } #technically, it returns an array of new messages, not really separate messages\nend", "def batch_badge_creator(attendees)\n badge_messages = Array.new\n # creates a badge message for each attendee and shovels to the badge_messages array\n attendees.each do |attendee|\n badge_messages << badge_maker(attendee)\n end\n # return list of badge messages\n badges\nend", "def batch_badge_creator(attendees)\r\n batch_attendees = []\r\n attendees.each do | name |\r\n batch_attendees << badge_maker(name) # add the name to each iteration of the badge_maker method\r\n end\r\n return batch_attendees\r\nend", "def batch_badge_creator(attendees)\n badge_messages = []\n attendees.each do |name|\n badge_messages << badge_maker(name)\n end\n badge_messages\nend", "def printer(attendees)\n\n batch_badge_creator(attendees).each do |badge_string|\n puts badge_string\n end\n \n assign_rooms(attendees).each do |room_string|\n puts room_string\n end \n \nend", "def printer(attendees)\nbatch_badge_creator(attendees).each_with_index do |badges|\n puts badges\nend\nassign_rooms(attendees).each_with_index do |room_assignments|\n puts room_assignments\nend\nend", "def printer(attendee)\n batch_badge_creator(attendee).each do |x| puts x #interates through the attendee array to out\"puts\" the list of badges created by \"batch_badge_creator\"\n end\n assign_rooms(attendee).each do |x| puts x #interates through the attendee array to out\"puts\" the room assignment list created by \"assign_rooms\"\n end\nend", "def printer (attendees)\n the_badges = batch_badge_creator (attendees)\n the_rooms = assign_rooms (attendees)\n the_badges.each do |x|\n puts \"#{x}\"\n end\n the_rooms.each do |x|\n puts \"#{x}\"\n end\nend", "def printer(attendees)\n batch_badge_creator(attendees)\n assign_rooms(attendees)\n\n batch_badge_creator(attendees).each do |badge|\n puts badge\n end\n\n assign_rooms(attendees).each do |room_assignment|\n puts room_assignment\n end\nend", "def printer(attendees)\n badges = batch_badge_creator(attendees)\n badges.each do |badge|\n puts \"#{badge}\"\n end\n\n rooms = assign_rooms(attendees)\n rooms.each do |room|\n puts \"#{room}\"\n end\nend", "def batch_badge_creator(attendees)\n new = []\n attendees.each do | name |\n new << badge_maker(name)\n end\nnew\nend", "def printer(attendees)\n batch_badge_creator(attendees).each do |badge|\n puts badge\n end\n assign_rooms(attendees).each do |room|\n puts room\n end\nend", "def assign_rooms(speakers)\r\n speakers.map.with_index do |speaker_name, index|\r\n \"Hello, #{speaker_name}! You'll be assigned to room #{index + 1}!\"\r\n end\r\nend", "def assign_rooms(guest_list)\n #each_with_index: keep track of each name provided and push the message to new array with the name and message\n room_assignments = []\n guest_list.each_with_index { |name, index| room_assignments << \"Hello, #{name}! 
You'll be assigned to room #{index+1}!\" }\n room_assignments\nend", "def printer(attendees)\n # use helper method to print a badge message for every attendee on a separate line\n batch_badge_creator(attendees).each do |badge_message|\n puts badge_message\n end\n # use helper method to print a welcome message for every attendee on a separate line\n assign_rooms(attendees).each do |room_assignment_welcome_message|\n puts room_assignment_welcome_message\n end\nend", "def batch_badge_creator(speakers)\n badge_messages = []\n for speaker in speakers do\n badge_messages << \"Hello, my name is #{speaker}.\" \n end\n return badge_messages\nend", "def printer(attendees)\n\n room_assignments = []\n rooms = 1\n attendees.each do |guest|\n puts \"Hello, my name is #{guest}.\"\n rooms += 1\n\n end\n room_assignments = []\n rooms = 1\n attendees.each do |guest|\n puts \"Hello, #{guest}! You'll be assigned to room #{rooms}!\"\n rooms += 1\n\n end\n\nend", "def printer(attendees)\n batch_badge_creator(attendees).each { |badge| puts badge}\n assign_rooms(attendees).each {|rooms | puts rooms }\nend", "def printer(attendees)\n batch_badge_creator(attendees).each do |badge|\n puts badge\n end\n assign_rooms(attendees).each do |assignment|\n puts assignment\nend\nend", "def batch_badge_creator(guests)\n guests.map { |name| \"Hello, my name is #{name}.\"}\n\nend", "def printer (array)\nrooms = assign_rooms(array)\nnames = batch_badge_creator(array)\nrooms.each do |room1|\n puts room1\nend\nnames.each do |name1|\n puts name1\nend\nend", "def batch_badge_creator(atendees)\r\n badges = []\r\n atendees.each do |first_name|\r\n badges << badge_maker(first_name)\r\n end\r\n badges\r\n end", "def printer(attendees)\n batch_badge_creator(attendees).each do |x|\n puts x\n end\n assign_rooms(attendees).each do |x|\n puts x\n end\nend", "def printer(array)\n batch_badge_creator(array).each do |line|\n puts line\n end\n assign_rooms(attendees).each do |room|\n puts room\n end\nend", "def printer(name)\n #iterate through the batch_badge_creator array and puts current element\n batch_badge_creator(name).each do |attendee|\n puts attendee\n end\n #iterate through the assign_rooms array and puts current element\n assign_rooms(name).each do |room|\n puts room\n end\nend", "def printer(attendees)\n batch_badge_creator(attendees).each do |badge|\n puts badge\n end\n\n assign_rooms(attendees).each do |assignment|\n puts assignment\n end\nend", "def printer(attendees)\n batch_badge_creator(attendees).each do |badge|\n puts badge\n end\n\n assign_rooms(attendees).each do |assignment|\n puts assignment\n end\nend", "def printer(attendees)\n batch_badge_creator(attendees).each do |badge|\n puts badge\n end\n\n assign_rooms(attendees).each do |assignment|\n puts assignment\n end\nend", "def printer(attendees)\n attendees.each_with_index do |name, index|\n puts \"#{badge_maker(name)}\"\n puts \"Hello, #{name}! You'll be assigned to room #{index + 1}!\"\n end\nend", "def batch_badge_creator(speaker_array)\n speaker_badge_array = []\n speaker_array.each do |speaker| speaker_badge_array << badge_maker(speaker)\n end\n speaker_badge_array\nend", "def batch_badge_creator(attendees)\n greet_attendees = []\n attendees.each do |name|\n greet_attendees.push(\"Hello, my name is #{name}.\")\n end\n return greet_attendees\nend", "def assign_rooms(names)\n names.each_with_index.collect {|name, i| \"Hello, #{name}! 
You'll be assigned to room #{i+1}!\"}\nend", "def assign_rooms(speakers)\n new_arr = []\n speakers.each_with_index do |name, i |\n new_arr << \"Hello, #{name}! You'll be assigned to room #{i + 1}!\"\n end\n new_arr\nend", "def printer(attendees)\n batch_badge_creator(attendees).each do |badge|\n puts badge\n end\n assign_rooms(attendees).each do |assignment|\n puts assignment\n end\nend", "def printer(attendees)\n batch_badge_creator(attendees).each do |badge|\n puts badge\n end\n assign_rooms(attendees).each do |assignment|\n puts assignment\n end\nend", "def assign_rooms (names_list)\n\t\nmsgs_with_room_no=names_list.collect.each_with_index{ |current_name,current_index| \"Hello, #{current_name}! You'll be assigned to room #{current_index+1}!\"\n }\n\t\nend", "def batch_badge_creator(participants)\n\tnew_list = []\n\tparticipants.each do |individual|\n\t\tnew_list << badge_maker(individual)\n\tend\n\treturn new_list\nend", "def printer(guests)\n batch_badge_creator(guests).map{|greeting| puts greeting}\n assign_rooms(guests).map{|room_assignment| puts room_assignment}\nend", "def printer (attendees)\n\tbatch_badge_creator(attendees).each { |attendee| puts attendee}\n\tassign_rooms(attendees).each {|attendee| puts attendee}\nend", "def printer(speakers)\n badges = batch_badge_creator(speakers)\n rooms = assign_rooms(speakers)\n\n badges.each_with_index {|badge,index| puts badge}\n rooms.each_with_index {|room,index| puts room}\n # badges.each_with_index {|badge,index| puts badge[index]}\n # rooms.each_with_index {|room,index| puts room[index]}\nend", "def batch_badge_creator(attendees)\nbadges = []\nattendees.each do |name|\n badges.push(\"Hello, my name is #{name}.\")\nend\nreturn badges\nend", "def assign_rooms(names)\n assignments = []\n rooms = (1..7).to_a\n names.each_with_index do |name, index|\n assignments.push(\"Hello, #{name}! 
You'll be assigned to room #{rooms[index]}!\")\n end\n return assignments\nend", "def batch_badge_creator(speaker_list)\n badge_messages = []\n speaker_list.each {|name| badge_messages << \"Hello, my name is #{name}.\"}\n badge_messages\nend", "def printer(speakers)\n batch_badge_creator(speakers).each do |badge|\n puts badge\n end\n assign_rooms(speakers).each do |room|\n puts room\n end\nend", "def printer(names_array)\n badges_array = batch_badge_creator(names_array)\n room_assignments_array = assign_rooms(names_array)\n\n num_attendees = names_array.count\n\n for i in (0...num_attendees) do\n puts badges_array[i]\n puts room_assignments_array[i]\n end\n\nend", "def batch_badge_creator(adendees)\n badges = []\n adendees.each do |atendee|\n badges.push(\"Hello, my name is #{atendee}.\")\n end\n return badges\nend", "def printer(array_speakers)\n batch_badge_creator(array_speakers).each {|badge| puts badge}\n assign_rooms(array_speakers).each {|room| puts room}\nend", "def printer(attendees)\n badge_print = batch_badge_creator(attendees)\n badge_print.each { |badge| puts badge }\n \n room_print = assign_rooms(attendees)\n room_print.each { |room| puts room }\nend", "def batch_badge_creator(speakers)\n #empty array that needs strings pushed in\n badge_messages = []\n #iteration of speakers array to grab each name\n speakers.each do |guest|\n badge_messages.push(\"Hello, my name is #{guest}.\") # once a name is grabbed it creates a string using that name\n end\n badge_messages\nend", "def batch_badge_creator (names_list)\nlist_badge_msgs=names_list.collect {|name| badge_maker(name)}\nend", "def assign_rooms(attendees)\nattendees = [\"Edsger\", \"Ada\", \"Charles\", \"Alan\", \"Grace\", \n \"Linus\", \"Matz\"]\n\nhash = Hash.new\n attendees.each_with_index { |name, index|\n hash[index] = index\n }\n return room_assignments\nend", "def assign_rooms(speaker)\nspeaker.collect.with_index(1) do |name, index|\n \"Hello, #{name}! You'll be assigned to room #{index}!\"\n #binding .pry\nend \nend", "def assign_rooms(array)\n new_array = []\n counter = 1\n array.each do |name|\n new_array.push(\"Hello, #{name}! You'll be assigned to room #{counter}!\")\n counter += 1\nend\n return new_array\nend", "def batch_badge_creator(names)\n names.map{|name| badge_maker(name)}\nend", "def assign_rooms(array)\n array.map.with_index do |name, i|\n \"Hello, #{name}! You'll be assigned to room #{i+1}!\"\n end\nend", "def assign_rooms(names)\n assignments = [];\n names.each_with_index { |name, index| assignments.push(\"Hello, #{name}! You'll be assigned to room #{index + 1}!\")};\n return assignments;\nend", "def batch_badge_creator(array)\n #empty array\n list = []\n #iterate over the names in array\n array.each do |name|\n #add the badge messages to the list array\n list << badge_maker(name)\n end\n #return list\n list\n#end of batch_badge_creator method\nend", "def assign_rooms(array_of_names)\n room_assignments = []\n\n array_of_names.each_with_index do |name, index|\n room_assignments << \"Hello, #{name}! You'll be assigned to room #{index + 1}!\"\n end\n\n room_assignments\nend", "def assign_rooms(array)\n\tempty_array = []\n\tarray.each_with_index do |value, index|\n\t\tempty_array << \"Hello, #{value}! 
You'll be assigned to room #{index + 1}!\"\n\tend\n\treturn empty_array\nend", "def batch_badge_creator(array)\n new_array = array.map { |e| \"Hello, my name is #{e}.\" }\nend", "def assign_rooms(names_array)\n room_assignments_array = []\n\n names_array.each_with_index { | name, index |\n room_assignments_array.push(room_assignment_maker(name, index))\n }\n\n room_assignments_array\nend", "def batch_badge_creator(array)\n array.collect do |name|\n badge_maker(name)\n end\nend", "def assign_rooms(names)\n room_array = [ ]\n names.each_with_index do |name, index|\n room = index + 1\n room_array << \"Hello, #{name}! You'll be assigned to room #{room}!\" \n end \n return room_array\nend", "def printer(array)\n badges = batch_badge_creator(array)\n rooms = assign_rooms(array)\n badges.each{|badge| puts badge}\n rooms.each{|room| puts room}\nend", "def batch_badge_creator(names)\n names.map {|n| badge_maker(n)}\nend" ]
[ "0.75163215", "0.75001603", "0.73796827", "0.73795754", "0.73230606", "0.73137933", "0.7293626", "0.72384405", "0.72338235", "0.7229176", "0.7165625", "0.71607494", "0.7152063", "0.7148399", "0.7143087", "0.7104131", "0.70906377", "0.70848817", "0.7082804", "0.7052302", "0.70314723", "0.6956102", "0.693373", "0.68939394", "0.6886466", "0.688454", "0.6837287", "0.67930645", "0.6792036", "0.6759439", "0.6751794", "0.67390734", "0.6735555", "0.67309314", "0.67232627", "0.67151105", "0.66916007", "0.66825694", "0.6668836", "0.66685754", "0.66570985", "0.6646658", "0.66409534", "0.6586403", "0.6581783", "0.65782976", "0.6532038", "0.6521031", "0.6517871", "0.65132725", "0.6491617", "0.64577264", "0.64446205", "0.644114", "0.6433458", "0.6409883", "0.6377679", "0.6358917", "0.6356416", "0.6355009", "0.6353338", "0.6353338", "0.6353338", "0.6352112", "0.63380927", "0.6319193", "0.63179976", "0.63171196", "0.62983483", "0.62983483", "0.62952167", "0.62694323", "0.62617236", "0.6239678", "0.62384975", "0.62383354", "0.6237513", "0.6228369", "0.62062174", "0.61994517", "0.6198642", "0.6174285", "0.6164652", "0.61345315", "0.6120112", "0.6088097", "0.60676664", "0.601725", "0.60111237", "0.60022825", "0.599865", "0.595835", "0.5943439", "0.5943347", "0.5930021", "0.591765", "0.58970314", "0.5877019", "0.58641297", "0.586298" ]
0.6781814
29
set the status for the given node
def status @node.status = request.raw_post @node.save! render :text => @node.status end
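A hedged reading of the action above, laid out across lines (it assumes some before_action elsewhere has already loaded @node, which the record does not show): request.raw_post is the unparsed request body, so a client sends the new status as plain text and gets the saved value echoed back.

def status
  @node.status = request.raw_post  # raw, unparsed request body, e.g. "active"
  @node.save!                      # raises ActiveRecord::RecordInvalid if validation fails
  render :text => @node.status     # pre-Rails 5 spelling; newer Rails uses render plain: ...
end

Any route that maps to this action would behave the same way; the route itself is not part of the record.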
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setNodeON(node_id)\n cm_url = APP_CONFIG['cm_ip'] + ':' + APP_CONFIG['cm_port'].to_s\n\n options = {body: {state:\"on\"}.to_json, :headers => { 'Content-Type' => 'application/json' }}\n res = HTTParty.put(cm_url+\"/resources/node/\"+ node_id, options)\n \n end", "def set_status(val)\n self.status = val\n self\n end", "def status=(value)\n value = value.to_sym\n raise Deployment::InvalidArgument.new self, 'Invalid node status!', value unless ALLOWED_STATUSES.include? value\n @status = value\n end", "def set_status(target, status)\n target.status = status\n end", "def set_status\n self.status = 1\n end", "def setNodeOFF(node_id) \n cm_url = APP_CONFIG['cm_ip'] + ':' + APP_CONFIG['cm_port'].to_s\n\n options = {body: {state:\"off\"}.to_json, :headers => { 'Content-Type' => 'application/json' }}\n res = HTTParty.put(cm_url+\"/resources/node/\"+ node_id, options)\n \n end", "def change_status(object, status)\n object.update_attribute :status, status\n end", "def set_event_status( event, status )\n event.instance_exec( namespace ) { |namespace| set_status(status, namespace) }\n end", "def status=(new_status)\n update_values([:status] => new_status)\n end", "def mark_as(status)\n self.status = status\n self.save\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(value)\n @status = value\n end", "def status=(status); end", "def status=(value)\n write_attribute :status, value.to_i\n end", "def status=(new_status)\n self.last_status = status\n\n self.setValue new_status, forKey: 'status'\n end", "def set_status\n self.status = Status.find_by(code: \"OPEN\")\n end", "def set_status!(status)\n ## FIXME_NISH Fix this.\n ## FIXED\n update(status: (status == 'true'))\n end", "def set_status(user_id, status)\n user_id = user_id.to_s\n validate_identifier(:user_id, user_id)\n call_myspace_api(:status_put, :user_id => user_id, :body => {:status => status})\n end", "def setStatus(status)\r\n\t\t\t\t\t@status = status\r\n\t\t\t\tend", "def set_node(val)\n self.node = val\n self\n end", "def set_node(val)\n self.node = val\n self\n end", "def set_status(user_id, status)\r\n user_id = user_id.to_s\r\n @context.validate_identifier(:user_id, user_id)\r\n @context.call_myspace_api(:status_put, :user_id => user_id, :body => {:status => status})\r\n end", "def status=(val)\n if val.nil?\n delete_element('status')\n else\n replace_element_text('status', val)\n end\n end", "def status=(status)\n @_status = status\n end", "def status=(status)\n @_status = status\n end", "def change_status(status)\n unless 
@status == status\n @status = status\n @start_time = Time.now\n end\n end", "def set_Status(value)\n set_input(\"Status\", value)\n end", "def set_Status(value)\n set_input(\"Status\", value)\n end", "def set_status(new_status)\n profile_page = get_profile_page\n activity_hash = profile_page.at('#activityhash')['value'] rescue nil\n \n #Referer MUST be \"/profile.php\"\n ajax_post(profile_url, :setactivity => new_status.to_s, :activityhash => activity_hash).inspect\n end", "def update_status status\n @job.set({\n custom_status: status,\n pinged_at: Time.now\n })\n end", "def change_status(status)\n #not implemented \n end", "def set_status\n @status= {\n 'Unstarted' => 0,\n 'In Progress' => 1,\n 'Completed' => 2\n }\n end", "def update_status(status)\n @status = status\n @last_status_change = Time.now\n update_statusfile\n end", "def node=(node)\n @node = node\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def setstatus!(status)\n if status == \"admin\"\n self.userstatus_id = 1\n elsif status == \"pro\"\n self.userstatus_id = 2\n elsif status == \"full\"\n self.userstatus_id = 3\n else\n self.userstatus_id = 4\n end\n end", "def node=(node)\n retract[:node] = node\n end", "def status_enum=(status)\n write_attribute(:status, status)\n end", "def status=(value)\n if value.nil?\n write_attribute :state, nil\n else\n write_attribute :state, value.to_s\n end\n end", "def set_node\n @node = Node.find(params[:id])\n end", "def set_node\n @node = Node.find(params[:id])\n end", "def set_node\n @node = Node.find(params[:id])\n end", "def status=(status)\n raise ArgumentError unless status.is_a?(Status)\n end", "def set_status(status, status_message = '')\n return if status == Algorithm.statuses[self.status]\n self.update_attributes(status: status)\n self.update_attributes(status_message: status_message)\n self.update_attribute(:diva_id, nil) if self.status == 'review' || self.status == 'unpublished_changes'\n self.update_version if self.status == 'published'\n end", "def on_change_status(&block)\n self.actions[:change_status] = block\n end", "def node=(val)\n attributes['node'] = val\n end", "def node=(val)\n attributes['node'] = val\n end", "def status=(status)\n @content[0] = status\n end", "def set_node\r\n @node = Node.find(params[:id])\r\n end", "def set_entity_status\n @entity_status = EntityStatus.find(params[:id])\n end", "def status=(v)\n @status = alma_string v\n end", "def set_status\n @status ||= Status.find(params[:id])\n end", "def update_status(new_status)\n raise ArgumentError.new\"Invalid Status\" unless new_status == :AVAILABLE || new_status == :UNAVAILABLE\n @status = new_status\n end", "def status=(newstatus)\n 
@log.debug(\"New status: '#{newstatus}'.\")\n puts \"Status: '#{newstatus}'.\"\n win_main = Knj::Gtk2::Window.get(\"main\")\n \n if win_main\n win_main.gui[\"statusbar\"].push(0, newstatus)\n end\n end", "def setStatusInfo(name, value)\n @device.setStatusInfo(name,value) ;\n end", "def set_node_attributes(job)\n return if node_attributes.empty?\n\n nodes.concurrent_map do |node|\n node.reload\n\n node_attributes.each do |attribute|\n key, value, options = attribute[:key], attribute[:value], attribute[:options]\n\n if options[:toggle]\n original_value = node.chef_attributes.dig(key)\n\n toggle_callbacks << ->(job) {\n message = if original_value.nil?\n \"Toggling off node attribute '#{key}' on #{node.name}\"\n elsif !options[:force_value_to].nil?\n \"Forcing node attribute to '#{options[:force_value_to]}' on #{node.name}\"\n else\n \"Toggling node attribute '#{key}' back to '#{original_value.inspect}' on #{node.name}\"\n end\n job.set_status(message)\n value_to_set = options[:force_value_to].nil? ? original_value : options[:force_value_to]\n node.set_chef_attribute(key, value_to_set)\n node.save\n }\n end\n\n job.set_status(\"Setting node attribute '#{key}' to #{value.inspect} on #{node.name}\")\n node.set_chef_attribute(key, value)\n end\n\n node.save\n end\n end", "def status=(int)\n if not STATUSE.select { |k, v| v == int }\n raise Fatal::InvalidArgumentError.code(13002)\n end\n @status = int\n end", "def status=(new_status)\n payload = {\n 'user' => {\n 'content' => {\n 'status' => new_status.to_s.upcase,\n 'userRoles' => @json['user']['content']['userRoles']\n },\n 'links' => {\n 'self' => uri\n }\n }\n }\n\n @json = client.post(\"/gdc/projects/#{project_id}/users\", payload)\n end", "def update_status(status)\n @metric_status = status if STATUSES[status] > STATUSES[@metric_status]\n end", "def set_node\n @node = ::Node::Node.find(params[:id])\n end", "def update(node); end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end", "def set_status\n @status = Status.find(params[:id])\n end" ]
[ "0.72162616", "0.70840734", "0.7028983", "0.69587994", "0.6822384", "0.6748916", "0.66424096", "0.6572275", "0.6521027", "0.65065444", "0.64727414", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.6472161", "0.64548945", "0.6432457", "0.6422501", "0.64061725", "0.6400279", "0.6398518", "0.63912153", "0.6379944", "0.6379944", "0.6368035", "0.6305679", "0.6284623", "0.6284623", "0.6247952", "0.62429804", "0.62429804", "0.6229407", "0.62154675", "0.6208255", "0.62076044", "0.6191734", "0.6141807", "0.6094999", "0.6094999", "0.6094999", "0.6094999", "0.6094999", "0.6094999", "0.6094999", "0.6094999", "0.6094999", "0.6094999", "0.6094999", "0.6094999", "0.6094999", "0.6094999", "0.60705906", "0.6025878", "0.59903586", "0.59835255", "0.59785795", "0.5972679", "0.5972679", "0.5972679", "0.5966448", "0.596286", "0.59514517", "0.59506905", "0.59506905", "0.5939189", "0.59248453", "0.5923317", "0.59117377", "0.59028983", "0.58898085", "0.5886358", "0.58847463", "0.58814365", "0.58569926", "0.58355623", "0.58314556", "0.5816943", "0.58145696", "0.5805157", "0.5805157", "0.5805157" ]
0.6145385
55
uppercase letters, no repeats). Consider the substrings of the string: consecutive sequences of letters contained inside the string. Find the longest such string of letters that is a palindrome. Note that the entire string may itself be a palindrome. You may want to use String's `slice(start_index, length)` method, which returns a substring of length `length` starting at index `start_index`: "abcd".slice(1, 2) == "bc" "abcd".slice(1, 3) == "bcd" "abcd".slice(2, 1) == "c" "abcd".slice(2, 2) == "cd" Difficulty: hard.
def palindrome?(string) i = 0 while i < string.length if string[i] != string[(string.length - 1) - i] return false end i += 1 end return true end
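The stored answer above is only the palindrome? helper; one brute-force way to finish the exercise on top of it (a sketch, not the dataset's recorded solution) is to try every slice(start_index, length) and keep the longest slice that passes. It is O(n^3), which is fine for the exercise; several of the negatives below expand around each center instead to do better.

def longest_palindrome(string)
  best = nil
  start_index = 0
  while start_index < string.length
    length = 1
    while start_index + length <= string.length
      candidate = string.slice(start_index, length)
      # keep the candidate if it is a palindrome and longer than anything seen so far
      if palindrome?(candidate) && (best.nil? || candidate.length > best.length)
        best = candidate
      end
      length += 1
    end
    start_index += 1
  end
  best
end

longest_palindrome("ABCBD")  # => "BCB"  (single letters count, so best is never nil for a non-empty string)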
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def longest_palindrome(string)\n\ti = 0\n\tlongest = \"\"\n\twhile i < string.length\n\t\tcurrent_palindrome = \"\"\n\t\tj = 1\n\t\twhile j <= string.length\n\t\t\tif palindrome?(string.slice(i,j))\n\t\t\t\tcurrent_palindrome = string.slice(i,j)\n\t\t\tend\n\n\t\t\tif longest == \"\" || current_palindrome.length > longest.length\n\t\t\t\tlongest = current_palindrome\n\t\t\tend\t\n\t\t\tj += 1\n\t\tend\n\t\ti += 1\n\tend\n\tlongest\nend", "def longest_palindrome(string)\n string_arr = string.split(\"\")\n longest = 0\n substring = \"\"\n\n while string_arr.length > 0\n string_arr.each do |char|\n substring << char\n palindrome?(substring) && substring.length > longest ? longest = substring.length : next\n end\n\n string_arr.shift\n substring = \"\"\n end\n \n longest\nend", "def longest_palindrome(string)\n # Create a variable to store our results to be returned\n best_palindrome = nil\n \n # Loop through our string\n i = 0\n while i < string.length # to go through each letter in the string\n # create local variable\n length = 1 # assign first length to 1\n \n # Create second loop\n while (i + length) <= string.length # to slice out out the substring 0 + 1 <= 5\n # local variable\n substring = string.slice(i, length) # to define the substring string.slice(0, 1) => ab\n \n if palindrome?(substring) && (best_palindrome == nil || substring.length > best_palindrome.length)\n best_palindrome = substring\n end\n # substring becomes new best_palindrome if it meets 2 requirements:\n # 1. it is a palindrome\n # 2. it's (nil = to the first substring as best_palindrome) OR new substring is longer than current logest substring.\n \n length += 1 # increment length of substring at each starting index. (i.e. a, ab, abc, abcd, etc...)\n end\n \n i += 1 # then increment i to set each letter as startign index. (i.e. abcd, bcbd, cbd, etc...)\n end\n \n return best_palindrome\n end", "def longest_palindrome(string)\n if string.length <= 1\n return string\n end\n\n i = 0\n string.length.downto(1).each do |length|\n # .. is inclusive\n (0..i).each do |start_index|\n if palindrome?(string.slice(start_index, length))\n return string.slice(start_index, length)\n end\n end\n\n i += 1\n end\n\n string[0]\nend", "def longest_palindrome(string)\n for substring_length in string.length...3\n for check in 0..(string.length - sub_string)\n possible_palindrome = string[check...check+substring_length]\n if palindrome(possible_palindrome)\n return possible_palindrome\n end\n end\n end\n return false \nend", "def longest_palindrome(string)\n best_palindrome_start = 0\n best_palindrome_len = 0\n\n 0.upto(string.length - 1).each do |start|\n # micro-optimization: don't look at substrings shorter than best\n # palindrome.\n len = best_palindrome_len + 1\n while start + len <= string.length\n if is_palindrome?(string, start, len)\n best_palindrome_start, best_palindrome_len = start, len\n end\n\n len += 1\n end\n end\n\n [best_palindrome_start, best_palindrome_start + best_palindrome_len - 1]\nend", "def longest_palindrome(string)\n\tcurrent_longest = \"\"\n\tif string.length < 3\n\t\treturn false\n\tend\n\t#This represents the overarching iteration through the string. 
\n\t#As a string must be at least length 3 to be a palindrome, no search is necessary\n\t#for the last 2 letters of the string\n\tfor x in 0...string.length-2 do\n\t\t#Note: Need to ensure that every matching letter is checked\n\t\t#First part of conditional removes spaces from consideration for palindrome-ness\n\t\tif string[x] != \" \" && string.slice(x+1...(string.length)).include?(string[x])\n\t\t\tsubstring = string.slice(x..(string.rindex(string[x])))\n\t\t\tif Palindrome?(substring) && substring.length > current_longest.length\n\t\t\t\tcurrent_longest = substring\n\t\t\t\t#If the longest palindrome is the entire string itself, return it\n\t\t\t\tif current_longest.length == string.length\n\t\t\t\t\treturn current_longest\n\t\t\t\tend\n\t\t\tend\n\t\t\t#Check the substring, without the last matched letter for palindrome\n\t\t\t#This is to account for second matched letters, but wastes runtime on\n\t\t\t#Some unnecessary operations\n\t\t\tsub_test = longest_palindrome(string.slice(x...(string.rindex(string[x]))))\n\t\t\tif sub_test && sub_test.length > current_longest.length\n\t\t\t\tcurrent_longest = sub_test\n\t\t\tend\n\t\tend\n\tend\n\treturn current_longest.length > 0 ? current_longest : false\nend", "def longest_palindrome(str)\n return 0 if str.empty?\n\n arr = str.chars\n substrings = []\n length = 1\n loop do\n arr.each_cons(length) { |set| substrings << set }\n length += 1\n break if length > arr.length\n end\n substrings.select { |set| set == set.reverse }.max_by(&:length).length\n p substrings\nend", "def longest_palindrome(string)\n string.length.downto(3) do |length|\n string.chars.each_cons(length) do |substr_arr|\n return substr_arr.length if palindrome?(substr_arr.join)\n end\n end\n false\nend", "def longest_palindrome(string)\n longest = \"\"\n i = 0\n j = 0\n\n while i < string.length\n while j < string.length\n if is_palindrome?(string[i..j]) && string[i..j].length > longest.length\n longest = string[i..j]\n end\n j += 1\n end\n i += 1\n j = i + 1\n end\n\n longest\nend", "def longest_palindrome(string)\n longest = ''\n i = 0\n while i < string.length\n j = 1\n while (i + j) <= string.length\n x = string.slice(i, j)\n if (x == x.reverse) && (x.length > longest.length)\n longest = x\n end\n j += 1\n end\n i += 1\n end\n if longest.length == 1\n return false\n end\n longest.length\n\nend", "def longest_palindrome(s)\n result = ''\n \n for i in (0..s.length-1) do\n for j in (i..s.length-1) do \n result = s[i..j] if s[i..j].length > result.length && s[i..j] == s[i..j].reverse\n end\n end\n \n result\nend", "def longest_palindrome(s)\n return '' if s == ''\n arr = s.chars\n\n s.length.downto(1) do |char|\n palindrome = arr.each_cons(char).find { |tested| tested == tested.reverse }\n return palindrome.join if palindrome\n end\nend", "def longest_palindrome(string)\n largest = ''\n (0...string.length).each do |i|\n (i + 1...string.length).each do |j|\n sub_str = string[i..j]\n if sub_str == sub_str.reverse\n if sub_str.length > largest.length\n largest = sub_str\n end\n end\n end\n end \n largest\nend", "def longestPalSubstr(str)\n maxLength = 1\n start = 0\n length = str.size\n low = 0\n high = 0\n \n # One by one consider every character as center point of \n # even and length palindromes\n for i in 1..length\n # Find the longest even length palindrome with center\n # points as i-1 and i.\n low = i - 1\n high = i\n while (low >= 0) and (high < length) and (str[low] == str[high]) do\n if high - low + 1 > maxLength then\n start = low\n maxLength = high - low + 1\n end\n 
low -= 1\n high += 1\n end\n \n # Find the longest odd length palindrome with center \n # point as i\n low = i - 1\n high = i + 1\n while (low >= 0) and (high < length) and (str[low] == str[high]) do\n if (high - low + 1) > maxLength then\n start = low\n maxLength = high - low + 1\n end\n low -= 1\n high += 1\n end\n end\n \n # puts \"Longest palindrome substring is: start: #{start} maxLength: #{maxLength}\"\n # puts str[start, maxLength]\n \n return str[start, maxLength]\n end", "def longest_palindrome(string)\n longest_palindrome = false\n i = 0\n\n while i < string.length - 1\n j = i + 1\n\n while j < string.length\n curr_string = string[i..j]\n len = curr_string.length\n\n if is_palindrome?(curr_string)\n longest_palindrome = len if !longest_palindrome || len > longest_palindrome\n end\n\n j += 1\n end\n\n i += 1\n end\n\n longest_palindrome\nend", "def longest_palindrome(string)\n longest_palindrome = false\n i = 0\n\n while i < string.length - 1\n j = i + 1\n\n while j < string.length\n curr_string = string[i..j]\n len = curr_string.length\n\n if is_palindrome?(curr_string)\n longest_palindrome = len if !longest_palindrome || len > longest_palindrome\n end\n\n j += 1\n end\n\n i += 1\n end\n\n longest_palindrome\nend", "def naive_longest_palindrome(s)\n longest = s[0]\n s.each_char.with_index do |ch_i, i|\n s.each_char.with_index do |ch_j, j|\n next if j < i\n possible_pal = s[i..j]\n if is_palindrome?(possible_pal)\n longest = possible_pal.length > longest.length ? possible_pal : longest\n end\n end\n end\n longest\nend", "def longest_palindrome(string)\n \nend", "def longest_palindrome(string)\n longest = \"\"\n len = 2\n \n while len < string.length\n i = 0\n while i+len <= string.length\n if is_palindrome?(string[i...i+len])\n longest = [i, i+len-1]\n end\n i += 1\n end\n len += 1\n end\n\n longest\nend", "def find_longest_palindrome(string)\n l = string.size\n l.downto(0) { |i|\n 0.upto(l) { |j|\n s = string.slice(j,i)\n return s if s == s.reverse && s.size > 1\n }\n }\nend", "def longest_palindrome(string)\nend", "def longest_palindrome(str)\r\n pal = \"\"\r\n (0..str.size).each do |x|\r\n i = 1\r\n while (i < (str.size-x)) do\r\n\t if str[x,i].reverse == str[x,i]\r\n\t pal = str[x,i] if pal.size < str[x,i].size\r\n\t end\r\n\t i+= 1\r\n\tend\r\n end\r\n pal\r\nend", "def longest_palindrome(str)\n \nend", "def longest_palindrome(string)\n\n palindromes = []\n\n (0...string.length).each do |i|\n (i+1...string.length).each do |j|\n sub = string[i..j]\n palindromes << sub if sub == sub.reverse\n end\n end\n\n palindromes.sort_by(&:length).last \n\nend", "def longest_palindrome(string)\n\nend", "def longest_palindrome(string)\n\nend", "def longest_palindrome(string)\n\nend", "def longest_pali(string)\n letters = string.gsub(/\\W+/,'').split('')\n combos = []\n idx = 2\n while idx <= letters.length\n combos << letters.combination(idx).to_a\n idx += 1\n end\n longest = nil\n combos.flatten(1).each do |x|\n if (palindrome?(x.join(''))) && (longest == nil || x.length > longest.length)\n longest = x\n end\n end\n \n return longest.join('')\nend", "def longest_pallimdromic_substr(str)\n maxlen = 1\n start = 0\n n = str.length\n for i in 1...n\n\n # even Case\n low = i-1\n hi = i\n while(low >= 0 && hi < n && str[low] == str[hi])\n if(hi-low+1 > maxlen)\n maxlen = hi - low + 1\n start = low\n end\n low -= 1\n hi += 1 \n end\n \n\n # odd Case\n low = i-1\n hi = i + 1\n while(low >= 0 && hi < n && str[low] == str[hi])\n if(hi-low+1 > maxlen)\n maxlen = hi - low + 1\n start = low\n end\n low 
-= 1\n hi += 1\n end \n end\n\n puts \"maxlength is #{maxlen}\"\n puts \"string is #{str[start..maxlen]}\" \nend", "def longest_palindrom(str)\n return str if str.length <= 1\n\n result = ''\n (0...str.length - 1).each do |i|\n ((i + 1)...str.length).each do |j|\n substr = str[i, j]\n result = substr if is_palindrom?(substr) && (result.length < substr.length)\n end\n end\n result\nend", "def longest_palindrome(str)\n\nend", "def longest_parlindrom_substring string\n return string if string.length < 2\n\n length = string.length\n memo = {}\n\n 1.upto(length) do |sub_length|\n 0.upto(length - sub_length) do |start_i|\n\n last_i = start_i + sub_length - 1\n indice = [start_i, last_i]\n\n next unless string[start_i] == string[last_i]\n\n memo[indice] = 1 if start_i == last_i\n memo[indice] = 2 if start_i + 1 == last_i\n\n indice_of_sub_substring = [start_i+1, last_i-1]\n \n if memo.has_key?(indice_of_sub_substring)\n memo[indice] = 2 + memo[indice_of_sub_substring]\n end\n end\n end\n indice = memo.key(memo.values.max)\n string[indice[0]..indice[1]]\nend", "def longest_palindrome(s)\n return s if s.length == 1\n previous = s[0]\n\n all_same = true\n z = 0\n counter = 0\n while z < s.length\n if s[z] == previous\n counter += 1\n else\n tmp_longest = previous * counter\n longest = tmp_longest if tmp_longest.length > longest.length\n previous = s[z]\n counter = 1\n break if counter > s.length / 2\n end\n z += 1\n end\n return previous * counter if all_same || counter > s.length / 2\n\n longest = ''\n i = 0\n while i < s.length\n starting_letter = s[i]\n matching_hash = {}\n matching_hash[i] = starting_letter\n\n j = i + 1\n while j < s.length\n if s[j] == starting_letter\n matching_hash[j] = s[j]\n end\n j += 1\n end\n\n i += 1\n\n indices = matching_hash.keys\n next if indices.length == 1\n next if (indices[-1] - indices[0]) < longest.length\n\n first_index = matching_hash.first[0]\n\n k = 0\n valid_sequence = true\n while k < (indices.length / 2 - 1)\n first_dist = indices[k + 1] - indices[k]\n second_dist = indices[indices.length - 1 - k] - indices[indices.length - 2 - k]\n if first_dist != second_dist\n valid_sequence = false\n break\n end\n k += 1\n end\n\n next if !valid_sequence\n\n matching_hash.delete_if {|key,v| (key != first_index) && (key - first_index < longest.length)}\n indices = matching_hash.keys\n\n n = 1\n last_index = indices[-1]\n running_str = ''\n while first_index + n < ((last_index - first_index) / 2)\n matching_hash.each do |key, v|\n if key == first_index\n running_str = matching_hash[key] << s[key + n]\n else\n checking_str = matching_hash[key] << s[key - n]\n matching_hash.delete(key) if checking_str != running_str\n end\n end\n n +=1\n end\n\n matching_hash.values.each do |value|\n longest = value if value.length > longest.length\n end\n end\n puts \"longest: #{longest}\"\n return longest\nend", "def longest_palindrome(string)\n length = string.length\n while true\n (0..(string.length - length)).each do |i|\n word = string[i...(i + length)]\n return [i, (i + length - 1)] if is_palindrome?(word)\n end\n length -= 1\n end\nend", "def longest_palindrome(s)\n left = 0\n right = 0\n for i in 0...s.length do\n odd = length_from_center(s, i, i)\n even = length_from_center(s, i, i + 1)\n longest = [odd, even].max\n if longest > right - left\n left = i - (longest - 1) / 2\n right = i + longest / 2\n end\n end\n return s[left..right]\n end", "def palindrome_substrings(string)\n\nend", "def str_palindromes(str)\n arr_palindromes = arr_all_possible_substrings(str).select { 
|substr| palindrome?(substr)}\nend", "def palindromes(string)\n results = substrings(string)\n results.select {|substring| substring == substring.reverse && substring.length > 1}\nend", "def palindrome_substrings(str)\n substrings(str).select {|sub| sub if sub.length > 1 && palindrome?(sub)}\nend", "def longest_palindrome s\n return 0 if s.empty?\n (s.length).downto(1).each do | i |\n s.split(\"\").each_cons(i).map do |c|\n return c.count if c == c.reverse\n end\n end\nend", "def palindromes(string)\n all_substrings = all_substrings(string)\n palindromes = []\n all_substrings.each do |substring|\n palindromes << substring if substring == substring.reverse && substring.length > 1\n end\n palindromes\nend", "def palindromes(string)\n substrings(string).select { |el| el.length > 1 && el == el.reverse && el.match(/[a-zA-Z1-9]/) }\nend", "def shortest_palindrome(string)\n return string if palindrome?(string)\n\n for i in 0...string.length\n rev = string.length - i\n suffix = string[(rev - 1)...string.length]\n rev_suffix = suffix.reverse\n\n new_string = rev_suffix + string\n return new_string if palindrome?(new_string)\n end\nend", "def length_of_longest_substring(str)\n arr = str.chars\n lengths = []\n sub_str = []\n arr.each do |letter|\n\n if sub_str.include?(letter)\n lengths << sub_str.length\n idx = sub_str.index(letter)\n sub_str = sub_str[idx + 1..-1]\n end\n sub_str << letter \n end\n lengths << sub_str.length\n lengths.max\nend", "def longest_palindrome(s)\n max = \"\"\n 0.upto(s.size - 1).each { |i|\n odd = palindrome(s, i, i)\n even = palindrome(s, i, i + 1)\n \n return s if odd.size == s.size || even.size == s.size\n max = odd if odd.size > max.size\n max = even if even.size > max.size\n }\n\n max \nend", "def palindromes(string)\n array = substrings(string)\n array.select { |sub| sub == sub.reverse }\nend", "def longest_palindrome(str)\n result = [0, \"\"]\n for i in (0..str.length - 1)\n p1 = expand(str, i, i) \n result = result[0] > p1.length ? result : [p1.length, p1]\n p2 = expand(str, i, i + 1) \n result = result[0] > p2.length ? 
result : [p2.length, p2]\n end\n result[1]\nend", "def palindromes(string)\n all_substrings = substrings(string)\n results = []\n all_substrings.each do |substring|\n results << substring if palindrome?(substring)\n end\n results\nend", "def findWholePalindrome(str, center)\n if str[center] == str[center-1]\n i = center\n j = center-1\n else\n i = center+1\n j = center-1\n end\n until j < 0 || i > str.length || str[j] != str[i]\n j-=1\n i+=1\n end\n return str[j+1..i-1]\nend", "def length_of_longest_substring(str) ## Does not pass LeetCode Test\n max_longest = ''\n current_longest = ''\n str.each_char do |letter|\n current_longest = \"\" if current_longest.index(letter)\n current_longest << letter \n max_longest = current_longest if current_longest.length > max_longest.length \n end\n max_longest.length \nend", "def palindromes(str)\n result = []\n array = substrings(str)\n array.each do |char|\n result << char if char.reverse == char && char.size > 1\n end\n result\nend", "def palindrome_substrings(str)\r\n result = []\r\n substrings_arr = substrings(str)\r\n substrings_arr.each do |substring|\r\n result << substring if is_palindrome?(substring)\r\n end\r\n result\r\nend", "def palindromes(str)\n results = []\n 1.upto(str.length) do |idx|\n str.chars.each_cons(idx)\n .each {|subr| results << subr.join if subr.size > 1 && subr == subr.reverse}\n end\n results.sort \nend", "def longest(string)\n sliced = string.chars.slice_when {|a,b| a > b}.to_a\n longest = sliced.max_by {|sub_arr| sub_arr.length}.join\nend", "def palindromes(str)\n substrings(str).select do |el|\n el == el.reverse && el.size > 1\n end\nend", "def palindrome_substrings(str)\n result = []\n substrings_arr = substrings(str)\n substrings_arr.each do |substring|\n result << substring if is_palindrome?(substring)\n end\n result\nend", "def palindrome(string)\n word_array = string.split(\"\")\n\n word_array.each_with_index do |letter,index|\n if letter[index] == letter[-index]\n return true\n end\n end\n end", "def longest_palindrome(string)\n pal_arr = get_palindromes(string)\n return false if pal_arr.empty?\n sorted_arr = pal_arr.sort_by {|el| el.length}\n \n sorted_arr[-1].length\n \n \nend", "def shortest_palindrome(str)\n # find longest palindromic prefix\n n = str.size\n rev = str.reverse\n\n (0...n).each do |i|\n return rev[0...i] + str if str[0...(n - i)] == rev[i...n]\n end\n\n ''\nend", "def palindromes(str)\n evaluated_words = substrings(str)\n evaluated_words.select { |char| char.size > 1 && char == char.reverse }\nend", "def palindrome_substrings(str)\n result = []\n\n all_substrings = substrings(str)\n all_substrings.select {|substring| substring.length > 1 && palindrome?(substring)}\nend", "def palindromes(string)\n result = []\n all_substrings = substrings(string)\n all_substrings.each do |item|\n result << item if item.size > 1 && item == item.reverse\n end\n result\nend", "def find_palindromes_right(s)\n outarray = []\n for x in (0..(s.length-1))\n outarray[x] = 1\n end\n for y in (s.length-1).downto(0)\n if matches = s.indexes(s[y])\n matches.each do |x|\n test_string = s[x..y]\n #puts \"x=#{x}, y=#{y}, substring = \\\"#{test_string}\\\"\"\n if test_string.palindrome?\n #puts \"Found palindrome: \\\"#{test_string}\\\"\"\n outarray[y] = test_string.length\n # Note that I will always find a palindrome at s[y..y] so x won't pass y\n break\n end\n end\n end\n end\n outarray\nend", "def palindrome_substrings(string)\narray = substrings(string)\npalindromes = []\n\n array.each do |ele|\n if palindrome?(ele) == true 
&& ele.length > 1\n palindromes << ele\n end\nend\npalindromes\nend", "def palindrome_substrings(string)\n ss = substrings(string)\n results = ss.select do |ele|\n palindrome?(ele) && ele.length > 1\n end \n results \nend", "def palindrome?(str, len)\n pos = len - 1\n sequences = []\n while pos < str.length do\n chars = str[(pos - len + 1)..pos].chars\n j = 0\n palin = true\n while j < len / 2 do\n if chars[j] != chars[len - j - 1]\n palin = false\n break\n end\n j += 1\n end\n\n sequences << [chars.first, chars[1]] if palin && chars[0] != chars[1]\n pos += 1\n end\n sequences\n end", "def palindromes(str)\n substrings = substrings(str)\n substrings.select do |substring|\n palindrome?(substring)\n end\nend", "def palindromes(string) \n test_substrings = substrings(string)\n test_substrings.select do |substring|\n substring.length > 1 && substring.downcase.gsub(/[^A-Z0-9]/i, '') == substring.downcase.gsub(/[^A-Z0-9]/i, '').reverse\n end\nend", "def longest_palindrome_subseq(s)\n length = s.size\n dp = []\n\n length.times do |i|\n dp[i]= Array.new(length).fill(0);\n dp[i][i] = 1\n end\n puts \"dp #{dp}\"\n (2..length).each do |len|\n puts \"len #{len}\"\n (0..(length - len)).each do |i|\n j = i + len - 1\n # puts \"j #{j} => i:#{i} + len:#{len} - 1\"\n\n if s[i] == s[j]\n dp[i][j] = 2 + (len == 2 ? 0 : dp[i+1][j-1])\n else\n dp[i][j] = [dp[i+1][j], dp[i][j-1]].max\n end\n puts \"dp #{dp}\"\n end\n end\n\n dp[0][length - 1]\nend", "def longest(str)\n \n alphabet_substr = find_substr(str).select do |substr|\n substr.chars == substr.chars.sort\n end\n alphabet_substr.sort_by! { |str| str.length }\n\n longest = alphabet_substr.select { |str| str.length == alphabet_substr.last.length }\n longest.shift\nend", "def palindrome_substrings(str)\n palSub = []\n subStr = substrings(str)\n\n subStr.each do |ele|\n if palindrome?(ele) && ele.length > 1\n palSub << ele\n end\n end\n\n return palSub\n\nend", "def find_palindromes_left(s)\n outarray = []\n for x in (0..(s.length-1))\n outarray[x] = 1\n end\n for x in (0..(s.length-1))\n if matches = s.indexes(s[x])\n matches.reverse.each do |y|\n test_string = s[x..y]\n #puts \"x=#{x}, y=#{y}, substring = \\\"#{test_string}\\\"\"\n if test_string.palindrome?\n #puts \"Found palindrome: \\\"#{test_string}\\\"\"\n outarray[x] = test_string.length\n # Note that I will always find a palindrome at s[x..x] so y won't pass x\n break\n end\n end\n end\n end\n outarray\nend", "def palindromes_further_explore(str)\n new_str = str.downcase.gsub(/[^a-z0-9]/,\"\")\n substrings(new_str).select do |el|\n el == el.reverse && el.size > 1\n end\nend", "def length_of_longest_substring(s)\n # find each substring and compare lengths\n # as i move through the string, have a store that keeps building until it finds a repeated character\nend", "def largest_palindrome(string)\n palindromes(string).max {|a, b| a.size <=> b.size }\nend", "def is_palindrome(string)\n array = string.chars\n orginal_char_array = array.select do |char|\n /[A-Za-z]/.match char\n end\n \n char_array = array.select do |char|\n /[A-Za-z]/.match char\n end\n length = char_array.length\n first_index = 0\n last_index = length -1\n while first_index < last_index\n temp = char_array[first_index]\n char_array[first_index] = char_array[last_index]\n char_array[last_index] = temp\n first_index += 1\n last_index -= 1\n end\n \n if char_array.join.upcase == orginal_char_array.join.upcase\n return true\n else\n return false\n end\nend", "def palindrome(string)\n result = []\n new_string = string.chars\n\n loop do\n 
result << new_string.pop\n break if new_string.size == 0\n end\n result.join\nend", "def palindrome(string)\n result = Array.new\n\n new_string = string.chars\n string.size.times { result << new_string.delete_at(-1) }\n result.join\nend", "def max_unique_psub(string)\n\tanswer = []\n\tletters = string.split(\"\")\n\n\tletters.each_with_index do |letter, index|\n\t\tanswer.concat(substrings(letters[index..-1]))\n\t\tstart = 2\n\n\t\twhile index + start < letters.length\n\t\t\tnew_str = ([letters[index]] + letters[index+start..-1])\n\t\t\tanswer.concat(substrings(new_str))\n\t\t\tstart += 1\n\t\tend\n\n\tend\n\n\treturn answer.uniq.last\nend", "def palindrome?(string)\n chars_arr = string.chars\n new_arr = []\n chars_arr.each { |substring| new_arr.prepend(substring) }\n new_arr.join == string\nend", "def palindrome(string)\n (string.length <= 1) || ((string[0, 1] == string[-1, 1]) && palindrome(string[1..-2]))\nend", "def longest_repeated_substring str\n longest = nil\n\n # Try all substrings up to half the size of the string (we can't overlap)\n (1..str.size / 2).each do |size|\n # Try all starting positions until we can only fit two substrings\n (0..str.size - 2*size).each do |offset|\n index = str.index str[offset, size], offset + size\n if index\n substr = str[index, size]\n next if substr.strip.empty? # Ignore whitespace\n\n longest = str[index, size]\n break # Found substring of length `size`, now move on to size+1\n end\n end\n end\n\n longest\nend", "def is_palindrome(string)\n array = string.gsub((/(?i)[^a-z]/),\"\").downcase.chars\n array_length = array.length - 1\n \n array.each_with_index do |letter,index|\n return false if letter != array[array_length - index]\n end\n\n return true\nend", "def palindrome(string)\n result = Array.new\n\n new_string = string.chars\n string.size.times { result << new_string.pop }\n result.join\nend", "def PalindromeSeq (str, index=0)\n str == str.reverse || str.length < 2 ? str.length : PalindromeSeq(str[(index+1)...str.length]) > PalindromeSeq(str[(index)...(str.length-1)]) ? PalindromeSeq(str[(index+1)...str.length]) : PalindromeSeq(str[(index)...(str.length-1)])\nend", "def lps(s)\n return \"\" if s.empty?\n longest = [0,0]\n s.chars.each_with_index do |ch, idx|\n for i,j in [[idx, idx + 1], [idx, idx]]\n until s[i] != s[j]\n break if i < 0 || j >= s.length\n longest = [i,j] if longest[1] - longest[0] < j - i\n i -= 1\n j += 1\n end\n end\n end\n s[longest[0]..longest[1]]\nend", "def longest_substring_which_contains_two_unique_characters(string)\n longest = ''\n current = ''\n hash = Hash.new(0)\n\n string.split('').each do |s|\n current += s\n\n if hash[s] > 0\n hash[s] += 1\n else\n hash[s] += 1\n if hash.size > 2\n longest = longest.length > current.length ? 
longest : current\n # binding.pry\n pair = hash.min_by{|k,v| v}\n hash.delete(pair[0])\n\n current = current[1..-1]\n else\n current += s\n end\n end\n end\n\n longest\nend", "def check_palindrome_recursive(string)\n return true if string.length < 2\n return false if string[0] != string[-1]\n slice_first_last_character(string)\n check_palindrome_recursive(string)\n end", "def palindromes(word)\n substrings(word).select { |word| word == word.reverse && word.length > 1 }\nend", "def palindrome?(str)\n new_arry = []\n index = 1\n while index <= str.size\n new_arry << str[-index]\n index += 1\n end\n new_arry.join == str\nend", "def palindrome?(str)\n new_arry = []\n index = 1\n while index <= str.size\n new_arry << str[-index]\n index += 1\n end\n new_arry.join == str\nend", "def palindrome?(str)\n matches = []\n str.size.times { |index| matches << (str[index] == str[-(index+1)]) }\n matches.all?(true) && matches.size > 1\nend", "def longest_substring(s)\n return 0 if s.empty?\n return 1 if s.length == 1\n\n max_length = 0\n longest_substring = Set.new\n\n first = 0\n last = 0\n\n while last < s.length\n until !longest_substring.include?(s[last])\n longest_substring.delete(s[first])\n first += 1\n end \n longest_substring.add(s[last])\n max_length = [max_length, longest_substring.length].max\n last += 1\n end\n return max_length\nend", "def palindrome?(str)\n (str.length - 1).downto(0).map { |i| str[i] }.zip(str.chars).all? { |a, b| a == b }\nend", "def longest_repeated_substring(input)\r\n len = input.size / 2 # Max size is half total length, since strings cannot overlap\r\n len = 255 if len > 255\r\n while len > 0\r\n # Find all substrings of given length\r\n sub_strings = {}\r\n for i in 0...input.size-len\r\n sub_str = input[i..i+len]\r\n \r\n if not sub_strings.has_key?(sub_str)\r\n # Add to list, track end pos for overlaps\r\n sub_strings[sub_str] = i+len \r\n elsif sub_strings[sub_str] < i\r\n # First non-overlapping match ties for longest\r\n return sub_str \r\n end\r\n end\r\n \r\n len -= 1\r\n end\r\n \r\n nil\r\nend", "def is_palindrome(string)\n # raise NotImplementedError, \"Not implemented yet\"\n array = convert_string_array(string)\n start_ind = 0\n end_ind = array.size - 1\n\n while start_ind < end_ind\n if array[start_ind] != array[end_ind]\n return false\n end\n\n start_ind += 1\n end_ind -= 1\n end\n\n return true\nend", "def longest(s)\n\tall_results = []\n\n\tlist_of_chars = s.chars\n\n\tlist_of_chars.each_with_index do |char, index|\n\n\t\tresult = ''\n\t\tnext_index = index + 1 \n\t\tcurrent_index = index\n\t\tresult << char\n\n\t\tif current_index == list_of_chars.length - 1\n\t\t\tall_results = all_results.max { |a,b| a.length <=> b.length }\n\t\telse\n\t\t loop do\n\t\t \tif list_of_chars[next_index] == nil\n\t\t \t\tall_results << result\n\t\t \t\tbreak\n\t\t\t\telsif list_of_chars[next_index] >= list_of_chars[current_index]\n\t\t\t\t\tresult << list_of_chars[next_index]\n\t\t\t\t\tcurrent_index += 1\n\t\t\t\t\tnext_index += 1\n\t\t\t\telse\n\t\t\t\t\tall_results << result\n\t\t\t\t\tbreak\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend\n\tall_results\nend", "def longestSub(str)\n\n\nend", "def longest_palindrome(s)\n string_hash = Hash.new(0)\n \n s.each_char do |char|\n string_hash[char] += 1\n end\n \n beginning_count = 0\n middle_count = 0\n \n string_hash.keys.each do |key|\n if string_hash[key] % 2 == 1\n middle_count = 1\n end\n \n beginning_count += string_hash[key] / 2\n end\n \n beginning_count * 2 + middle_count\nend", "def palindrome_permutation(string)\n # 
new_string = string.gsub(/\\s+/, \"\")\n # p new_string.length\n # permutation = new_string.chars\n # p \n\n\n # if new_string.length % 2 != 0\n # puts \"True. permutation\"\n # end\n\n # pal_split = string.split\n\n # pal0_split = pal_split[0].chars\n # removed_last_letter = pal0_split.pop\n # word_one = pal0_split.join\n # if word_one == pal_split[1].reverse\n # puts \"True permutations \"\n # else\n # puts \"no\"\n # end\n\n new_string = string.gsub(/\\s+/, \"\")\n reversed_word = new_string.reverse\n if new_string == reversed_word\n puts \"true.\"\n else \n puts \"false\"\n end \nend" ]
[ "0.8338428", "0.8288934", "0.82832694", "0.82351744", "0.81817216", "0.8177087", "0.81531215", "0.8066029", "0.80623484", "0.8052572", "0.8031901", "0.7955093", "0.79294676", "0.7919743", "0.79162496", "0.78464746", "0.78464746", "0.7830216", "0.78268725", "0.7821299", "0.7809112", "0.7805101", "0.7781036", "0.7750146", "0.77422017", "0.7740928", "0.7740928", "0.7740928", "0.7736187", "0.7712105", "0.769861", "0.7656404", "0.7616016", "0.7614567", "0.7547458", "0.7454067", "0.7450057", "0.7358394", "0.7354076", "0.73428303", "0.73190147", "0.7299319", "0.7287736", "0.7268264", "0.72458977", "0.7244121", "0.72375673", "0.7229197", "0.7208057", "0.7184603", "0.718089", "0.71682805", "0.71493024", "0.7143368", "0.7138709", "0.71309704", "0.7120997", "0.71018344", "0.7088446", "0.70875984", "0.7076012", "0.70658785", "0.70455194", "0.7041048", "0.7037989", "0.7017129", "0.69997007", "0.6989715", "0.69828844", "0.6972617", "0.6951859", "0.6948743", "0.6942876", "0.6931045", "0.6922281", "0.6922144", "0.6855434", "0.6854974", "0.68548506", "0.68504155", "0.6838225", "0.6836391", "0.68316287", "0.6791327", "0.67907006", "0.678421", "0.6781555", "0.6779144", "0.6769693", "0.67609495", "0.67561084", "0.67561084", "0.6750987", "0.6737584", "0.67067736", "0.6706479", "0.67063725", "0.67045975", "0.67008865", "0.6700069", "0.6696374" ]
0.0
-1
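The negative snippets in the row above are mostly variants of the expand-around-center approach to the longest-palindromic-substring problem. A minimal Ruby sketch of that approach, offered only as an illustration (the method name and example call are assumed, not drawn from any row), looks like this:

# Illustrative sketch only: expand-around-center search for the longest
# palindromic substring, the technique used in several snippets above.
def longest_palindrome_sketch(str)
  return str if str.length < 2

  best = str[0, 1]
  (0...str.length).each do |center|
    # Try an odd-length center (center, center) and an even-length
    # center (center, center + 1) at every position.
    [[center, center], [center, center + 1]].each do |low, high|
      while low >= 0 && high < str.length && str[low] == str[high]
        low -= 1
        high += 1
      end
      # The loop overshoots by one index on each side before stopping.
      candidate = str[(low + 1)...high]
      best = candidate if candidate.length > best.length
    end
  end
  best
end

# longest_palindrome_sketch("forgeeksskeegfor")  # => "geeksskeeg"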
specifies the URL on which to find categories
def start_url(url) @start_url = url end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_categories\n new_categories = extract_categories_from_url\n unless new_categories.nil?\n @categories = new_categories\n else\n @categories = Type::DefaultCategories\n end\n end", "def calendar_category\n @request.path_parameters[:url][1]\n end", "def discover_url\n \"http://discover.amee.com/categories#{path}\"\n end", "def url(category)\n %{<a class=\"category_link\" href=\"../list/#{category}\">#{category}</a>}\n end", "def category_url\n revision.category_url\n end", "def find_categories\n page = agent.get(WEBSITE_URL)\n\n page.search(CATEGORIES_SELECTOR).each do |a|\n category_name = a.text.strip\n path = a.attribute('href').value.strip\n\n next if category_name.blank? || path.blank?\n\n data[category_name] = {\n 'url' => URI.join(WEBSITE_URL, path).to_s\n }\n end\n\n Helpers.wait\n end", "def category\n # Whats the last category we are asking for? (the rest don't matter I don't think..)\n requested_category = params[:category].split(\"/\").last\n category = Taxonomy.find_by_seo_url requested_category\n\n if category.present?\n @category = category\n @posts = get_posts category.posts\n\n\n respond_to do |format|\n format.html { render :template => 'default/index' }\n format.json { render json: @posts }\n end\n else\n # No such category found, redirect to root index\n redirect_to root_path\n end\n end", "def category(rel_path)\n find_category based_on_root rel_path\n end", "def category\n if params[:city] == \"sf\"\n @city = \"San Francisco\"\n elsif params[:city] == \"ny\"\n @city = \"New York\"\n end\n @listings = Listing.find_all_by_city(@city)\n end", "def set_category\n end", "def category; end", "def categories\n add_to_query restrict_kind: 'category'\n end", "def categories_shortcut\n (config_params||DEFAULT_CONFIG_PARAMS)[\"Categories Shortcut\"]\n end", "def category_path(category)\n category ? \"/l/#{category.name}\" : \"/l/uncategoried\"\n end", "def categories_from_path(special_dir); end", "def url_to\n h.url_for [model.category, model]\n end", "def category_links(category)\n links = \"\"\n iterated_cat = category\n if iterated_cat.parent.nil?\n links = insert_category_link(links,iterated_cat)\n else \n i = 0\n while !iterated_cat.parent.nil? and iterated_cat != Sfcatnode.root\n links = insert_category_link(links,iterated_cat)\n iterated_cat = iterated_cat.parent\n i+= 1\n end\n end\n links.insert(0,\"#{link_to('All Solutions', :action => 'index')}\")\n end", "def category\n @category = NewsCategory.find_by! 
slug: params[:category_slug]\n @collection = News.of_type(:news).in_category(@category).page_for_visitors(current_page)\n end", "def scrape_categories_page\n categories = {}\n # On non-root pages, the xpath is:\n # \"//ul[@id='nav-categories']/li/a\"\n @agent.get(clean_url('/')) do |page|\n page.search(\"//h2[text()='Categories']/following-sibling::ul/li/a\").each do |link|\n category_url = link.attributes[\"href\"].value\n categories[link.text.gsub(/[^\\w\\s]+/,'').lstrip] = category_url\n end\n end\n\n categories\n end", "def category\n string = Array(@data['categories'])[0]\n return '' if string.to_s.empty?\n\n string.split('/').map { |c|\n Silly::StringFormat.clean_slug_and_escape(c)\n }.join('/')\n end", "def set_category\n if @current_website.present?\n begin\n @category = @current_website.categories.includes(:pages, :categories).find_by(slug: params[:id])\n if [email protected]?\n @category = @current_website.categories.includes(:pages, :categories).friendly.find(params[:id])\n end\n rescue\n redirect_to '/', status: 302, notice: 'This is not the page you are looking for...move along.'\n end\n else\n redirect_to '/'\n end\n @resource = @category\n end", "def categories\n category\n end", "def set_category\n @categories = Category.friendly.find(params[:id])\n end", "def fix_category(store_path)\n self.category = store_path.split('/')[0...-2].join('/')\n end", "def category\n @articles = Article.with_category(params[:category], params[:search] || \"top_requested\")\n respond_with(:articles, template: \"articles/index\")\n end", "def all_categories\n end", "def set_category\n match_data = /^What is [-]?\\d+\\s*(?<operation>[\\+\\-\\*\\/])\\s*[-]?\\d+\\?$/.match(self.query) || {}\n self.category = CATEGORIES[match_data[:operation]]\n end", "def category\n @category ||= ''\n end", "def categories=(value)\n @categories = value\n end", "def getCategories()\n\t\tcat = Array.new\n\t\tcat.push(\"heroku\")\n\t\tcat.push(\"go\")\n\t\tcat.push(\"github\")\n\t\tcat.push(\"docker\")\n\t\tcat.push(\"css\")\n\t\tcat.push(\"apache\")\n\t\tcat.push(\"html\")\n\t\tcat.push(\"bootstrap\")\n\t\tcat.push(\"java ee\")\n\t\tcat.push(\"javafx\")\n\t\tcat.push(\"java\")\n\t\tcat.push(\"jquery\")\n\t\tcat.push(\"mips\")\n\t\tcat.push(\"c++\")\n\t\tcat.push(\"laravel\")\n\t\tcat.push(\"linux\")\n\t\tcat.push(\"opengl\")\n\t\tcat.push(\"sml\")\n\t\tcat.push(\"javascript\")\n\t\tcat.push(\"mongo db\")\n\t\tcat.push(\"c\")\n\t\tcat.push(\"yacc\")\n\t\tcat.push(\"circuit\")\n\t\tcat.push(\"php\")\n\t\tcat.push(\"mysql\")\n\t\tcat.push(\"node js\")\n\t\tcat.push(\"photoshop\")\n\t\tcat.push(\"rails\")\n\t\tcat.push(\"postgres\")\n\t\tcat.push(\"ruby\")\n\t\tcat.push(\"redis\")\n\t\tcat.push(\"mac osx\")\n\t\tcat.push(\"sass\")\n\t\tcat.push(\"ubuntu\")\n\t\tcat.push(\"bower\")\n\t\tcat.push(\"wordpress\")\n\t\tcat.push(\"css\")\n\t\tcat.push(\"hosted\")\n\t\tcat.push(\"python\")\n\t\tcat.push(\"maven\")\n\t\tcat.push(\"maven mojo\")\n\t\tcat.push(\"composer\")\n\t\tcat.push(\"mips\")\n\t\tcat.push(\"gulp\")\n\t\tcat.push(\"grunt\")\n\t\tcat.push(\"phpstorm\")\n\t\tcat.push(\"react\")\n\t\tcat.push(\"swift\")\n\t\tcat.push(\"wordpress\")\n\t\tcat.push(\"tomcat\")\n\t\tcat.push(\"redis\")\n\t\tcat.push(\"travis\")\n\t\treturn cat\n\tend", "def category= arg\n cat_arr = arg.to_s.split(':')[0..@depth-1] || []\n @category = cat_arr.join(':').to_s\n end", "def categories\n taxonomies.all :conditions => { :classification => :category }\n end", "def show_category\r\n # if the before_filter didn't work, it means a 
category_path wasn't specified, just\r\n # redirect to home\r\n permanent_redirect_to home_url\r\n end", "def category\n path = @path_remote.split('/')\n return path[3] # Could require changes depending on the structure of the maps repo\n end", "def getCategories(_, _, _)\n @db.categories\n end", "def category_selection(category)\n category = Public_apis.find_by_name(category)\n #goes over list item array . find method to find item\n\n \n end", "def getCat \n\t\tputs ''\n\t\tputs 'Fetching categories . . '\n\t\[email protected]('ul.dropdown li a').each do |cat|\n\t\t\tarr=[]\n\t\t\tarr.push cat.text\n\t\t\tarr.push cat['href']\n\t\t\t@arr_cat.push arr\n\t\t\tprint '. '\n\t\tend\n\t\ti=0\n\t\t@arr_cat.each do |pair|\n\t\t\[email protected] 'insert into category values (?, ? ,?)', i, pair[0], pair[1]\n \t\t\ti +=1\n \t\tend\n\tend", "def slugified_categories; end", "def primary_category\n self.category.split(\"/\").first\n end", "def category\n @services = Service.where(\"category = ?\", params[:category])\n @category = Service.categories.key(params[:category].to_i)\n end", "def find_categories(options = {})\n @language_permalink = options[:language_permalink]\n if @language_permalink\n options.delete(:language_permalink)\n Syntaxdb::Request.send_request(options, \"/languages/#{@language_permalink}/categories\")\n end\n end", "def get_categories_list\n args = receive_should_validate?\n get('redtube.Categories.getCategoriesList', {}, [], args[:should_validate])\n end", "def categories()\n @web.get('http://www.waymarking.com/cat/categorydirectory.aspx') do |page|\n ile = 0\n #puts page.parser.to_html.to_s\n cache = {}\n\n page.parser.css('div#content div.gutter a').each do |cat|\n href = cat.attr('href')\n m = Category::GUID_REGEXP.match href\n key = Waymarking::Utils.parameterize(cat.text)\n unless m.nil? then\n ile +=1\n raise DuplicatedCategory if cache.has_key? key\n\n cache[key] = m[1]\n #puts \"#{ile} #{key} #{cat.text} #{m[1]}\"\n else\n puts href\n end\n\n end\n\n cache2 = {}\n cache.keys.each do |key|\n @web.get(\"http://www.waymarking.com/cat/details.aspx?f=1&guid=#{cache[key]}&exp=True\") do |page2|\n begin\n cat = Waymarking::Category.from_html(page2)\n cache2[key] = cat\n rescue\n puts key\n end\n end\n end\n\n File.open('categories.yml', 'w') do |f|\n f.write YAML::dump(cache2)\n end\n end\n end", "def _category\n @_category ||= if category\n category\n elsif special_category\n special_category\n else\n nil\n end\n end", "def categories=(value)\n @categories = value\n end", "def categories=(value)\n @categories = value\n end", "def categories=(value)\n @categories = value\n end", "def link_to_external_category(class_category)\n if class_category.external? 
and params[:class_category_id].blank?\n link_to(strip_tags(sanitize_title(class_category.title)), external_category_url(:class_category_id => \"#{class_category.id}-#{parameterize_title(class_category.title)}\"))\n else\n sanitize_title(class_category.title)\n end\n end", "def set_category\n @category = Category.find_by_permalink(params[:id])\n end", "def parse_categories_structure(category_id = nil)\n super category_id, { product_link: '.productsArea .productArea .productDetail a',\n next_page_link: '.productsArea .tsk-pageview .next a' }\n end", "def set_category\n @category = Category.find_by(slug: params[:id])\n end", "def link_categories(cats)\n return [] unless cats\n cats.map do |cat|\n ['<a href=\"/categories/', cat, '.html\">', cat, '</a>'].join\n end\n end", "def categories\n # list results from search results piped in from side nav categories\n @experiments = Experiment.find(:status => true)\n respond_to do |format|\n format.html { render :action => \"list\" }\n format.xml { render :xml => @experiments }\n end\n\n end", "def categories(options=nil, &definition)\n if options\n if options[:for]\n @embed_for = options[:for]\n end\n end\n \n @embedded = CategoryScraper.new @fetcher_class,{:main_url => @main_url}, &definition\n end", "def category\n client.categories.find(data[\"categoryId\"])\n end", "def categories\n []\n end", "def targeting_app_store_categories(options = {})\n get('/targeting_criteria/app_store_categories', options)\n end", "def gather_uri(key = nil, cat = nil, val = nil)\n uri = \"http://jservice.io/api/\"\n if !key.nil? && !cat.nil?\n category = return_categories(key)\n category = category.select {|c| c[\"title\"] == cat}[0] if category.kind_of? Array\n uri += \"clues?category=#{category['id']}\"\n if !val.nil?\n uri += \"&value=#{val}\"\n end\n else\n uri += \"random?count=1\"\n end\n uri\nend", "def get_leaf_cat_urls\n cat_urls_table = @db[:cat_urls]\n cat_urls_table.map(:url)\n end", "def category=(value)\n @category = value\n end", "def category=(value)\n @category = value\n end", "def category=(value)\n @category = value\n end", "def category=(value)\n @category = value\n end", "def category=(value)\n @category = value\n end", "def category=(value)\n @category = value\n end", "def get_category\n category = params[:subcategory] || params[:category]\n @category = Category.find_by_slug!(category)\n end", "def city\n if !session[:user_id].nil?\n if params[:city] == \"new\"\n redirect_to :controller => :listings, :action => :req\n else\n if params[:city] == \"sf\"\n @city = \"San Francisco\"\n elsif params[:city] == \"ny\"\n @city = \"New York\"\n end\n @listings = Listing.find_all_by_city(@city)\n @categories ||= Array.new\n @listings.each do |l|\n if([email protected]?(l.category))\n @categories.push(l.category)\n end\n end\n end\n else\n redirect_to :controller => :user, :action => :login\n end\n end", "def category_link(category)\n if category.respond_to?('each')\n categories = category.sort!.map do |item|\n if item == 'blog'\n '<a href=\"/'+item+'/page1\">'+item.upcase+'</a>'\n else\n '<a href=\"/'+item+'/\">'+item.upcase+'</a>'\n end\n end\n\n connector = \"and\"\n case categories.length\n when 0\n \"\"\n when 1\n categories[0].to_s\n when 2\n \"#{categories[0]} #{connector} #{categories[1]}\"\n else\n \"#{categories[0...-1].join(', ')}, #{connector} #{categories[-1]}\"\n end\n else\n item = category\n if item == 'blog'\n '<a href=\"/'+item+'/page1\">'+item.upcase+'</a>'\n else\n '<a href=\"/'+item+'/\">'+item.upcase+'</a>'\n end\n end\n end", "def 
index\n @categories = collections_config.values\n end", "def categories\n document.css('#jumpto a').map(&:text)\n end", "def get_categories(add_params = nil)\n params = {\n }\n api_call('/global/categories(.:format)',:get,params,add_params)\n end", "def set_category\n @category = Category.friendly.find(params[:id])\n end", "def set_category\n @category = Category.friendly.find(params[:id])\n end", "def set_category\n @category = Category.friendly.find(params[:id])\n end", "def set_category\n @category = Category.friendly.find(params[:id])\n end", "def set_category\n @category = Category.friendly.find(params[:id])\n end", "def set_category\n @category = Category.friendly.find(params[:id])\n end", "def set_category\n @category = Category.friendly.find(params[:id])\n end", "def navigate(category)\n if @library == []\n @library = category.all\n else\n filter = category.to_s.downcase\n @library = @library.send(filter)\n end\n end", "def populate_category\n\t\t\t\tif params[:purpose] == \"category\"\n\t\t\t\t\t\tcategory = Category.find(params[:category_id])\n\t\t\t\t\t\t@sub_categories = category.sub_categories\n\t\t\t\telsif params[:purpose] == \"sub_category\"\n\t\t\t\t\t\tsub_category = SubCategory.find(params[:category_id])\n\t\t\t\t\t\t@inner_categories = sub_category.inner_categories\n\t\t\t\tend\n\t\tend", "def set_category\n @category = Category.friendly.find(params[:id])\n end", "def set_category\n @category = Category.friendly.find(params[:id])\n end", "def catalog_dcat()\n return uri(\"api/dcat.json\")\n end", "def get_categories\n cats = []\n params.each do |k,v|\n if k.starts_with? \"category\"\n name = v\n num = cat_number(k) \n cats << [name,num]\n end\n end\n return cats\n end", "def categories(params={})\n return @categories if (@categories && !params[:force])\n @categories = get_categories\n end", "def show\n redirect_to categories_path\n end", "def categories\n get('venues/categories').categories\n end", "def get_categories(query_obj=nil)\n uri = URI.parse(@uri + \"/Categories\")\n results = get(uri,query_obj)\n end", "def cover_url\n case self.category\n when \"Nutrition\"\n \"nutrition.jpeg\" \n when \"Activity\"\n \"activity.jpeg\"\n when \"Exercise\"\n \"exercise.jpeg\"\n when \"Rehabilitation\"\n \"rehabilitation.jpeg\"\n when \"Medication\"\n \"medications.jpeg\"\n else\n \"activity.jpeg\"\n end\n end", "def apply_category_filter\n return if params[:category].blank?\n Category.find(params[:category]).tap do |category|\n @posts = @posts.where category: category\n add_breadcrumb \"by category \\\"#{category.humanize}\\\"\"\n end\n end", "def scrape_category_page!(category, category_url)\n @all_emoji[category] = scrape_category_page(category_url)\n end", "def categories=(value)\n\t\t@categories = value\n\tend", "def categories=(value)\n\t\t@categories = value\n\tend", "def categories=(value)\n\t\t@categories = value\n\tend", "def add_browse_categories\n exhibit.searches.published.find_each do |s|\n sitemap.add sitemap.exhibit_browse_path(exhibit, s), priority: 0.5, lastmod: s.updated_at\n end\n end", "def get_category(category)\n CATEGORIES[category.downcase]\n end", "def test_show\n get '/category/1'\n end", "def set_item_category\n @item_category = current_company.item_categories.friendly.find(params[:id]) || []\n end", "def categories(str)\n raise NotImplementedError\n end", "def by_category\n @category = Category.roots.find_by_slug(params[:category])\n raise ListingException, \"missing category\" if @category.blank?\n @subcategory = 
@category.children.find_by_slug(params[:subcategory]) if params[:subcategory].present?\n terms = [ListingFilter.category(@subcategory.present? ? @subcategory.id : @category.id), ListingFilter.state('active')]\n query = {filter: {bool: {must: terms}}, sort: {id: \"desc\"}}\n @listings = Listing.search(query).page(page).per(per).records\n\n @subcategories = @category.children.with_listings\n\n @title = [@category.name, @subcategory.try(:name)].compact.join(\" : \") + \" | Category\"\n\n respond_to do |format|\n format.html { render(action: :index, layout: !request.xhr?) }\n end\n end", "def index\n @category = params[:category]\n conditions = {:page => params[:page], :order => 'id DESC'}\n case @category\n when nil\n @links = Link.paginate conditions\n when ''\n @links = Link.paginate conditions\n# when '未分类'\n# @links = Link.paginate_by_category '', conditions\n else\n conditions[:conditions] = \"category = '#{@category}'\"\n conditions[:per_page] = 300\n @links = Link.paginate conditions\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @links }\n end\n end" ]
[ "0.72489065", "0.6903029", "0.688453", "0.6730876", "0.66785306", "0.6675466", "0.6574682", "0.65502435", "0.6531038", "0.6459287", "0.6447425", "0.6428674", "0.641266", "0.64035183", "0.6395815", "0.6330224", "0.6295994", "0.62713367", "0.626723", "0.62384194", "0.61880934", "0.6183479", "0.61808133", "0.6180611", "0.617921", "0.61788315", "0.6163772", "0.6132437", "0.60875994", "0.6068846", "0.6068434", "0.6054944", "0.6051891", "0.6030057", "0.60203725", "0.60158664", "0.6007516", "0.59908426", "0.5972608", "0.5966684", "0.59578156", "0.59517914", "0.5946067", "0.5945432", "0.5942902", "0.5942902", "0.5942902", "0.592379", "0.59211206", "0.591989", "0.5916326", "0.59149885", "0.59145886", "0.59109443", "0.59108204", "0.59069705", "0.5899148", "0.58955795", "0.5888025", "0.5880764", "0.5880764", "0.5880764", "0.5880764", "0.5880764", "0.5880764", "0.58730924", "0.5857505", "0.5841035", "0.58318865", "0.58316904", "0.58284867", "0.5827003", "0.5827003", "0.5827003", "0.5827003", "0.5827003", "0.5827003", "0.5827003", "0.58249676", "0.58222526", "0.5818461", "0.5818461", "0.58139324", "0.5804884", "0.5794874", "0.5788909", "0.5785027", "0.57751966", "0.5773035", "0.57681715", "0.5765894", "0.5763531", "0.5763531", "0.5763531", "0.57629126", "0.5760311", "0.5756313", "0.57476366", "0.57417315", "0.573549", "0.57338274" ]
0.0
-1
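The positive document in the row above is a one-line setter from what looks like a scraper-configuration DSL. A minimal usage sketch follows; the class name and the way the value is read back are assumptions for illustration, only the setter body comes from the row itself.

# Hypothetical illustration: the start_url setter placed inside a small
# configuration class and called with the categories page of a site.
class CategoryScraperConfig
  # specifies the URL on which to find categories
  def start_url(url)
    @start_url = url
  end
end

config = CategoryScraperConfig.new
config.start_url("https://example.com/categories")
config.instance_variable_get(:@start_url)  # => "https://example.com/categories"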
specifies the specific brand for that category
def brand_string(str) @brand = str end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_brand\n set_category\n @brand = @category.brands.find(params[:id]) if params[:id]\n end", "def get_brand(brand)\n case brand\n when \"master\"\n return \"mastercard\"\n when \"american_express\"\n return \"amex\"\n else\n return brand\n end\n end", "def set_brand\n @brand = Brand.friendly.find(params[:id])\n end", "def set_brand\n @brand = Brand.friendly.find(params[:id])\n end", "def set_brand\n if params[:brand_id]\n begin\n @brand = Brand.find(params[:brand_id])\n rescue Mongoid::Errors::DocumentNotFound\n # we need to rescue to avoid crashing the application\n # like for the category_id above\n end\n end\n end", "def is_brand? brand\n Handset.is_brand? request.user_agent, brand\n end", "def brand_name\n maintenance_record_hub.try(:brand) || car.try(:brand_name)\n end", "def brand_with_model; end", "def category_brand_init\n get_category\n @brand = Brand.find_by_slug!(params[:brand])\n @page = Page.make_from @brand\n end", "def brand\n cc_type\n end", "def brand\n cc_type\n end", "def set_product_brand\n @product_brand = Product::Brand.find(params[:id])\n end", "def brand_name\n product.brand_name\n end", "def set_brand\n @brand = Brand.find_by(name: params[:id])\n end", "def set_brand\n @brand = Brand.find(params[:id])\n end", "def set_brand\n @brand = Brand.find(params[:id])\n end", "def set_brand\n @brand = Brand.find(params[:id])\n end", "def set_brand\n @brand = Brand.find(params[:id])\n end", "def set_brand\n @brand = Brand.find(params[:id])\n end", "def set_brand\n @brand = Brand.find(params[:id])\n end", "def set_brand_filter\n # return true if @products.nil?\n # return true if generic_results?\n # \n # @brands = @products.map(&:brand).compact.uniq.sort_by(&:name)\n # true\n end", "def set_brand\n @brand = Brand.find(params[:id])\n end", "def set_brand\n @brand = Brand.find(params[:id])\n end", "def set_brand\n @brand = @current_user.brands.find(params[:id])\n end", "def brand_name\n product.brand_name\n end", "def set_device_brand\n @device_brand = DeviceBrand.find(params[:id])\n end", "def set_device_brand\n @device_brand = DeviceBrand.find(params[:id])\n end", "def brand_name\n brand_id ? brand.name : ''\n end", "def brand_name\n brand_id ? 
brand.name : ''\n end", "def brand\n @brand ||= Brand.find(brand_pid)\n end", "def set_cp_brand\n @cp_brand = Brand.find(params[:id])\n end", "def set_admin_brand\n @admin_brand = Admin::Brand.friendly.find(params[:id])\n end", "def organisation_brand(organisation)\n return unless organisation\n\n brand = organisation[\"details\"][\"brand\"]\n brand = \"executive-office\" if executive_order_crest?(organisation)\n brand\n end", "def set_product_brand\n @product_brand = ProductBrand.find(params[:id])\n end", "def set_brands_category\n @brands_category = BrandsCategory.find(params[:id])\n end", "def set_productbrand\n @productbrand = Productbrand.find(params[:id])\n end", "def brand_params\n params[:brand]\n end", "def set_brand_name\n @brand_name = BrandName.find(params[:id])\n end", "def brand()\n sql = \"SELECT * FROM manufacturers WHERE id = $1\"\n values = [@manuf_id]\n brand = SqlRunner.run(sql, values)\n return brand.map{|manuf| Manufacturer.new(manuf)}[0]\n end", "def set_brand_model\n @brand_model = BrandModel.find(params[:id])\n end", "def brands\n\t\t\tErp::Products::Brand.where(id: self.get_products_for_categories({}).select(:brand_id).where.not(brand_id: nil)).order(:name)\n\t\tend", "def brand_params\n params.fetch(:brand, {}).permit(:is_main, :image_url, :name, :description)\n end", "def buy_cat(cat)\n new_cat = Cat.all.find do |cat_instance|\n cat_instance.name == cat \n end\n if new_cat \n Cat.new(new_cat.name, self)\n else \n brand_new_cat = Cat.new(cat, self)\n end \n end", "def require_company?\n !!(self.brand && self.brand.name.to_s.match(/studer/i))\n end", "def initialize(brand)\n @brand = brand\n end", "def initialize(brand)\n @brand = brand\n end", "def set_admin_device_brand\n @device_brand = DeviceBrand.find(params[:id])\n end", "def brands_category_params\n params.require(:brands_category).permit(:category_id_id, :brand_id_id)\n end", "def company_brand_code\n self.dig_for_string(\"agentSummary\", \"office\", \"brandCode\")\n end", "def set_vehicle_brand\n @vehicle_brand = VehicleBrand.find(params[:id])\n end", "def brand_params\n params.require(:brand).permit(:name, :description, :viticulture, :image, :remote_image_url, :website, :country, :region, :appellation, :varietal, :soil_type, :enologist, :viticulturist, :vineyard_size, :total_production)\n end", "def initialize(brand)\n @brand = brand\n if !(BRANDS.include? (@brand))\n BRANDS << brand\n end \n end", "def brand_group_name\n\t\t\t\tbrand_group.present? ? 
brand_group.name : ''\n\t\t\tend", "def brand_params\n params.require(:brand).permit(:name, :description)\n end", "def cp_brand_params\n params.require(:brand).permit(:name, :description)\n end", "def set_marketshare_brand_type\n @brand_types = Marketshare::BrandType.all\n end", "def brand_params\n params.require(:brand).permit(:company_id, :eng_name, :pixel_id, :logo, name: {})\n end", "def set_backoffice_brand\n @backoffice_brand = Brand.find(params[:id])\n end", "def update\n if @brand_category.update(brand_category_params)\n head :no_content\n else\n render json: @brand_category.errors, status: :unprocessable_entity\n end\n end", "def brand_params\n params.require(:brand).permit(:name, :mana, :description, :avatar, :background, :country_id, :status)\n end", "def show\n @brand_category = BrandCategory.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @brand_category }\n end\n end", "def brand_params\n params.require(:brand).permit(:name)\n end", "def brand(text, options = {})\n options = canonicalize_options(options)\n options = ensure_class(options, 'brand')\n\n with_environment = options.delete(:with_environment)\n if with_environment && Rails.env != 'production'\n text = \"#{text} - #{Rails.env}\"\n options = ensure_class(options, \"rails-#{Rails.env}\") \n end\n\n url = options.delete(:url)\n \n if url.present?\n link_to(text, url, options)\n else\n content_tag(:span, text, options)\n end\n end", "def brand_params\n params.require(:brand).permit(\n :id,\n :name\n )\n end", "def create\n @brand_category = BrandCategory.new(params[:brand_category])\n\n respond_to do |format|\n if @brand_category.save\n format.html { redirect_to @brand_category, notice: 'Brand category was successfully created.' }\n format.json { render json: @brand_category, status: :created, location: @brand_category }\n else\n format.html { render action: \"new\" }\n format.json { render json: @brand_category.errors, status: :unprocessable_entity }\n end\n end\n end", "def brand_params\n params.require(:brand).permit(:BrandName)\n end", "def initialize(brand)\n #brands << brand - this is incorrect because in spec BRANDS is being placed\n if BRANDS.include?(brand)\n #.include? 
works better than .find because its an array method rather than an innumerable\n else\n BRANDS << brand\n end\n @brand = brand\n end", "def brand\n object.brand.id\n end", "def product_brand_params\n params.fetch(:product_brand, {})\n end", "def set_admin_features_brand\n @admin_features_brand = Admin::FeaturesBrand.find(params[:id])\n end", "def brand_params\n params.require(:brand).permit(:id, :name)\n end", "def index\n if params[:brand].blank?\n @custom_camo_hats = CustomCamoHat.all\n @order_item = current_order.order_items.new\n else\n @brand_id =Brand.find_by(name: params[:brand]).id\n @custom_camo_hats = CustomCamoHat.where(:brand_id => @brand_id)\n end\n end", "def set_vehiclebrand\n @vehiclebrand = Vehiclebrand.find(params[:id])\n end", "def show\n @products = @category.products.all\n\n if @products.exists?\n @products_max_price = @products.order(price: :desc).limit(1).take.price\n end\n @category_brands = []\n @products.each do |prod|\n if @category_brands.index(prod.brand).nil?\n @category_brands.push(prod.brand)\n end\n end\n @cart_item = current_cart.cart_items.new\n end", "def brand_params\n params.require(:brand).permit(:brand_name, :website_url, :email, :address1, :address2, :city, :primary_country_id, :brand_country_id, :state_province_id, :pincode, :phone_number, :default, :brand).merge(:user_id => current_user.id)\n end", "def add_new_brand(brands, options = {})\n product_brand = options[:product_brand] || \"\" \n brands.push({title: product_brand, stock: 0, total_prc: 0.0, total_sales: 0.0, count: 0, brand_avg_prc: 0.0})\n brands = brands.uniq! { |item| item[:title] }\nend", "def initialize(brand) # A new show must have a brand associated with it\n @brand = brand\n end", "def make_brands_section\n\tbrands = $products_hash[\"items\"].map {|toy| toy[\"brand\"]}.uniq\n\tbrands.each do |brand|\n\t\t$same_brand = $products_hash[\"items\"].select {|toy| toy[\"brand\"] == brand}\n\t print_brand_name brand\n\t brand_toy_stock brand\n\t\tprint_brand_toy_stock\n\t average_price_brand brand\n\t\tprint_average_price_brand\n\t total_sales_brand brand\n\t\tprint_total_sales_brand\n\tend\nend", "def isCompany?\n self.category =~ /(company|index|currency)/i\n end", "def brand_params\n params.require(:brand).permit(\n :logo,\n :logo_url, \n :name)\n end", "def brands\n options = {\n content_type: 'brand',\n include: 1,\n order: 'sys.createdAt'\n }\n\n objects('Brand', options)\n end", "def federation_brand_name=(value)\n @federation_brand_name = value\n end", "def create\n @brand_category = BrandCategory.new(brand_category_params)\n\n if @brand_category.save\n render json: @brand_category, status: :created, location: @brand_category\n else\n render json: @brand_category.errors, status: :unprocessable_entity\n end\n end", "def set_category\n end", "def show\n render json: @brand_category\n end", "def brand_name\n\tproducts_brand = ($products_hash[\"items\"].map {|each_brand| each_brand[\"brand\"]}).uniq\nend", "def brand(text, options = {})\n options = canonicalize_options(options)\n options = ensure_class(options, 'brand')\n url = options.delete(:url)\n \n if url.present?\n # link_to(text, url, options)\n else\n content_tag(:span, text, options)\n end\n end", "def create\r\n @brand = Brand.new(params[:brand])\r\n\r\n respond_to do |format|\r\n if @brand.save\r\n format.html { redirect_to administration_brands_url, notice: 'Brand was successfully created.' 
}\r\n format.json { render json: @brand, status: :created, location: @brand }\r\n else\r\n format.html { render action: \"new\" }\r\n format.json { render json: @brand.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end", "def brand_id(brand_name)\r\n Brand.find_by_name(brand_name).id\r\n end", "def brand_params\n params.require(:brand).permit(:description)\n end", "def admin_features_brand_params\n params.require(:admin_features_brand).permit(:product_id, :brand, :description)\n end", "def create\n @brand = Brand.new(params[:brand])\n\n respond_to do |format|\n if @brand.save\n format.html { redirect_to brands_path, notice: 'Brand was successfully created.' }\n format.json { render json: @brand, status: :created, location: @brand }\n else\n format.html { render action: \"new\" }\n format.json { render json: @brand.errors, status: :unprocessable_entity }\n end\n end\n end", "def category; end", "def create\n @product_brand = Product::Brand.new(product_brand_params)\n\n respond_to do |format|\n if @product_brand.save\n format.html { redirect_to @product_brand, notice: 'Brand was successfully created.' }\n format.json { render :show, status: :created, location: @product_brand }\n else\n format.html { render :new }\n format.json { render json: @product_brand.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @brand_category = BrandCategory.find(params[:id])\n\n respond_to do |format|\n if @brand_category.update_attributes(params[:brand_category])\n format.html { redirect_to @brand_category, notice: 'Brand category was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @brand_category.errors, status: :unprocessable_entity }\n end\n end\n end", "def set_auto_brand\n @auto_brand = AutoBrand.find(params[:id])\n end", "def url_for_co_hbrand_image(company)\n \"#{I18n.t('shf_medlemssystem_url')}/hundforetag/#{company.id}/company_h_brand\"\n end", "def create\n @custom_camo_hat = CustomCamoHat.new(custom_camo_hat_params)\n @custom_camo_hat.brand_id = params[:brand_id]\n\n respond_to do |format|\n if @custom_camo_hat.save\n format.html { redirect_to @custom_camo_hat, notice: 'Custom camo hat was successfully created.' }\n format.json { render :show, status: :created, location: @custom_camo_hat }\n else\n format.html { render :new }\n format.json { render json: @custom_camo_hat.errors, status: :unprocessable_entity }\n end\n end\n end", "def backoffice_brand_params\n params.require(:brand).permit(:name, :logo, :logo_cache, :featured)\n end", "def create\n @cp_brand = Brand.new(cp_brand_params)\n\n respond_to do |format|\n if @cp_brand.save\n format.html { redirect_to @cp_brand, notice: 'Brand was successfully created.' }\n format.json { render :show, status: :created, location: @cp_brand }\n else\n format.html { render :new }\n format.json { render json: @cp_brand.errors, status: :unprocessable_entity }\n end\n end\n end" ]
[ "0.7413724", "0.72763336", "0.69886476", "0.69886476", "0.69294107", "0.69266593", "0.6883197", "0.6807878", "0.6761751", "0.6756354", "0.6734865", "0.67167133", "0.6714422", "0.67134464", "0.6682297", "0.6682297", "0.6682297", "0.6682297", "0.6682297", "0.6682297", "0.6663644", "0.66494834", "0.6604125", "0.6599956", "0.65183735", "0.6455085", "0.6455085", "0.6454905", "0.6454905", "0.6422376", "0.64063066", "0.6392705", "0.6378278", "0.6369647", "0.6285408", "0.6272941", "0.6270098", "0.6225322", "0.6169613", "0.6148147", "0.6133859", "0.6131413", "0.61111206", "0.60881543", "0.6054097", "0.6054097", "0.6053658", "0.60289735", "0.60280526", "0.5987097", "0.59836435", "0.5974926", "0.5960467", "0.5947521", "0.5945094", "0.59184766", "0.5914209", "0.59140134", "0.59039086", "0.59020215", "0.58963746", "0.5894494", "0.58758867", "0.58646244", "0.5862921", "0.585245", "0.58515686", "0.5846532", "0.58230555", "0.5822669", "0.5811553", "0.5805074", "0.5801978", "0.5801582", "0.57909524", "0.5789163", "0.57885385", "0.57677996", "0.57637703", "0.5760269", "0.5748557", "0.57442206", "0.5742623", "0.5737353", "0.573355", "0.5725335", "0.57209253", "0.5715001", "0.5705426", "0.5702202", "0.56957257", "0.56878084", "0.5680567", "0.5668058", "0.5657371", "0.5651686", "0.56455237", "0.56374335", "0.5625792", "0.5607504" ]
0.6305044
34
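The brand_string setter in the row above and the ignore setter in the row that follows read like parts of the same configuration DSL. The sketch below shows how the stored brand and ignore pattern might be applied when filtering category names; the class name, the keep_category? helper, and the example values are assumptions, while the two setter bodies are taken verbatim from the rows.

# Hypothetical sketch: brand_string and ignore setters plus one way the
# stored ignore pattern could be used to filter category names.
class BrandCategoryConfig
  # specifies the specific brand for that category
  def brand_string(str)
    @brand = str
  end

  # specifies a regular expression used to ignore categories whose names match it
  def ignore(regex)
    @ignored = regex
  end

  # Returns true when no ignore pattern is set or the name does not match it.
  def keep_category?(name)
    @ignored.nil? || (name =~ @ignored).nil?
  end
end

config = BrandCategoryConfig.new
config.brand_string("Acme")
config.ignore(/clearance/i)
config.keep_category?("Clearance Items")  # => false
config.keep_category?("Outdoor Gear")     # => true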
specifies a regular expression used to ignore categories if the name matches the specified regular expression
def ignore(regex) @ignored = regex end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def locale_exclude_regex(kind)\n Regexp.union(@exclude_objects[kind.to_sym].map do |identifier|\n if identifier.is_a? String\n # Add leading/trailing slashes if necessary\n new_identifier = identifier.dup\n new_identifier[/^/] = '/' if identifier[0,1] != '/'\n new_identifier[/$/] = '/' unless [ '*', '/' ].include?(identifier[-1,1])\n\n /^[^\\/]*#{new_identifier.gsub('*', '(.*?)').gsub('+', '(.+?)')}$/\n else\n identifier\n end\n end)\n end", "def suppress name\n name = /#{name}/ unless name.is_a? Regexp\n (@suppressed[@current] ||= []) << name\n end", "def reserved_nets_regex; end", "def ignore(regexps)\n @options[:ignore] = [options[:ignore], regexps]\n registry[:silencer] = Silencer.new(self)\n end", "def reduce_case_insensitive(_production, _range, _tokens, _children)\n Regexp::IGNORECASE\n end", "def ignore(name, rx, tests: [], preconditions: [], process: nil)\n _leaf name, rx, ignorable: true, tests: tests, process: process, preconditions: []\n end", "def regex\n 'tag regex not defined'\n end", "def exclude(pattern)\n excludes << pattern\n end", "def ignore!(regexps)\n @options.delete(:ignore)\n @options[:ignore!] = regexps\n registry[:silencer] = Silencer.new(self)\n end", "def ignore(regexps)\n @options[:ignore] = [options[:ignore], regexps]\n Celluloid::Actor[:listen_silencer] = Silencer.new(self)\n end", "def exclude(pattern)\n excludes << pattern\n end", "def ignore(*patterns)\n @ignore_patterns += patterns\n end", "def name_filter\n self.gsub(/[^a-zA-Z\\s\\-\\(\\)]/, '')\n end", "def ignore!(regexps)\n @options.delete(:ignore)\n @options[:ignore!] = regexps\n Celluloid::Actor[:listen_silencer] = Silencer.new(self)\n end", "def ignore_case(search, case_type)\n (case_type == :smart && search !~ /[A-Z]/) || case_type == :ignore\n end", "def with_tag_blacklist(tags_blacklist_regex)\n @regex_black_list = Regexp.new(tags_blacklist_regex, Regexp::IGNORECASE) unless tags_blacklist_regex.nil? 
|| tags_blacklist_regex.empty?\n self\n end", "def protect_regex(reg)\n Regexp.new('(^|(?!(!.*)))' + reg.source + '($|(?!(.*!)))')\n end", "def anything_but(value)\n append \"(?:[^#{sanitize value}]*)\"\n end", "def regexp; end", "def regexp; end", "def add_regexp_handling pattern, name, exclusive = false\n bitmap = @attributes.bitmap_for(name)\n @regexp_handlings << [pattern, bitmap]\n @exclusive_bitmap |= bitmap if exclusive\n end", "def set_resource_ignore_pattern(type, pattern)\n raise TypeError \"Ignore pattern must be a string or an array\" unless pattern.is_a?(String) || pattern.is_a?(Array)\n @ignore_rules[type] = pattern\n end", "def only(regex)\n @only = regex\n end", "def sanitized_category\n return category.underscore.gsub(/ /, '_').gsub(/[^a-z_0-9]/,'')\n end", "def ignore!(*regexps)\n directories_records.each { |r| r.ignore!(*regexps) }\n self\n end", "def add_regexp_handling(pattern, name)\n @attribute_manager.add_regexp_handling(pattern, name)\n end", "def private_nets_regex; end", "def ignore(*regexps)\n directories_records.each { |r| r.ignore(*regexps) }\n self\n end", "def blacklist\n Regexp.new \"(#{ BLACKLIST.join '|' })\"\n end", "def test_exclusion_match_nocase\r\n\t\tcontent = \"first line.\\nthis string contains a case insensitive match on: MyMatch123\"\r\n\t\tsnort_rule_content = SnortRuleContent.new\r\n\t\tsnort_rule_content.not_modifier = true\r\n\t\tsnort_rule_content.unescaped_string = \"mymatch123\"\r\n\t\tsnort_rule_content.nocase = true\r\n\t\tassert(!snort_rule_content.match(content,0),\"incorrect nocase exclusion match on content.\")\r\n\tend", "def checkTypo(arg,regex)\r\n return !(arg=~regex)\r\nend", "def container_registry_tag_regex\n @container_registry_tag_regex ||= /[\\w][\\w.-]{0,127}/\n end", "def test_exclusion_match_nocase_no_match\r\n\t\tcontent = \"first line.\\nthis string does not contain a case insensitive match on: MyMatch123\"\r\n\t\tsnort_rule_content = SnortRuleContent.new\r\n\t\tsnort_rule_content.not_modifier = true\r\n\t\tsnort_rule_content.unescaped_string = \"some other string\"\r\n\t\tsnort_rule_content.nocase = true\r\n\t\tmatch = snort_rule_content.match(content,0)\r\n\t\tassert_equal(0, match,\"nocase exclusion match on content didnt fire.\")\t\r\n\tend", "def exclude(pattern)\n exclusions << pattern\n exclusions.dup\n end", "def exclude(pattern)\n exclusions << pattern\n exclusions.dup\n end", "def assertNotMatchTest pattern, value\n assertNotMatch pattern, value\n end", "def =~(regex)\n (@name =~ regex)\n end", "def select_default_ignore_patterns\n\t@exclude_patterns = DEFAULT_IGNORE_PATTERNS.dup\n end", "def keep_file_regex; end", "def test_exclusion_match_nocase\r\n\t\tcontent = \"first line.\\nthis string does not contain a case sensitive match on: MyMatch123\"\r\n\t\tsnort_rule_content = SnortRuleContent.new\r\n\t\tsnort_rule_content.not_modifier = true\r\n\t\tsnort_rule_content.unescaped_string = \"mymatch123\"\r\n\t\tsnort_rule_content.nocase = false\r\n\t\tmatch = snort_rule_content.match(content,0)\r\n\t\tassert_equal(0, match,\"case sensitive exclusion match on content didnt fire.\")\t\r\n\tend", "def regexps; end", "def filterTutorials(x)\n x =~ %r{topics/.*/tutorials/.*/tutorial.*\\.md}\nend", "def isKeyword?\n self.category =~ /(keyword)/i\n end", "def name_regexp name\n klass, type, name = parse_name name\n\n case type\n when '#', '::' then\n /^#{klass}#{type}#{Regexp.escape name}$/\n else\n /^#{klass}(#|::)#{Regexp.escape name}$/\n end\n end", "def name_regexp name\n klass, type, name = parse_name name\n case 
type\n when '#', '::' then\n /^#{klass}#{type}#{Regexp.escape name}$/\n else\n /^#{klass}(#|::)#{Regexp.escape name}$/\n end\n end", "def clear_ignore_patterns\n\t@exclude_patterns = [ /^$/ ]\n end", "def term_non_matches\n @terms.reject { |t| self.class.match_against_term?(t) }\n end", "def test_exclusion_match\r\n\t\tcontent = \"first line.\\nthis string contains a case sensitive match on: MyMatch123\"\r\n\t\tsnort_rule_content = SnortRuleContent.new\r\n\t\tsnort_rule_content.not_modifier = true\r\n\t\tsnort_rule_content.unescaped_string = \"MyMatch123\"\r\n\t\tsnort_rule_content.nocase = false\r\n\t\tassert(!snort_rule_content.match(content,0),\"incorrect case sensitive exclusion match on content.\")\r\n\tend", "def reduce_no_match_string(_production, _range, _tokens, theChildren)\n NoMatchTest.new(theChildren.last)\n end", "def create_filter(regexs, field = nil)\n return nil if regexs.nil? || regexs.empty?\n\n # /<regex>/i match pattern ignore case\n proc { |e| regexs.index do |regex|\n return /#{regex}/i =~ e if field\n return /#{regex}/i =~ e[field] if field\n end }\n end", "def category!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 16 )\n\n type = CATEGORY\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 150:11: 'category'\n match( \"category\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 16 )\n\n end", "def reek_only_of(smell_category, *patterns)\n ShouldReekOnlyOf.new(smell_category, patterns)\n end", "def exclude_name(name)\n @rest_call.append_headers(\"X-Nuage-FilterType\", \"predicate\")\n @rest_call.append_headers(\"X-Nuage-Filter\", \"name ISNOT '#{name}'\")\n end", "def category_conditions\n [\"words.category LIKE ?\", \"%#{category}\"]\n end", "def ignore_theater name\n list = ['INDRP - El Campanil Theatre - Antioch, CA',\n 'PE - El Rey Theatre - Chico, CA',\n 'Castro',\n 'INDLF',\n 'EXSV - Highland Park 3 - Highland Park, CA',\n 'CFMY - Cinefamily@The Silent Movie Theatre - Los Angeles, CA',\n 'REL - Reel Cinema - Wofford Heights, CA',\n 'AR - Auditorium Rental, AS - Auditorium Screening, CW - Closed Weekdays, DP - Damaged Print, EF - Equipment Failure',\n 'FF - Film Festival, MC - Movie Canceled, NA - No Authorization, NB - No Bookings, ND - Called, No Data Available',\n 'NE - No Engagement, NP - No Patrons, PR - Pending Revenue, SS - Special Screening, TC - Temporarily Closed',\n 'INDRP - Downtown Independent - Los Angeles, CA',\n '21CC - Victory Theatre - Safford, AZ',\n 'EGYP - Egyptian - Hollywood, CA'\n ]\n list.each do |v|\n return true if name.include?(v)\n end\n return false\nend", "def category_matches? cat_name\n return cat_name.include?(category) || category == cat_name\n end", "def filtered_description(regexp)\n build_description_from(\n *nested_descriptions.collect do |description|\n description =~ regexp ? 
$1 : description\n end\n )\n end", "def not_keyword(string)\n\t\t(str(string) >> identifier_match.absent?).absent?\n\tend", "def matching_text_element_lines(regex, exclude_nested = T.unsafe(nil)); end", "def valid_name (name)\r\n /^\\w+$/.match (name)\r\n end", "def category_name\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 36 )\n return_value = CategoryNameReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n begin\n # at line 158:18: uri\n @state.following.push(TOKENS_FOLLOWING_uri_IN_category_name_1251)\n uri\n @state.following.pop\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look(-1)\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 36 )\n\n end\n\n return return_value\n end", "def pattern\n Regexp.union(pattern_classifiers.map(&:pattern))\n end", "def remove_company_type(name)\n return name.gsub(/(^|\\s)(gmbh|ag)($|\\s)/i, ' ').strip\n end", "def alert_terms_regex\n unless alert_terms.nil? or alert_terms.empty?\n @alert_terms_regex ||= Regexp.new(alert_terms.gsub(/\\s+/, '|'), true)\n end\n end", "def not_matching(regexp)\n regexp = Helpers.regexp_to_string regexp if regexp.is_a? Regexp\n @type = :doesNotMatch\n @value = regexp\n @request_builder\n end", "def skip_name?(control, name)\n if control.has_key?(:exclude) and control[:exclude].include? name\n true\n elsif control.has_key?(:include) and not control[:include].include? name\n true\n end\n end", "def parameter_notations(name)\n escaped_name = Regexp.escape(name)\n type = /<[^>]+>/\n /(#{escaped_name}|#{escaped_name}#{type})/\n end", "def add_ignore_criteria(name, &block)\n give_name(name, block)\n @ignore_criteria << block\n end", "def get_tag_regex(tag)\n return Regexp.new(\"\\\\A\\\\{#{tag} /\\\\}\")\n end", "def sb_regex(name = 'Default')\n exemplar = %Q{#\"^(/private)?/var/db/dslocal/nodes/Default(/|$)\"}\n pattern = name.eql?('Default') ? name : exemplar.sub(/Default/, name)\n pattern = Regexp.escape pattern\n Regexp.new pattern.gsub /\\//,'\\\\/'\n end", "def not_regexp(left, right)\n # could be DRYer, but this is more readable than: \"NOT #{self.=~(left,right)}\"\n raise if right.is_a? Regexp\n \"NOT #{left}#{quotify right}\"\n end", "def reduce_grouping_parenthenses(_production, _range, _tokens, theChildren)\n Regex::NonCapturingGroup.new(theChildren[1])\n end", "def sanitize_reject_regex!(params, rules)\n rules.each do |key, rule|\n if params.has_key?(key)\n params[key] = check_reject_regex(params[key], rule[:default_value], rule[:regex])\n else\n params[key] = rule[:default_value]\n end\n end\n end", "def ignore_resources(type, *patterns)\n @ignore_rules[type] += patterns\n end", "def is_noise?( name, url )\n name_key = @url_filter[url.downcase]\n if ( name_key != nil )\n if name.downcase.include? name_key\n return false\n else\n return true\n end\n else\n return false\n end\n end", "def carefull_name?(name)\n not !! 
(name =~ /\\w{8}-\\w{4}-\\w{4}-\\w{4}-\\w{12}/)\nend", "def custom_keyword_regexp\n return nil if @custom_keywords.empty?\n Regexp.new(\"^(#{@custom_keywords.join('|')})\\$\")\n end", "def categories(str)\n raise NotImplementedError\n end", "def exclude?(fn)\n calculate_exclude_regexp unless @exclude_re\n fn =~ @exclude_re\n end", "def never_expect_dump_file_to_match(regex, category = 'requests')\n should_never_happen do\n File.exist?(dump_file_path(category)) &&\n read_dump_file(category) =~ regex\n end\n end", "def ignore_files_with_regexps(files, regexps)\n files.select { |f| regexps.all? { |r| r.match(f.file).nil? } }\n end", "def sanitize_label(label)\n label.gsub(%r![^a-z0-9_\\-.]!i, \"\")\n end", "def does_not_contain_special str\n !(str.include?(\"#\") || str.include?(\"!\") || str.include?(\"$\"))\n end", "def filter_entries(entries)\n entries = entries.reject do |e|\n unless ['_posts', '.htaccess'].include?(e)\n # Reject backup/hidden\n ['.', '_', '#'].include?(e[0..0]) or e[-1..-1] == '~'\n end\n end\n entries = entries.reject { |e| ignore_pattern.match(e) }\n end", "def pattern2regex(pattern); end", "def pattern\n segs = @tags.map { |tagged_segment| build_segment(tagged_segment) }\n segs.last.gsub!(/\\.$/, '')\n segs.unshift \"^\"\n segs.push \"\\\\.?$\"\n Regexp.new(segs.join)\n end", "def add_subject_ignore(matcher)\n @subject_ignores << matcher\n self\n end", "def ignoring\n %w{*_test.lua *_spec.lua .*}\n end", "def match_tag_name(name)\n\n name.sub!(/^c:/, '')\n\n matches = []\n\n Conf.curation_types.each do |type|\n matches.push type if type.starts_with?(name)\n end\n\n return matches[0] if matches.length == 1\n end", "def get_link_filter\n return super if datastore['ExcludePathPatterns'].to_s.empty?\n\n patterns = opt_patterns_to_regexps( datastore['ExcludePathPatterns'].to_s )\n patterns = patterns.map { |r| \"(#{r.source})\" }\n\n Regexp.new( [[\"(#{super.source})\"] | patterns].join( '|' ) )\n end", "def clean_tag(name)\n name.gsub( /[^-.,_[:alnum:]]/, '_' )\n end", "def regex(pattern)\n Regexp.new pattern.regex\n end", "def clear_ignore_patterns\n @exclude_patterns = [ /^$/ ]\n end", "def country_exclude(affi_string)\n $country_exceptions.each do |not_a_country|\n if affi_string.include?(not_a_country)\n return affi_string.gsub(not_a_country, \"\")\n end\n end\n # if notting is removed\n return affi_string\nend", "def select_default_ignore_patterns\n @exclude_patterns = DEFAULT_IGNORE_PATTERNS.dup\n end", "def without_instruction(text)\n text.gsub(/^you (should|must)/i, '').gsub(/\\.$/, '')\n end", "def method_missing(name, *args)\n cleaned_name = name.to_s.gsub(/(un)?train_([\\w]+)/, '\\2')\n category = CategoryNamer.prepare_name(cleaned_name)\n if category_keys.include?(category)\n args.each { |text| eval(\"#{Regexp.last_match(1)}train(category, text)\") }\n elsif name.to_s =~ /(un)?train_([\\w]+)/\n raise StandardError, \"No such category: #{category}\"\n else\n super # raise StandardError, \"No such method: #{name}\"\n end\n end", "def ignored\n [\n '.agignore',\n '.cvsignore',\n '.gitignore',\n '.hgignore',\n ].map do |file_with_ignore_patterns|\n if File.exist? 
file_with_ignore_patterns\n patterns = File.read(file_with_ignore_patterns).split(\"\\n\")\n patterns.map do |pattern|\n next if pattern =~ /^#/\n next if pattern =~ /^\\s*$/\n \"-not \\\\( -path \\\"*#{pattern}*\\\" -prune \\\\)\"\n end.compact.join(' ')\n else\n ''\n end\n end.join(' ') + [\n \"-not \\\\( -path \\\"*\\\\.git*\\\" -prune \\\\)\"\n ].join(' ')\nend", "def test_shortcut_character_classes_are_negated_with_capitals\n assert_equal 'the number is ', \"the number is 42\"[/\\D+/]\n assert_equal 'space:', \"space: \\t\\n\"[/\\S+/]\n # ... a programmer would most likely do\n assert_equal ' = ', \"variable_1 = 42\"[/[^a-zA-Z0-9_]+/]\n assert_equal ' = ', \"variable_1 = 42\"[/\\W+/]\n end", "def selector_to_regex(str)\n Regexp.new(str.strip.gsub('.', '\\b\\S*\\.').gsub('#', '\\b\\S*#').gsub(' ', '(\\b.*\\s.*\\b)') << '\\b[^\\s]*\\z')\n end" ]
[ "0.6018575", "0.57293683", "0.56914043", "0.55394024", "0.5514741", "0.54915434", "0.5430975", "0.54145324", "0.54131943", "0.53974766", "0.5396474", "0.5388664", "0.5372349", "0.53656346", "0.5339012", "0.5329243", "0.52785164", "0.5269876", "0.5232071", "0.5232071", "0.5189346", "0.5157988", "0.51572126", "0.5144189", "0.51408786", "0.51373947", "0.5120918", "0.5105256", "0.51014644", "0.50942093", "0.5090633", "0.5075541", "0.50603974", "0.5060227", "0.5060227", "0.50445855", "0.5038951", "0.5029106", "0.50215214", "0.50133866", "0.50087655", "0.50034136", "0.4994114", "0.49873295", "0.49834502", "0.49693984", "0.4961558", "0.4937343", "0.4935034", "0.49342284", "0.49318492", "0.4920045", "0.49194077", "0.49139", "0.490587", "0.49009553", "0.48964357", "0.4886677", "0.48842707", "0.48726115", "0.48696736", "0.48684072", "0.48616225", "0.48555216", "0.48505616", "0.4820791", "0.48189318", "0.47895163", "0.47871792", "0.47803387", "0.47717622", "0.47705254", "0.476982", "0.4769123", "0.47625396", "0.47593656", "0.47571683", "0.47564632", "0.47546482", "0.47496226", "0.47411588", "0.47374314", "0.472395", "0.47194862", "0.4719462", "0.47168046", "0.47147685", "0.47065228", "0.46997675", "0.4699698", "0.4688893", "0.46878892", "0.4682659", "0.46769854", "0.46765167", "0.4669573", "0.46686974", "0.46673408", "0.46626592", "0.4662241" ]
0.6157789
0
specifies a regular expression to select only categories whose names match
def only(regex) @only = regex end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def category_conditions\n [\"words.category LIKE ?\", \"%#{category}\"]\n end", "def category_matches? cat_name\n return cat_name.include?(category) || category == cat_name\n end", "def category_candidates(category)\n return @category_cache[category] unless @category_cache[category].nil?\n # from whole name singularized\n candidates = []\n decorated_category = Cyclopedio::Syntax::NameDecorator.new(category, parse_tree_factory: @parse_tree_factory)\n @nouns.singularize_name(category.name, decorated_category.category_head).each do |name_singularized|\n candidates.concat(candidates_for_name(name_singularized,@category_filters))\n end\n candidate_set = create_candidate_set(category.name,candidates)\n return @category_cache[category] = candidate_set if !candidate_set.empty? || @category_exact_match\n # from simplified name\n candidate_set = candidate_set_for_syntax_trees(decorated_category.category_head_trees,@category_filters)\n return @category_cache[category] = candidate_set unless candidate_set.empty?\n # from original whole name\n candidate_set = candidate_set_for_name(category.name, @category_filters)\n @category_cache[category] = candidate_set\n end", "def category_with_name(name)\n categorized_items = categories_items\n categorized_items.detect{ |sub_item_array| sub_item_array[0].name == name }\n end", "def find_sorters cat_name\n sub_sorters.select{|ss|ss.category_matches?(cat_name)}\n end", "def isKeyword?\n self.category =~ /(keyword)/i\n end", "def parsed_category\n self.category = self[:category] unless self[:category].to_s =~ /^[A-Z]$/\n self[:category]\n end", "def valid_category_name?(category)\n category = category.to_sym\n\n return true if CATEGORIES.keys.include?(category)\n\n CATEGORIES.each do |k, v|\n if v[:children] && v[:children].keys.include?(category)\n return true\n end\n end\n\n false\n end", "def tokenizer_category_header_group\n TokenExtractor.new(\n :category_group,\n /(?<=(master|under\\s))\\s*[MU].{2}\\s*(?=maschi|femmine)/ix,\n 3\n )\n end", "def categories(str)\n raise NotImplementedError\n end", "def compare_catetegory category_name\n condition=\"%#{category_name}%\"\n category=Category.where(\"name like ? 
\",condition).first\n if category.nil?\n category=Category.create name:category_name\n end\n return category\n end", "def catses\n REXML::XPath.match(@xml, './app:categories', Names::XmlNamespaces)\n end", "def match_tag_name(name)\n\n name.sub!(/^c:/, '')\n\n matches = []\n\n Conf.curation_types.each do |type|\n matches.push type if type.starts_with?(name)\n end\n\n return matches[0] if matches.length == 1\n end", "def index\n @categories = current_user.categories.filter_conditions(params[:name])\n end", "def core_category_candidates(category)\n candidates = []\n singularize_name_nouns(category.name, category.head).each do |phrase|\n candidates.concat(candidates_for_name(phrase, @category_filters))\n\n end\n candidate_set = create_candidate_set(category.name,candidates.uniq)\n end", "def category\n catName = params[:title]\n @meals = Meal.where(\"category like ?\", catName)\n end", "def category_name\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 36 )\n return_value = CategoryNameReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n begin\n # at line 158:18: uri\n @state.following.push(TOKENS_FOLLOWING_uri_IN_category_name_1251)\n uri\n @state.following.pop\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look(-1)\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 36 )\n\n end\n\n return return_value\n end", "def handle_categories(category_names)\n return if !self.undergrad? || category_names.nil?\n self.categories = [] # eliminates any previous interests so as to avoid duplicates\n category_array = []\n category_array = category_names.split(',').uniq if category_names\n category_array.each do |cat|\n self.categories << Category.find_or_create_by(name: cat.downcase.strip)\n end\n end", "def extract_categories cats\n cats.inject Hash.new do |hash, tag|\n\n # iterate through groups if the tag belongs to multiple\n tag[\"groups\"].each do |group|\n name = group[\"name\"]\n hash[name] ||= []\n hash[name] << tag[\"name\"]\n end\n hash\n end\n end", "def categories_for_file(file)\n _, categories = CATEGORIES.find do |key, _|\n filename_regex, changes_regex = Array(key)\n\n found = filename_regex.match?(file)\n found &&= changed_lines(file).any? 
{ |changed_line| changes_regex.match?(changed_line) } if changes_regex\n\n found\n end\n\n Array(categories || :unknown)\n end", "def tokenizer_relay_header_category\n TokenExtractor.new(\n :category,\n /(?<=Cat(\\.|\\s))[ABCDEFGH]/i,\n 1\n )\n end", "def category!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 16 )\n\n type = CATEGORY\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 150:11: 'category'\n match( \"category\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 16 )\n\n end", "def categories\n taxonomies.all :conditions => { :classification => :category }\n end", "def category_names=(names)\n names.each do |name|\n unless name == \"\"\n category = Category.find_or_create_by(name: name)\n self.categories << category unless self.categories.include?(category)\n end\n end\n end", "def categories_search(query, order = nil)\n Collection.find(:all, :select => 'DISTINCT collections.*', :joins => 'INNER JOIN linkings ON collections.id = linkings.category_id',\n :conditions => [\"(linkings.group_id IN (#{groups.map { |o| o.id }.join(',')})) AND (( (collections.`type` = 'Category' ) ) AND ((collections.name like ?) OR (collections.description like ?) OR \\\n (SELECT tags.name FROM tags INNER JOIN taggings ON tags.id = taggings.tag_id WHERE taggings.taggable_id = collections.id AND taggings.taggable_type = 'Category' AND tags.name LIKE ?) IS NOT NULL))\", \"%#{query}%\", \"%#{query}%\", \"%#{query}%\"],\n :order => order)\n end", "def extract_categories(placeholder)\n placeholder.gsub(/\\({2}/, '' ).gsub(/\\){2}/, '' ).split(':')\n end", "def category_candidates(category)\n return @category_cache[category] unless @category_cache[category].nil?\n candidates = candidates_for_name(singularize_name(category.name, category.head), @category_filters)\n if !candidates.empty?\n candidate_set = create_candidate_set(category.name,candidates)\n else\n candidate_set = candidate_set_for_syntax_trees(category.head_trees,@category_filters)\n end\n if candidate_set.empty?\n candidates = candidates_for_name(category.name, @category_filters)\n candidate_set = create_candidate_set(category.name,candidates) unless candidates.empty?\n end\n @category_cache[category] = candidate_set\n end", "def get_categories\n cats = []\n params.each do |k,v|\n if k.starts_with? \"category\"\n name = v\n num = cat_number(k) \n cats << [name,num]\n end\n end\n return cats\n end", "def pattern_candidates(pattern,representative)\n candidate_set_for_syntax_trees(representative.head_trees,@category_filters,pattern)\n end", "def category(name)\n @categories.find { |c| c.name == name}\n end", "def by_categories(query)\n # \"OR\" behavior by default\n apply_filter_list \"categories\", query\n\n self\n end", "def tokenizer_category_header_group\n TokenExtractor.new(\n :category_group,\n / *((master|under)\\s\\d\\d|[MU]\\d\\d)/i,\n / *tempo base */i\n )\n end", "def has_category?(item, category)\n return false if item[:categories].nil?\n item[:categories].collect{|c| c.downcase}.include? 
category.downcase\n end", "def category= arg\n cat_arr = arg.to_s.split(':')[0..@depth-1] || []\n @category = cat_arr.join(':').to_s\n end", "def context_type_category_header\n ContextTypeDef.new(\n :category_header,\n [\n# /(?<anything>^.*)/i,\n# /(?<empty>^\\s*\\r?\\n|^\\r?\\n|^\\s*\\n|^\\s*$)/i,\n# /(?<empty>^\\s*\\r?\\n|^\\r?\\n|^\\s*\\n|^\\s*$)/i,\n /(?<style>\\s+(?<distance>(50|100|200|400|800|1500)))\\s+(?<stroke>(stile|dorso|rana|farfalla|delfino|misti))/i,\n /-{60,}/\n\n ]\n )\n end", "def category_names\n @category_names || categories.map(&:name).join(' ')\n end", "def setup_categories # :nodoc:\n [:event, :time, :salutation, :recurrency, :preposition,\n :article, :day, :verb, :name, :number, :interrogation].each do |category|\n @matches[category] = []\n end\n @matches[:guessing] = false\n end", "def reduce_case_insensitive(_production, _range, _tokens, _children)\n Regexp::IGNORECASE\n end", "def categories_validate(categories)\r\n categories = categories.split(',')\r\n user_message = \"\"\r\n cats_seen = []\r\n categories.each do |cat|\r\n cat.strip!\r\n next if cats_seen.include?(cat)\r\n cats_seen << cat\r\n user_message << \" Category '#{cat}' was too long.\" if cat.length > 25\r\n good_chars = [*'0'..'9', *'a'..'z', *'A'..'Z', '/', '-', '(', ')', '&',\r\n '#', '@', '+', '.', '?', '!'].join ' '\r\n word_chars = [*'0'..'9', *'a'..'z', *'A'..'Z'].join\r\n unless cat.split(//).all? {|char| good_chars.include?(char) }\r\n user_message << \" Category '#{cat}' had weird characters.\"\r\n end\r\n unless cat.split(//).any? {|char| word_chars.include?(char)}\r\n user_message << \" Category '#{cat}' lacks a letter or digit.\"\r\n end\r\n end\r\n return user_message unless user_message == \"\"\r\n return true\r\n end", "def get_category_names \n @data.search(\"div.award h1\").collect { |title| title.text }\n end", "def categories\n add_to_query restrict_kind: 'category'\n end", "def assign_categories \n if @category_names \n self.categories = @category_names.split(/\\s+/).map do |name|\n Category.find_or_create_by_name(name)\n end\n end\n end", "def get_categories(row)\n categories = []\n cat = at_in(:category1 , row) # should invent some loop here\n categories << cat if cat\n cat = at_in(:category2 , row) # but we only support\n categories << cat if cat\n cat = at_in(:category3 , row) # three levels, so there you go\n categories << cat if cat\n categories\n end", "def category_names=(list)\n list = list.is_a?(Array) ? 
list : list.split(',').reject(&:blank?).collect { |item| item.try(:strip) }\n delete_unused_categories(list)\n add_new_categories(list)\n end", "def sanitized_category\n return category.underscore.gsub(/ /, '_').gsub(/[^a-z_0-9]/,'')\n end", "def cards_in_category(category)\n # want to return an array of cards that only has :STEM category\n # run through array and check if ':STEM' matches the last\n # element in the array\n @cards.select {|card| card.category == category}\n end", "def category_key!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n\n type = CATEGORY_KEY\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 131:4: 'Category'\n match( \"Category\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n end", "def isCompany?\n self.category =~ /(company|index|currency)/i\n end", "def construct_alpha_category(name1, name2, alpha_categories, i)\n sameSoFar = true\n index = 0\n length_of_shorter_name = [name1.length, name2.length].min\n\n # Attempt to find the first character that differs\n while sameSoFar && (index < length_of_shorter_name)\n char1 = name1[index].chr\n char2 = name2[index].chr\n\n sameSoFar = (char1 == char2)\n index += 1\n end\n\n # Form the category name\n if sameSoFar and (index < name1.length)\n # There is at least one character remaining in the first name\n alpha_categories[i] << name1[0,index+1]\n alpha_categories[i+1] << name2[0, index]\n elsif sameSoFar and (index < name2.length)\n # There is at least one character remaining in the second name\n alpha_categories[i] << name1[0,index]\n alpha_categories[i+1] << name2[0, index+1]\n else\n alpha_categories[i] << name1[0, index]\n alpha_categories[i+1] << name2[0, index]\n end\n\n alpha_categories\n end", "def categorized_list\n %w[artist copyright character meta general]\n end", "def find_category(file)\n file.match(Regexp.new(Regexp.escape(csv_root))) or \n raise AMEEM::Exceptions::Location.new(\"#{file} is not in CSV tree\")\n file.sub(Regexp.new(Regexp.escape(csv_root)),\"\");\n end", "def pattern_candidates(pattern,representative)\n candidate_set_for_syntax_trees(representative_head_trees(representative),@category_filters,pattern) #TODO representative_head_trees?\n end", "def has_category?(name)\n categories.exists?(name: name)\n end", "def category_names\n categories.map {|category| CategoryCode[category]}\n end", "def filterTutorials(x)\n x =~ %r{topics/.*/tutorials/.*/tutorial.*\\.md}\nend", "def format_category_name # WHY IS IT IN HERE?\n self.category.name.downcase.delete(' ')\n end", "def tokenizer_individual_category_header_group\n TokenExtractor.new(\n :category_group,\n / *master\\s*(\\d[05]})/i,\n / *(maschi|femmi)/i\n )\n end", "def isChart?\n self.category =~ /(price|chart)/i\n end", "def parse_name\n self.name.downcase.chomp! ':'\n\n if self.name.include? ':'\n parts = self.name.split(':')\n\n if %w(copyright character artist circle).include? 
parts[0]\n self.category = parts[0].to_sym\n end\n\n self.name = parts[1]\n end\n end", "def categories(options = {})\n options = options.merge(:facets => 'category', :facet_num_results => 100, :num_results => 0)\n search('*:*', options).facets['category'].values\n end", "def find_child(search_term, category)\n found_nodes = []\n traverse do |node|\n case category\n when \"description\"\n if node.description =~ /#{Regexp.quote(search_term)}/i\n found_nodes << node\n end\n when \"structure_marker\"\n if node.structure_marker =~ /#{Regexp.quote(search_term)}/i\n found_nodes << node\n end\n when \"file_name\"\n #first steps to make search robust; replace whitespace with underscore to mimic file string\n search_term.strip!\n search_term.gsub!(/\\s+/, '_')\n if node.file_name =~ /#{Regexp.quote(search_term)}/i\n found_nodes << node\n end\n else\n end\n end\n found_nodes\n end", "def filter_select(match, metadata)\n # Break on container_name mismatch\n unless match.hosts.empty? || match.hosts.include?(metadata[:host])\n return false\n end\n # Break on host mismatch\n unless match.container_names.empty? || match.container_names.include?(metadata[:container])\n return false\n end\n # Break if list of namespaces is not empty and does not include actual namespace\n unless match.namespaces.empty? || match.namespaces.include?(metadata[:namespace])\n return false\n end\n\n match_labels(metadata[:labels], match.labels)\n end", "def link_category\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 16 )\n value = nil\n category_name25 = nil\n\n begin\n # at line 110:35: ';' ( WS )? 'category' '=' '\\\"' category_name '\\\"'\n match(T__11, TOKENS_FOLLOWING_T__11_IN_link_category_669)\n # at line 110:39: ( WS )?\n alt_25 = 2\n look_25_0 = @input.peek(1)\n\n if (look_25_0 == WS)\n alt_25 = 1\n end\n case alt_25\n when 1\n # at line 110:39: WS\n match(WS, TOKENS_FOLLOWING_WS_IN_link_category_671)\n\n end\n match(T__25, TOKENS_FOLLOWING_T__25_IN_link_category_674)\n match(T__13, TOKENS_FOLLOWING_T__13_IN_link_category_676)\n match(T__14, TOKENS_FOLLOWING_T__14_IN_link_category_678)\n @state.following.push(TOKENS_FOLLOWING_category_name_IN_link_category_680)\n category_name25 = category_name\n @state.following.pop\n match(T__14, TOKENS_FOLLOWING_T__14_IN_link_category_682)\n # --> action\n value = (category_name25 && @input.to_s(category_name25.start, category_name25.stop))\n # <-- action\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 16 )\n\n end\n\n return value\n end", "def is_category?( category = '' )\n return false unless @is_category\n\n return true if category.blank?\n\n cat_obj = get_queried_object\n\n category = [category] unless category.is_a?(Array)\n category.map!(&:to_s)\n\n if category.include?(cat_obj.term_id.to_s)\n return true\n elsif category.include?(cat_obj.name)\n return true\n elsif category.include?(cat_obj.slug)\n return true\n end\n false\n end", "def categories\n @categories ||= wayfinder.decorated_vocabularies.sort_by(&:label)\n end", "def context_type_category_header\n ContextTypeDef.new(\n :category_header,\n [\n /^\\s*(\\r\\n|\\n|$|\\Z|Torna a inizio pagina)/i, # matches any kind of newline, an empty line or a line with only invisible chars\n /(?<!\\dx)(50\\s|100\\s|200\\s|400\\s|800\\s|1500\\s) *(stile|misti|dorso|rana|farf|SL|DO|RA|FA|MI|MX|DF|DS|RN).*(maschi|femmi)/i,\n /\\s*-{10}-*/\n ]\n )\n end", "def 
source_result_category_names\n category_names\n end", "def categories_for(race)\n case race.name\n when \"Junior Men\", \"Junior Women\"\n [ Category.find_or_create_by(name: race.name) ]\n else\n super race\n end\n end", "def set_category\n match_data = /^What is [-]?\\d+\\s*(?<operation>[\\+\\-\\*\\/])\\s*[-]?\\d+\\?$/.match(self.query) || {}\n self.category = CATEGORIES[match_data[:operation]]\n end", "def get_category_name\n Category.find(:first, :select => ['name'],:conditions=>['has_complexity=?',false]).name rescue ''\n end", "def ffck_categories()\n %w(Pitchoun Poussin Benjamin Minime Cadet Junior Senior Veteran Inconnu)\n end", "def clade_filter(term)\n # return unless (clade = Name.find_by(text_name: term))\n\n query = create_query(:Observation, :needs_id, { in_clade: term })\n\n show_selected_results(query)\n end", "def best_match_in(event_categories, result_age = nil)\n debug \"Category#best_match_in for #{name} in #{event_categories.map(&:name).join(', ')}\"\n\n candidate_categories = event_categories.dup\n\n equivalent_matches = candidate_categories.select { |category| equivalent?(category) }\n debug \"equivalent: #{equivalent_matches.map(&:name).join(', ')}\"\n return equivalent_matches.first if one_match?(equivalent_matches)\n\n # Sometimes categories like Beginner and Cat 4 are equivalent but need to\n # be separated if both categories exist\n exact_equivalent = equivalent_matches.detect { |category| category.name == name }\n debug \"exact equivalent: #{exact_equivalent}\"\n return exact_equivalent if exact_equivalent\n\n # If no weight match, ignore weight and match on age and gender\n if candidate_categories.any? { |category| weight == category.weight }\n candidate_categories = candidate_categories.select { |category| weight == category.weight }\n end\n debug \"weight: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if one_match?(candidate_categories)\n return nil if candidate_categories.empty?\n\n # Eddy is essentially senior men/women for BAR\n if equipment == \"Eddy\"\n highest_senior_category = candidate_categories.detect do |category|\n category.ability_begin == 0 &&\n category.gender == gender &&\n !category.age_group? &&\n (category.equipment == \"Eddy\" || category.equipment.blank?)\n end\n debug \"eddy: #{highest_senior_category&.name}\"\n return highest_senior_category if highest_senior_category\n end\n\n # Equipment matches are fuzzier\n candidate_categories = candidate_categories.select { |category| equipment == category.equipment }\n debug \"equipment: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if one_match?(candidate_categories)\n return candidate_categories.first if candidate_categories.one? && equipment?\n return nil if candidate_categories.empty?\n\n if equipment?\n equipment_categories = candidate_categories.select do |category|\n equipment == category.equipment && gender == category.gender\n end\n debug \"equipment and gender: #{equipment_categories.map(&:name).join(', ')}\"\n return equipment_categories.first if equipment_categories.one?\n end\n\n candidate_categories = candidate_categories.reject { |category| gender == \"M\" && category.gender == \"F\" }\n debug \"gender: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if one_match?(candidate_categories)\n return nil if candidate_categories.empty?\n\n candidate_categories = if result_age && !senior? && candidate_categories.none? 
{ |category| ages_begin.in?(category.ages) }\n candidate_categories.select { |category| category.ages.include?(result_age) }\n elsif junior? && ages_begin == 0\n candidate_categories.select { |category| ages_end.in?(category.ages) }\n else\n candidate_categories.select { |category| ages_begin.in?(category.ages) }\n end\n debug \"ages: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if one_match?(candidate_categories)\n return nil if candidate_categories.empty?\n\n unless all_abilities?\n candidate_categories = candidate_categories.select { |category| ability_begin.in?(category.abilities) }\n debug \"ability: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if one_match?(candidate_categories)\n return nil if candidate_categories.empty?\n end\n\n # Edge case for unusual age ranges that span juniors and seniors like 15-24\n if !senior? && ages_begin <= Ages::JUNIORS.end && ages_end > Ages::JUNIORS.end\n candidate_categories = candidate_categories.reject(&:junior?)\n debug \"overlapping ages: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if one_match?(candidate_categories)\n return nil if candidate_categories.empty?\n end\n\n if junior?\n junior_categories = candidate_categories.select(&:junior?)\n debug \"junior: #{junior_categories.map(&:name).join(', ')}\"\n return junior_categories.first if junior_categories.one?\n\n candidate_categories = junior_categories if junior_categories.present?\n end\n\n if masters?\n masters_categories = candidate_categories.select(&:masters?)\n debug \"masters?: #{masters_categories.map(&:name).join(', ')}\"\n return masters_categories.first if masters_categories.one?\n\n candidate_categories = masters_categories if masters_categories.present?\n end\n\n # E.g., if Cat 3 matches Senior Men and Cat 3, use Cat 3\n # Could check size of range and use narrowest if there is a single one more narrow than the others\n unless candidate_categories.all?(&:all_abilities?) || all_abilities?\n candidate_categories = candidate_categories.reject(&:all_abilities?)\n end\n debug \"reject wildcards: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if one_match?(candidate_categories)\n return nil if candidate_categories.empty?\n\n # \"Highest\" is lowest ability number\n # Choose exact ability category begin if women\n # Common edge case where the two highest categories are Pro/1/2 and Women 1/2\n if candidate_categories.one? { |category| category.ability_begin == ability_begin && category.women? && women? }\n ability_category = candidate_categories.detect { |category| category.ability_begin == ability_begin && category.women? && women? }\n debug \"ability begin: #{ability_category.name}\"\n return ability_category if ability_category.include?(self)\n end\n\n # Edge case for next two matchers: don't choose Junior Open 1/2/3 over Junior Open 3/4/5 9-12 for Junior Open 3/4/5 11-12,\n # but still match Junior Women with Category 1\n\n # Choose highest ability category\n highest_ability = candidate_categories.map(&:ability_begin).min\n if candidate_categories.one? { |category| category.ability_begin == highest_ability && (!category.junior? || category.ages.size <= ages.size) }\n highest_ability_category = candidate_categories.detect { |category| category.ability_begin == highest_ability && (!category.junior? 
|| category.ages.size <= ages.size) }\n debug \"highest ability: #{highest_ability_category.name}\"\n return highest_ability_category if highest_ability_category.include?(self)\n end\n\n # Choose highest ability by gender\n if candidate_categories.one? { |category| category.ability_begin == highest_ability && category.gender == gender && (!category.junior? || category.ages.size <= ages.size) }\n highest_ability_category = candidate_categories.detect { |category| category.ability_begin == highest_ability && category.gender == gender && (!category.junior? || category.ages.size <= ages.size) }\n debug \"highest ability for gender: #{highest_ability_category.name}\"\n return highest_ability_category if highest_ability_category.include?(self)\n end\n\n # Choose highest minimum age if multiple Masters 'and over' categories\n if masters? && candidate_categories.all?(&:and_over?)\n if result_age\n candidate_categories = candidate_categories.reject { |category| category.ages_begin > result_age }\n end\n highest_age = candidate_categories.map(&:ages_begin).max\n highest_age_category = candidate_categories.detect { |category| category.ages_begin == highest_age }\n debug \"highest age: #{highest_age_category&.name}\"\n return highest_age_category if highest_age_category&.include?(self)\n end\n\n # Choose narrowest age if multiple Masters categories\n if masters?\n ranges = candidate_categories.select(&:masters?).map do |category|\n category.ages_end - category.ages_begin\n end\n\n minimum_range = ranges.min\n candidate_categories = candidate_categories.select do |category|\n (category.ages_end - category.ages_begin) == minimum_range\n end\n\n return candidate_categories.first if one_match?(candidate_categories)\n end\n\n # Choose narrowest age if multiple Juniors categories\n if junior?\n ranges = candidate_categories.select(&:junior?).map do |category|\n category.ages_end - category.ages_begin\n end\n\n minimum_range = ranges.min\n candidate_categories = candidate_categories.select do |category|\n (category.ages_end - category.ages_begin) == minimum_range\n end\n\n debug \"narrow junior ages: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if one_match?(candidate_categories)\n end\n\n candidate_categories = candidate_categories.reject { |category| gender == \"F\" && category.gender == \"M\" }\n debug \"exact gender: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if one_match?(candidate_categories)\n return nil if candidate_categories.empty?\n\n if wildcard? 
&& candidate_categories.none?(&:wildcard?)\n debug \"no wild cards: #{candidate_categories.map(&:name).join(', ')}\"\n return nil\n end\n\n if candidate_categories.size > 1\n raise \"Multiple matches #{candidate_categories.map(&:name)} for #{name}, result age: #{result_age} in #{event_categories.map(&:name).join(', ')}\"\n end\n end", "def categorize\n if params.has_key?(:category)\n @category = Category.find_by_name(params[:category])\n @product = Product.where(category: @category)\n else\n @product = Product.all\n end\nend", "def all_matching_categories(categories)\n cross_version_category_mapping\n .select { |key, _value| categories.include?(key) }\n .values\n .flatten\n .uniq\n end", "def categories_for_solution_search(category)\n [Sfcatnode.root] + category.children\n end", "def extract_categories(title)\n # First find the article_id\n article_id = get_article_id(title)\n return nil if article_id.nil?\n \n (prefix1, prefix2) = make_path_prefix(article_id)\n fname = @enwiki_dir + \"/\" + prefix1 + \"/\" + prefix2 + \"/#{article_id}.xml\"\n catlist = []\n open(fname).each_line do |line|\n if line =~ /\\[\\[(Category:.*?)\\]\\]/\n # Remove stuff after \"|\" if any\n cat_title = $1.gsub /\\|.*/, ''\n cat_article_id = get_article_id(cat_title)\n catlist.push(cat_article_id) if not cat_article_id.nil?\n end\n end\n return catlist\n end", "def match?(name); end", "def test_character_classes_give_options_for_a_character\n animals = [\"cat\", \"bat\", \"rat\", \"zat\"]\n assert_equal ['cat', 'bat', 'rat'], animals.select { |a| a[/[cbr]at/] }\n end", "def test_character_classes_give_options_for_a_character\n animals = [\"cat\", \"bat\", \"rat\", \"zat\"]\n assert_equal [\"cat\", \"bat\", \"rat\"], animals.select { |a| a[/[cbr]at/] }\n end", "def categories\n stories.map(&:category).delete_if {|c| c.nil? 
or c.global}.uniq\n end", "def match(categories)\n result = []\n for category in categories\n score = score(category)\n result << {value: score, category: category}\n end\n\n highest_value = result.map{|x| x[:value]}.sort.last\n selected_category = nil\n if highest_value > 0\n result.each do |hash|\n if hash[:value] == highest_value\n selected_category = hash[:category]\n end\n end\n end\n\n return selected_category\n\n end", "def best_match_in(event)\n logger.debug \"Category#best_match_in #{name} in #{event.name}: #{event.categories.map(&:name).join(', ')}\"\n\n candidate_categories = event.categories\n\n equivalent_match = candidate_categories.detect { |category| equivalent?(category) }\n logger.debug \"equivalent: #{equivalent_match&.name}\"\n return equivalent_match if equivalent_match\n\n candidate_categories = candidate_categories.select { |category| weight == category.weight }\n logger.debug \"weight: #{candidate_categories.map(&:name).join(', ')}\"\n\n candidate_categories = candidate_categories.select { |category| equipment == category.equipment }\n logger.debug \"equipment: #{candidate_categories.map(&:name).join(', ')}\"\n\n candidate_categories = candidate_categories.select { |category| ages_begin.in?(category.ages) }\n logger.debug \"ages: #{candidate_categories.map(&:name).join(', ')}\"\n\n candidate_categories = candidate_categories.reject { |category| gender == \"M\" && category.gender == \"F\" }\n logger.debug \"gender: #{candidate_categories.map(&:name).join(', ')}\"\n\n candidate_categories = candidate_categories.select { |category| ability_begin.in?(category.abilities) }\n logger.debug \"ability: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if candidate_categories.one?\n return nil if candidate_categories.empty?\n\n if junior?\n junior_categories = candidate_categories.select { |category| category.junior? }\n logger.debug \"junior: #{junior_categories.map(&:name).join(', ')}\"\n return junior_categories.first if junior_categories.one?\n if junior_categories.present?\n candidate_categories = junior_categories\n end\n end\n\n if masters?\n masters_categories = candidate_categories.select { |category| category.masters? }\n logger.debug \"masters?: #{masters_categories.map(&:name).join(', ')}\"\n return masters_categories.first if masters_categories.one?\n if masters_categories.present?\n candidate_categories = masters_categories\n end\n end\n\n # E.g., if Cat 3 matches Senior Men and Cat 3, use Cat 3\n # Could check size of range and use narrowest if there is a single one more narrow than the others\n candidate_categories = candidate_categories.reject { |category| category.all_abilities? }\n logger.debug \"reject wildcards: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if candidate_categories.one?\n return nil if candidate_categories.empty?\n\n # \"Highest\" is lowest ability number\n highest_ability = candidate_categories.map(&:ability_begin).min\n if candidate_categories.one? 
{ |category| category.ability_begin == highest_ability }\n highest_ability_category = candidate_categories.detect { |category| category.ability_begin == highest_ability }\n logger.debug \"highest ability: #{highest_ability_category.name}\"\n return highest_ability_category\n end\n\n candidate_categories = candidate_categories.reject { |category| gender == \"F\" && category.gender == \"M\" }\n logger.debug \"exact gender: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if candidate_categories.one?\n return nil if candidate_categories.empty?\n\n logger.debug \"no wild cards: #{candidate_categories.map(&:name).join(', ')}\"\n return candidate_categories.first if candidate_categories.one?\n return nil if candidate_categories.empty?\n\n raise \"Multiple matches #{candidate_categories.map(&:name)} for #{name} in #{event.categories.map(&:name).join(', ')}\"\n end", "def get_category_newsgroup(category)\n case category\n when \"sell\"\n \"tori.myydaan\"\n when \"buy\"\n \"tori.ostetaan\"\n when \"give\"\n \"tori.myydaan\"\n when \"lost\"\n \"tori.kadonnut\"\n when \"rides\"\n \"tori.kyydit\"\n else\n nil \n end \n end", "def test_passed_by_category(category)\n tests.by_category_name(category)\n end", "def get_category(category)\n CATEGORIES[category.downcase]\n end", "def category_names(categories)\n return t('challenges.no_category') if categories.empty?\n\n categories.map(&:name).join(', ')\n end", "def categories(metrics)\n cats = Set.new\n metrics.keys.each do |meta|\n next if meta.scope.nil? # ignore controller\n if match=meta.metric_name.match(/\\A([\\w|\\d]+)\\//)\n cats << match[1]\n end\n end # metrics.each\n cats\n end", "def select_fittest(category, options = {})\n category = case category\n when Category\n category\n when String\n categories.first(:conditions => ['upper(categories.name) = upper(?)', category])\n end\n uhook_select_fittest category, options unless category.nil?\n end", "def search(text, category)\r\n\t\ttext = \"%#{text}%\"\r\n\t\tstmntSQL = \"title LIKE ? OR description LIKE ? COLLATE utf8_general_ci\"\r\n\t\tif category > 0\r\n\t\t\tcategory = Category.find(category)\r\n\t\t\tcategory.campaigns.where(stmntSQL, text, text).where(status: \"ready\")\r\n\t\telse\r\n\t\t\tCampaign.where(stmntSQL, text, text).where(status: \"ready\")\r\n\t\tend\r\n\tend", "def refine_food(food) \n case food[\"type\"]\n when /coffee/i\n food[\"category\"] = \"coffee\"\n when /bar/i\n food[\"category\"] = \"bar\"\n when /thai/i\n food[\"category\"] << \"thai\"\n food[\"type\"] = \"thai restaurant\"\n when /indian|curry/i\n food[\"category\"] << \"indpak\"\n when /french|france/i\n food[\"category\"] << \"french\"\n food[\"type\"] =\"french restaurant\"\n when /ital(ian|y)/i\n food[\"category\"] << \"italian\"\n food[\"type\"] = \"italian restaurant\"\n when /fish|seafood/i\n if (food[\"type\"] =~ /chips/i)\n then food[\"category\"] << \"fishnchips\"\n else\n food[\"category\"] << \"seafood\"\n food[\"type\"] = \"fish restaurant\"\n end\n end\n return food\n end", "def categories\n Hash[self.class.catalogs.map { |fld, klass|\n name = fld.gsub(/_id$/, '_name');\n [fld, {:id => self.send(fld), :name => self.send(name)}] rescue nil\n }.reject {|cat| cat.nil?}]\n end", "def category_of_subject subject, &block\n found_cat = categories.select {|cat| cat.include? subject }\n yield found_cat if cat && block\n found_cat\n end", "def just_like_orange_sub\n # Find the breed of the cat named 'Orange'. 
\n # Then list the cats names and the breed of all the cats of Orange's breed.\n # Exclude the cat named 'Orange' from your results.\n # Order by cats name alphabetically.\n\n # USE A SUBQUERY\n execute(<<-SQL)\n SELECT cats.name, cats.breed\n FROM cats\n WHERE cats.breed = (\n SELECT cats.breed\n FROM cats\n WHERE cats.name = 'Orange'\n ) AND cats.name != 'Orange'\n ORDER BY cats.name;\n SQL\nend", "def extractFacetCategories( text )\n # initialize XML document for parsing\n doc = REXML::Document.new( text )\n\n # extract all facet categories and facet data from the XML attributes\n facets = Hash.new\n doc.elements.each( '/document/attributes/attribute' ) do |element|\n element_data = element.text\n type_attr = element.attribute( \"type\" ).to_s\n if( type_attr =~ /title/ )\n facets['title'] = element_data\n elsif( type_attr =~ /year/ )\n facets['year'] = element_data\n end\n end\n\n doc.elements.each( '/document/facets/facet' ) do |element|\n element_data = element.text\n type_attr = element.attribute( \"type\" ).to_s\n if( type_attr =~ /technology/ )\n facets['technology'] = element_data\n elsif( type_attr =~ /company/ )\n facets['company'] = element_data\n elsif( type_attr =~ /person/ )\n facets['person'] = element_data\n elsif( type_attr =~ /organization/ )\n facets['organization'] = element_data\n elsif( type_attr =~ /city/ )\n facets['city'] = element_data\n elsif( type_attr =~ /provinceorstate/ )\n facets['state'] = element_data\n end\n end\n \n facets.merge! extract_location_info( doc )[:facets]\n\n return facets\n end", "def recipe_grep name='.'\n @recipe.select{ |r| r.name === name }\n end", "def categories(body, headers, expectation = nil)\n expectation ||= Occi::Core::Category\n logger.debug \"Parsing #{expectation} from #{body.inspect} and #{headers.inspect}\" if logger_debug?\n\n cats = transform(body, headers).map do |line|\n cat = Text::Category.plain_category(line, false)\n lookup \"#{cat[:scheme]}#{cat[:term]}\", expectation\n end\n\n setify(cats)\n end", "def name_filter(column, filter)\n value = filter[:value].to_s.parameterize.split('-')\n\n regex = value.map do |v|\n if v =~ /^\\d+$/\n roman = RomanNumerals.to_roman(Integer v).downcase\n v = \"(#{v}|#{roman})\"\n end\n # [[:<:]] begining of a word\n '[[:<:]]' + v + '.*?'\n end.join\n\n sanitize_sql_array([\"name_slug ~ ?\", regex])\n end", "def name_filter(results, search)\n words = search.split(' ')\n for i in 0...words.size\n results = results.where(\"UPPER(name) LIKE ?\", \"%#{words[i]}%\")\n end\n return results\n end", "def categories\n if self.channel.generator == 'Twitter'\n return @item.title.scan(/#([^#\\s]+)/).flatten\n end\n return [] if @item.categories.empty?\n @item.categories.collect{|c| c.content}.reject{|c| c == '' || c.match(/^\\s+$/)}\n end", "def categories_given_items(items)\n\n categorized_items = Array.new\n\n items.each{ |item|\n sub_array = categorized_items.detect{ |sub_item_array| sub_item_array[0].name == item.name }\n if sub_array != nil\n sub_array.push(item)\n else\n new_sub_array = Array.new\n new_sub_array.push(item)\n categorized_items.push(new_sub_array)\n end\n }\n categorized_items\n end" ]
[ "0.6640181", "0.64921373", "0.64035004", "0.6203284", "0.597015", "0.59113747", "0.58240163", "0.56399596", "0.5637851", "0.56202656", "0.5564101", "0.55602276", "0.55597395", "0.55504453", "0.5531424", "0.5494935", "0.54743546", "0.544384", "0.5442288", "0.5437962", "0.54340506", "0.54320097", "0.5431093", "0.5422292", "0.5416466", "0.5400377", "0.5396506", "0.5394435", "0.5359895", "0.53519815", "0.529567", "0.5290551", "0.5271324", "0.5249885", "0.52473396", "0.52444506", "0.5226448", "0.52249414", "0.5219298", "0.5218998", "0.5207495", "0.519929", "0.5198651", "0.5196653", "0.5186559", "0.5177169", "0.5172592", "0.51650774", "0.51516277", "0.51151264", "0.51108545", "0.5108815", "0.51035416", "0.50973874", "0.5094136", "0.5093024", "0.50337595", "0.50246793", "0.5021576", "0.5016748", "0.50165886", "0.5004607", "0.49895185", "0.49826035", "0.49823394", "0.4982282", "0.49800527", "0.49750754", "0.49699643", "0.49688575", "0.4955496", "0.49545175", "0.49526328", "0.49486825", "0.4946496", "0.49370244", "0.49234158", "0.49225688", "0.49181968", "0.4916379", "0.49121037", "0.49117428", "0.4908795", "0.49013183", "0.48989257", "0.48965168", "0.48909634", "0.48887706", "0.48860815", "0.48822123", "0.48785758", "0.4876643", "0.48755646", "0.48699763", "0.48648843", "0.4860985", "0.48557055", "0.48535767", "0.48518205", "0.4847742", "0.48401678" ]
0.0
-1
specifies the link to be excluded in scraping in order of appearance from top to bottom
def exclude_links(*array) @excluded_link_indexes = array end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ignore_links\n @link_rules.reject\n end", "def skip_link!\n raise(SkipLink)\n end", "def assert_absence_of_non_kamishibai_links(options = {})\n ignores = options[:except] || []\n ignores += accepted_non_kamishibai_links\n\n css_select(\"a\").each do |anchor|\n href = anchor.attributes[\"href\"]\n next if ignores.detect{|regexp| href =~ regexp}\n method = anchor.attributes[\"method\"] || anchor.attributes[\"data-method\"]\n next if !method.blank? && method.upcase != \"GET\"\n if @request.user_agent != user_agent_strings_for(:galapagos)\n if href !~ /^http/ && href !~ /^#/\n assert false, \"Non-kamishibai local link for #{href} in #{@templates.keys}\"\n end\n else\n if href =~ /^#!_/\n assert false, \"Kamishibai local link advertantly for #{href} in #{@templates.keys}\"\n end\n end\n end\n end", "def disable_link\n @disable_link = true\n end", "def skip_link?(link)\n @skip_link_patterns.any? { |pattern| link.path =~ /#{pattern}/ }\n end", "def skip_link?(link)\n @skip_link_patterns.any? { |pattern| link.path =~ /#{pattern}/ }\n end", "def ignore_urls\n @url_rules.reject\n end", "def check_only_links\n end", "def skip_link?(link)\n @skip_link_patterns.each { |p| return true if link.path =~ p}\n return false\n end", "def skip_link?(link)\n @skip_link_patterns.any? { |pattern| link.path =~ pattern }\n end", "def skip_link?(link)\n @skip_link_patterns.any? { |pattern| link.path =~ pattern }\n end", "def skip_url(url)\n # domains to skip\n # www.britannica.com - sometimes has ad window that overlays. \n # www.montereybayaquarium.org - redirects to itself, which requires back twice to return to search results\n # www.livescience.com - causes intermittent timeouts.\n urls = [\n 'www.britannica.com', 'www.montereybayaquarium.org', 'www.livescience.com'\n ]\n for u in urls\n return true if url.include? u\n end\n return false\n end", "def excluded?(url)\n url = safe_unescape(url)\n @skip_list.each do |entry|\n return entry.last if url.include? entry.first\n return entry.last if entry.first == NULL_MATCH\n end\n\n false\n end", "def feed_link_should_not_be_highlighted(feed)\n within '#sidebar #folders-list' do\n expect(page).not_to have_css \"a[data-feed-id='#{feed.id}'].highlighted-link\"\n end\nend", "def reject_links(rel, ignore: [], &blk)\n blk ||= ->(_target){true}\n\n (candidates, safe)= repr\n .all_links\n .partition{|link| link.rel?(rel) }\n\n selected = candidates.reject(&link_checker(blk, ignore))\n\n new_repr = Representation.new(repr.href,\n repr.properties,\n safe+selected,\n repr.hal_client)\n\n self.class.new(new_repr, orig_repr)\n end", "def reject_social_links(attributes)\n attributes[\"url\"].blank? && !self.social_links.where(site: attributes[\"site\"]).exists?\n end", "def custom_link_to_unless(*args,&block)\n args.insert 1, capture(&block) if block_given?\n link_to_unless *args\n end", "def skip_link?(link)\n link.nil? or link.empty? 
or link =~ %r{^(#|[\\w-]+:(?!//))} or\n (link =~ /\\.(\\w{2,4})([?#]|\\Z)/ && SKIP_EXTENSIONS.include?($1.downcase))\n end", "def nonadword_urls(page)\n\t\tnonadwords_xpath(page).inject([]) do |result, adword|\n\t\t\tresult << fetch_nonadword_url(adword)\n\t\tend\t\t\n\tend", "def filterProviderOffer(links)\n useless_links = links.pop #take the offers\n links.delete(useless_links)\n links\n end", "def noreferrer?; end", "def unconnected_links\n\t\tLink.available.select { |l| links.find_by(provider: l[:link_name] || l[:name].downcase) == nil }\n\tend", "def unglobally_exclude_tag_link(tag)\n whether_hidden = !current_user.globally_excluded?(tag) ? \" hidden\" : \"\";\n link_to_remote t(\"winnow.tags.main.unglobally_exclude_tag\"),\n :url => unglobally_exclude_tag_path(tag),\n :method => 'put',\n :confirm => t(\"winnow.tags.main.unglobally_exclude_tag_confirm\", :tag => tag.name),\n :html => {\n :title => t(\"winnow.tags.main.unglobally_exclude_tag_tooltip\", :tag => tag.name),\n :class => \"unglobally_exclude_tag\" + whether_hidden,\n :id => dom_id(tag, 'unglobally_exclude')\n }\n end", "def prune_links\n links = []\n result = self.perform\n ft_links = result.ft_links\n ft_links.each do |ft_link|\n http = Curl.get(ft_link)\n doc = Nokogiri::HTML(http.body_str)\n link = doc.xpath('//*[@id=\"copy_paste_links\"]').children.first.to_s.chomp\n links.push link if link.empty? == false\n end\n links\n end", "def globally_exclude_tag_link(tag)\n whether_hidden = current_user.globally_excluded?(tag) ? \" hidden\" : \"\";\n link_to_remote t(\"winnow.tags.main.globally_exclude_tag\"),\n :url => globally_exclude_tag_path(tag),\n :method => 'put',\n :html => {\n :title => t(\"winnow.tags.main.globally_exclude_tag_tooltip\", :tag => tag.name),\n :class => \"globally_exclude_tag\" + whether_hidden,\n :id => dom_id(tag, 'globally_exclude')\n }\n end", "def hide_filter\n perm_links = ministry.lmi_hide.map { |lmi| \"lmi_total_#{lmi}\" }\n query = table[:perm_link].does_not_match(\"%_custom_%\")\n query = query.and(table[:perm_link].not_in(perm_links)) if perm_links.any?\n query\n end", "def exclude; end", "def what_links_here_except_obsolete_combinations\n taxon.what_links_here.all.reject do |item|\n item.table == 'taxa' &&\n item.field == :current_taxon_id &&\n item.id.in?(obsolete_combinations_ids)\n end\n end", "def skip_links_like(*patterns)\n @skip_link_patterns.concat [patterns].flatten.compact\n self\n end", "def folder_link_should_not_be_highlighted(folder)\n if folder == 'none'\n id = 'none'\n else\n id = folder.id\n end\n within \"#sidebar #folders-list #folder-#{id}\" do\n expect(page).not_to have_css \"a[data-feed-id='all'].highlighted-link\"\n end\nend", "def uncrawled_urls(limit: 0, skip: 0, &block)\n urls(crawled: false, limit: limit, skip: skip, &block)\n end", "def uncrawled_urls(limit: 0, skip: 0, &block)\n urls(crawled: false, limit: limit, skip: skip, &block)\n end", "def excluded; end", "def strip_links(html); end", "def strip_links(html); end", "def strip_links(html); end", "def filter_links(links)\n filtered_links = links.map do |link|\n next unless link.include?(\"data\") #TODO: Extract into query parameter\n link\n end.compact\n\n #TODO: Extract into exclusion array parameter - Could I pull in file size and exclude equal size files?\n filtered_links -= [\"/pub/time.series/ch/ch.data.0.Current\"] #Duplicate of ch.data.1.AllData - 3.3 GB\n filtered_links -= [\"/pub/time.series/cs/cs.data.0.Current\"] #Duplicate of cs.data.1.AllData - 3.1 GB\n\n filtered_links.reject { |link| link =~ 
/(dataset|datatype|dataelement|dataclass|data.type|data_type|tdata|dataseries)/ }\n end", "def extract_desired_links(links)\n links.reduce([]) do |subset, page| \n subset << page if whitelisted_page? page\n subset\n end\n end", "def non_http\n @non_http ||= all.select { |link| link !~ /^http(s)?:\\/\\//i}\n end", "def skip_links_like(*patterns)\n if patterns\n patterns.each do |pattern|\n @skip_link_patterns << pattern\n end\n end\n self\n end", "def invalidate_link link\n @links.delete(link.condition)\n \n end", "def strip_google_tracking_links(doc)\n doc.css(\"a\").each do |node|\n href = node.attr(\"href\").to_s\n next if href.blank?\n\n query_string = URI.parse(href).query\n actual_url = Rack::Utils.parse_nested_query(query_string)[\"q\"]\n\n node[\"href\"] = actual_url\n end\n end", "def assert_links_visibility(should_be_visible, skip_links: [])\n links = %w[previous_step help_sign_up save_and_exit] - skip_links\n\n links.each do |link_name|\n action = should_be_visible ? :to : :not_to\n expect(rendered).send(action, have_selector('a', text: l10n(link_name)))\n end\n end", "def entry_should_not_be_highlighted(entry)\n entry_should_be_visible entry\n within \"#feed-entries #entry-#{entry.id}\" do\n expect(page).not_to have_css 'a.open-entry-link.highlighted-entry'\n expect(page).not_to have_css 'i.fa-caret-right.current-entry', visible: true\n end\nend", "def skip_links_like(*patterns)\n @skip_link_patterns.concat [patterns].flatten.compact.map { |x| x.source }\n self\n end", "def skip_links_like(*patterns)\n @skip_link_patterns.concat [patterns].flatten.compact.map { |x| x.source }\n self\n end", "def skip_links_like(*patterns)\n @skip_link_patterns.concat [patterns].flatten.compact.map { |x| x.source }\n self\n end", "def should_skip(url)\n skip = false\n\n ::Crawl::SkipList::SKIP_URLS_REGEX.each do |regex_to_skip|\n skip = true if url.host =~ regex_to_skip\n end\n\n return skip\n end", "def drop_link\n return @drop_link if @drop_link\n to_link = @to_link\n \n # continue along the road (to_link and further down) as long as\n # no choices must be made between roads\n # (method: more connectors may connect a road to the same downstream road)\n while to_link.outgoing_connectors.map{|conn|conn.to_link}.uniq.size == 1\n to_link = to_link.outgoing_connectors.first.to_link\n end\n @drop_link = to_link\n end", "def clear_link(skip_requesting = false)\n link.skip_requesting = skip_requesting\n self.link = nil\n end", "def assert_not_first_page\n assert_link PREV_LABEL\n end", "def bad_urls\n result = []\n @pages.each do |page|\n result << page.hlu\n end\n result.compact!\n end", "def unlinked\n reject(&:linked?)\n end", "def garantir_link\n if link.present?\n link_params = link\n _link = link_params.split('/').reject { |l| l.blank? || l == 'http:' }\n _link[0].sub!(/s.|[^.]*.|\\s./, '') if _link[0].split('.').length == 3\n if ['herokuapp.com', 'codepen.io'].include? _link[0]\n link_params.gsub!('/' + _link[2] + '/', '/debug/')\n link_params.remove!('?' 
+ _link[-1].split('?')[-1])\n elsif !_link[0]['.com'].try(:presence?)\n # sem link\n link_params.sub!('/', '') if _link[0] == '/'\n else\n errors.add(:link, 'Não é permitido.')\n return false\n end\n self.link = link_params\n end\n end", "def start_link_should_not_be_highlighted\n within '#sidebar' do\n expect(page).not_to have_css 'a#start-page.highlighted-link'\n end\nend", "def pretend_links_do_not_exist\n test_mode && remove_old_links\n end", "def skip_query_string?(link)\n @opts[:skip_query_strings] && link.query\n end", "def skip_query_string?(link)\n @opts[:skip_query_strings] && link.query\n end", "def check_answer_no_link_list(url:, question:, answers:, name:)\n render(\n 'shared/check_answers/no_link_items',\n name: name,\n url: url,\n question: question,\n answers: answers\n )\n end", "def skip_query_string?(link)\n @opts[:skip_query_strings] && link.query\n end", "def skip_query_string?(link)\n @opts[:skip_query_strings] && link.query\n end", "def extractLinks(page)\n\tbase_wiki_url = \"https://en.wikipedia.org\"\n\tlinks = page.search(\"//a\")\n\tlinks = links.map{|item| item[\"href\"]}\n\n\t#Appending with base_wiki_page to make it full fledged page.\n\tlinks = links.map{|link| base_wiki_url+link.to_s}\n\n\treturn stripUnwantedLinksBasedOnCondition(links)\nend", "def skip_page(site, page, message)\r\n site.collections[rp_key].docs.delete page.page\r\n Jekyll.logger.warn \"Skipping #{page.basename}: #{message}\"\r\n end", "def i_dont_reach(which_page)\n result = go(which_page)\n assert (Net::HTTPNotFound===result or Net::HTTPForbidden===result), __last_because + \" (can reach #{which_page}: #{result})\"\n end", "def remove_oneway_links\n @links.delete_if { |k,l|\n l.oneway?\n }\n end", "def should_not_include_noreferrer?\n config = @target_blank_config\n case config\n when nil, NilClass\n false\n else\n noreferrer = config.fetch(\"noreferrer\", true)\n if noreferrer == false\n return true\n else\n return false\n end\n end\n end", "def pi_j_a(j, a)\n @classes[j].select{|state| !state.back_links.select{|link| link[0] == a}.empty?}\n end", "def link_to_remote_unless(condition, name, options = {}, html_options = nil, &block)\n link_to_remote_if !condition, name, options, html_options, &block\n end", "def skip_after; end", "def no_item_thumb_links\n no_column.links(:class => \"img\", :href => /.*/) # ---------------------------------- EVERYTHING USING THIS NEEDS REPLACING!!!!!!!!\n end", "def strip_links\n gsub(%r{</?a.*?>}, \"\")\n end", "def valid_posts\n items.keep_if { |item| item.link != fetch_expanded_url }.shuffle!\n end", "def crawl\n while NG_URL.where(:a_hrefs_unprocessed => { :$not => { :$size => 0}}).count > 0 do\n next_unprocessed_url\n end\n end", "def skips; end", "def html_filter_annotate_bare_links\n @html.search('a[@href]').each do |node|\n href = node.attributes['href'].content\n text = node.inner_text\n\n next unless href == text || href[0] == '#' ||\n CGI.unescapeHTML(href) == \"mailto:#{CGI.unescapeHTML(text)}\"\n\n node.set_attribute('data-bare-link', 'true')\n end\n end", "def clarify_external_links(hpricot)\n (hpricot/\"a\").each {|link| link.set_attribute('class', 'external') and link.set_attribute('target', '_new') unless link.attributes['href'].match(/:\\/\\/(\\w+\\.)?juscribe\\.com(\\/[-_\\/\\?\\w]*)?$/)} # Test forging with juscribe.com.hacker.com\n end", "def entry_should_not_be_visible(entry)\n expect(page).to have_no_css \"#feed-entries #entry-#{entry.id}\"\nend", "def unwrap_links(elements); end", "def url(link)\n '#'\n end", "def noreferrer?\n 
rel?('noreferrer')\n end", "def extract_links\n content.css('a').map { |a| a['href'] unless a['href'] == '#' }.compact.uniq\n end", "def skip\n @page_handler.skip\n end", "def no_index\n set_meta_tags nofollow: true\n end", "def disallowNavigation\n self.navigationAllowed = false\n end", "def disallowNavigation\n self.navigationAllowed = false\n end", "def link_to_unless(condition, name, options = {}, html_options = {}, &block) # :nodoc:\n condition ? content_tag(:span, name, html_options) : link_to(name, options, html_options, &block)\n end", "def parse_skip_token(next_link)\n return nil unless next_link\n next_link[/.*?skipToken=(.*?)$/i, 1]\n end", "def assert_not_last_page\n assert_link NEXT_LABEL\n end", "def non_existent_group_links\n source_project.project_group_links\n .where.not(group_id: group_links_in_target_project)\n end", "def get_internal_links(doc)\n doc.internal_full_links\n .map(&:without_anchor) # Because anchors don't change page content.\n .uniq\n .reject do |link|\n ext = link.to_extension\n ext ? !%w[htm html].include?(ext) : false\n end\n end", "def unfollowed_events\n events.where(visited: false).select do |event|\n !(event.next_event? || event.choices?)\n end\n end", "def visit_link?(link, from_page = nil)\n [email protected]_page?(link) &&\n !skip_link?(link) &&\n !skip_query_string?(link) &&\n allowed(link) &&\n !too_deep?(from_page)\n end", "def visit_link?(link, from_page = nil)\n [email protected]_page?(link) &&\n !skip_link?(link) &&\n !skip_query_string?(link) &&\n allowed(link) &&\n !too_deep?(from_page)\n end", "def reject_related(rel, ignore: [], &blk)\n reject_embedded(rel, ignore: ignore, &blk).reject_links(rel, ignore: ignore, &blk)\n end", "def set_exclude_from_analytics\n @exclude_from_analytics = request.env['HTTP_USER_AGENT'].try(:match, /http/i) || request.remote_ip == \"68.108.56.31\" || (!current_user.nil? && current_user.email.match(/bluefields.com/i))\n end", "def i_see_link(opts = nil)\n assert has_link?(opts), __last_because + \" (dont see link <a #{opts.inspect})\"\n end", "def i_have_seen_this_url_before?(url) \n @known_urls.include?(url.remove_fragment)\n end", "def reject_embedded(rel, ignore: [], &blk)\n blk ||= ->(_target){true}\n\n (embedded, links) = repr\n .all_links\n .partition(&:embedded?)\n\n (candidates, safe)= embedded\n .partition{|link| link.rel?(rel) }\n\n selected = candidates.reject(&link_checker(blk, ignore))\n\n new_repr = Representation.new(repr.href,\n repr.properties,\n links+safe+selected,\n repr.hal_client)\n\n self.class.new(new_repr, orig_repr)\n\n end", "def skips_post \n @skips_post\n end", "def reject_link(hash)\n p hash\n hash.each do |key|\n unless key=='user_id'\n return hash[key].blank?\n end\n end \n # reject if not real link ids (either)\n end" ]
[ "0.7324492", "0.6665341", "0.6572314", "0.6433892", "0.64058566", "0.64058566", "0.63779813", "0.63513035", "0.6261599", "0.6244386", "0.6244386", "0.61454684", "0.6128934", "0.6115231", "0.6086929", "0.60516155", "0.60129535", "0.60029626", "0.5953386", "0.59207606", "0.5916218", "0.590083", "0.58473104", "0.5806026", "0.5801504", "0.58004904", "0.58004355", "0.5780765", "0.577868", "0.57742214", "0.5736036", "0.5736036", "0.5722643", "0.57110244", "0.57110244", "0.57110244", "0.56633246", "0.5653653", "0.56414986", "0.5634098", "0.56283265", "0.56197816", "0.5604057", "0.55899054", "0.558745", "0.558745", "0.558745", "0.55819196", "0.55765533", "0.5564528", "0.5559991", "0.5552989", "0.5520544", "0.55030537", "0.550059", "0.5496763", "0.54654247", "0.54654247", "0.5465266", "0.5463655", "0.5463655", "0.54206663", "0.541081", "0.537889", "0.53650755", "0.53521895", "0.53508866", "0.5350092", "0.5345138", "0.53394973", "0.5336401", "0.5331949", "0.53311807", "0.53084433", "0.5303558", "0.5300318", "0.5300104", "0.5299798", "0.52967507", "0.5281246", "0.5280354", "0.5275097", "0.5255093", "0.5246393", "0.5246393", "0.5245932", "0.52443683", "0.52346367", "0.5215181", "0.52109146", "0.52083945", "0.5203105", "0.5203105", "0.52027917", "0.52002114", "0.51907897", "0.5183388", "0.5182383", "0.5181878", "0.51785606" ]
0.6323119
8
used to specify subcategories. see Fetcher.categories
def categories(options=nil, &definition) if options if options[:for] @embed_for = options[:for] end end @embedded = CategoryScraper.new @fetcher_class,{:main_url => @main_url}, &definition end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def subcategories\n @subcategories ||= Category.find_all_subcategories(category_name)\n end", "def sub_categories(params = {})\n @sub_categories ||= client.categories.all(params.merge(parent: id))\n end", "def index\n @subcategories = @category.subcategories\n end", "def get_subcategories\r\n sub_categories = Taxonomy.get_subcategories\r\n render json: sub_categories, root: 'categories', adapter: :json, status: :ok\r\n end", "def populate_category\n\t\t\t\tif params[:purpose] == \"category\"\n\t\t\t\t\t\tcategory = Category.find(params[:category_id])\n\t\t\t\t\t\t@sub_categories = category.sub_categories\n\t\t\t\telsif params[:purpose] == \"sub_category\"\n\t\t\t\t\t\tsub_category = SubCategory.find(params[:category_id])\n\t\t\t\t\t\t@inner_categories = sub_category.inner_categories\n\t\t\t\tend\n\t\tend", "def categories\r\n return handle_resource_not_found unless @article_category\r\n\r\n add_category_breadcrumbs(@article_category)\r\n if @article_category.leaf?\r\n @title_text = @article_category.display_text\r\n @articles = fetch_articles(order_by, nil, :conditions => {:article_category_id => @article_category.id}, :include => [:user, :article_category])\r\n @other_recent_posts = @article_category.parent.all_recent_posts(:exclude_category => @article_category) if @article_category.parent\r\n render 'articles/subcategories'\r\n else\r\n return unless check_category_for_stale\r\n @subcategories_with_posts = @article_category.children.with_published_articles\r\n render 'articles/categories'\r\n end\r\n end", "def all_sub_categories(params = {})\n @all_sub_categories ||= sub_categories(params) + sub_categories.flat_map do |sub|\n sub.all_sub_categories(params)\n end\n end", "def sub_category\n @sub_category = SubCategory.new\n @sub_categories = SubCategory.find_all\n end", "def to_categories(sub_context)\n Array(sub_context).map { |id|\n categories[id] \n }.compact\n end", "def set_categories\n new_categories = extract_categories_from_url\n unless new_categories.nil?\n @categories = new_categories\n else\n @categories = Type::DefaultCategories\n end\n end", "def apply_category_filter\n top_level_category_ids = Category.top_level.\n where(id: @category_ids).\n pluck(:id)\n subcategory_ids = @category_ids +\n Category.where(parent_id: top_level_category_ids).pluck(:id)\n\n subcategory_ids\n end", "def all_categories\n end", "def categories\n add_to_query restrict_kind: 'category'\n end", "def categories=(value)\n @categories = value\n end", "def set_sub_category\n @sub_category = SubCategory.find(params[:id])\n end", "def set_sub_category\n @sub_category = SubCategory.find(params[:id])\n end", "def subcategories_json\n {id: id, name: name, depth: depth}\n end", "def subcategories_json\n {id: id, name: name, depth: depth}\n end", "def categories=(value)\n @categories = value\n end", "def categories=(value)\n @categories = value\n end", "def categories=(value)\n @categories = value\n end", "def set_sub_sub_category\n @sub_sub_category = SubSubCategory.find(params[:id])\n end", "def category= arg\n cat_arr = arg.to_s.split(':')[0..@depth-1] || []\n @category = cat_arr.join(':').to_s\n end", "def set_category\n end", "def show\n\t\t@category = Category.find(params[:id])\n\t\t@sub_categories = @category.sub_categories\n\tend", "def getCategories(_, _, _)\n @db.categories\n end", "def get_categories(add_params = nil)\n params = {\n }\n api_call('/global/categories(.:format)',:get,params,add_params)\n end", "def getAllSubcategories(reset=false) \n reset ? 
@allSubcategories = [self] + self.setAllSubcategories() : \n @allSubcategories ||= [self] + self.setAllSubcategories() \nend", "def categories=(value)\n\t\t@categories = value\n\tend", "def categories=(value)\n\t\t@categories = value\n\tend", "def categories=(value)\n\t\t@categories = value\n\tend", "def get_subc(category)\n retval = []\n #Category.where(category_id: category.id).each do |c| \n category.categories.each do |c| \n retval += [c] + get_subc(c)\n end\n retval\n end", "def add_categories(collection)\n\tcategory = \"\"\n\tcollection[\"results\"][\"collection1\"].each do |article|\n\t\tif article[\"category\"] == \"\"\n article[\"category\"] = category\n else\n category = article[\"category\"]\n end\n\tend\nend", "def categories\n category\n end", "def set_category(category_id, subcategory_id)\n @report.category_type = category_id != 'none' ? :category : :subcategory\n\n sum_all = category_id == 'sum_all' || subcategory_id == 'sum_all'\n\n if @report.category_type == :category\n categories = category_id == 'iterate_all' ? Category.all : Category.where(id: category_id)\n else\n categories = subcategory_id == 'iterate_all' ? Subcategory.all : Subcategory.where(id: subcategory_id)\n end\n\n @report.categories = Filter.new(categories, sum_all)\n end", "def channel_categories\n build :channel_categories, :using => data_for(:channel_categories)\n end", "def categories(options = {})\n fetch_categories.at('categories').children_of_type('category').inject([]){ |r, i| r << parse_single_category_xml(i) }\n\tend", "def set_suba_category\n @suba_category = SubaCategory.find(params[:id])\n end", "def fetch_categories\r\n page = File.open(\"#{Rails.root}/public/Categories.html\") { |f| Nokogiri::HTML(f) }\r\n Category.save_fetched_categories_and_subcategories(page)\r\n end", "def categories\n categories = Array.new\n unless self.category.nil?\n categories << self.category\n categories += self.category.ancestors\n end # unless\n categories.reverse\n end", "def categories_list\n self.categories.collect{|c| c.name}\n end", "def set_sub_category_type\n @sub_category_type = SubCategoryType.find(params[:id])\n end", "def main_categories\n categories.to_a[0..3]\n end", "def set_sub2_category\n @sub2_category = Sub2Category.find(params[:id])\n end", "def categories_for(race)\n [ race.category ] + race.category.descendants\n end", "def index\n @sub_categories = SubCategory.all\n end", "def categories\n []\n end", "def index\n @sub_sub_categories = SubSubCategory.all\n end", "def categories\n # {{{\n if !@categories then\n inherited_cats = []\n if respond_to?(:parent_groups) then\n inherited_cats = parent_groups.map { |g| g.categories }.flatten\n end\n own_cats = User_Category.all_with(User_Category.user_group_id == user_group_id).sort_by(:category_name, :asc)\n @categories = inherited_cats + own_cats.to_a\n end\n @categories\n end", "def set_subcategorium\n @subcategorium = Subcategorium.find(params[:id])\n end", "def subcategories\n\t\trespond_to do |format|\n\t \tformat.json {\n\t \t\tparam = params[:payload]\n\n\t\t\t\tcategory = RequirementCategory.find_by_id(param[:target_id])\n\t\t\t\tputs category.id\n\n\t\t\t\t@subcategory = category.requirement_subcategories\n\t \t\trender :json => @subcategory\n\t \t}\n\t end\t\t\n\tend", "def index\n @cp_subcategories = Subcategory.all\n @title = \"Subcategories\"\n end", "def set_subcategoriaproduto\n @subcategoriaproduto = Subcategoriaproduto.find(params[:id])\n end", "def set_categories\r\n @categories = Category.where('courses_count > 0').order(:name)\r\n 
end", "def index\n @subcategories = Subcategory.all\n @categories = Category.all\n end", "def set_sub_catagory\n @sub_catagory = SubCatagory.find(params[:id])\n end", "def sub_category_link(sub_category, category)\n if category.respond_to?('each')\n category = category[0]\n end\n '<a href=\"/'+category+'/'+sub_category+'/\">'+sub_category.upcase+'</a>'\n end", "def all_categories(shortlist = false)\n\t\tall = shortlist ? GLOBAL_CATEGORIES[0..3] : GLOBAL_CATEGORIES\n\t\t#all << 'An extra category'\n\tend", "def categories\n pages = page.children.all(\n :conditions => { :class_name => 'ShopCategoryPage' },\n :order => 'pages.position ASC'\n ).map(&:shop_category)\n end", "def by_category\n @category = Category.roots.find_by_slug(params[:category])\n raise ListingException, \"missing category\" if @category.blank?\n @subcategory = @category.children.find_by_slug(params[:subcategory]) if params[:subcategory].present?\n terms = [ListingFilter.category(@subcategory.present? ? @subcategory.id : @category.id), ListingFilter.state('active')]\n query = {filter: {bool: {must: terms}}, sort: {id: \"desc\"}}\n @listings = Listing.search(query).page(page).per(per).records\n\n @subcategories = @category.children.with_listings\n\n @title = [@category.name, @subcategory.try(:name)].compact.join(\" : \") + \" | Category\"\n\n respond_to do |format|\n format.html { render(action: :index, layout: !request.xhr?) }\n end\n end", "def index\n @subcategories = Subcategory.all\n end", "def index\n @subcategories = Subcategory.all\n end", "def categories\n @categories ||= (@doc/\"Category\").collect { |it| Element.new(it) }\n end", "def index\n params.permit(:category_id)\n if params[:category_id]\n @subcategories = Subcategory.where(category_id: params[:category_id])\n else\n @subcategories = Subcategory.all\n end\n end", "def category_ids\n @new_category_ids or super\n end", "def categories\n Category.categories_for_movie self\n end", "def category\n @category = NewsCategory.find_by! 
slug: params[:category_slug]\n @collection = News.of_type(:news).in_category(@category).page_for_visitors(current_page)\n end", "def getCategories()\n\t\tcat = Array.new\n\t\tcat.push(\"heroku\")\n\t\tcat.push(\"go\")\n\t\tcat.push(\"github\")\n\t\tcat.push(\"docker\")\n\t\tcat.push(\"css\")\n\t\tcat.push(\"apache\")\n\t\tcat.push(\"html\")\n\t\tcat.push(\"bootstrap\")\n\t\tcat.push(\"java ee\")\n\t\tcat.push(\"javafx\")\n\t\tcat.push(\"java\")\n\t\tcat.push(\"jquery\")\n\t\tcat.push(\"mips\")\n\t\tcat.push(\"c++\")\n\t\tcat.push(\"laravel\")\n\t\tcat.push(\"linux\")\n\t\tcat.push(\"opengl\")\n\t\tcat.push(\"sml\")\n\t\tcat.push(\"javascript\")\n\t\tcat.push(\"mongo db\")\n\t\tcat.push(\"c\")\n\t\tcat.push(\"yacc\")\n\t\tcat.push(\"circuit\")\n\t\tcat.push(\"php\")\n\t\tcat.push(\"mysql\")\n\t\tcat.push(\"node js\")\n\t\tcat.push(\"photoshop\")\n\t\tcat.push(\"rails\")\n\t\tcat.push(\"postgres\")\n\t\tcat.push(\"ruby\")\n\t\tcat.push(\"redis\")\n\t\tcat.push(\"mac osx\")\n\t\tcat.push(\"sass\")\n\t\tcat.push(\"ubuntu\")\n\t\tcat.push(\"bower\")\n\t\tcat.push(\"wordpress\")\n\t\tcat.push(\"css\")\n\t\tcat.push(\"hosted\")\n\t\tcat.push(\"python\")\n\t\tcat.push(\"maven\")\n\t\tcat.push(\"maven mojo\")\n\t\tcat.push(\"composer\")\n\t\tcat.push(\"mips\")\n\t\tcat.push(\"gulp\")\n\t\tcat.push(\"grunt\")\n\t\tcat.push(\"phpstorm\")\n\t\tcat.push(\"react\")\n\t\tcat.push(\"swift\")\n\t\tcat.push(\"wordpress\")\n\t\tcat.push(\"tomcat\")\n\t\tcat.push(\"redis\")\n\t\tcat.push(\"travis\")\n\t\treturn cat\n\tend", "def index\n @sub2_categories = Sub2Category.all\n end", "def categories(*values)\n values.inject(self) { |res, val| res._categories(val) }\n end", "def categories(*values)\n values.inject(self) { |res, val| res._categories(val) }\n end", "def categories_for(race)\n case race.name\n when \"Junior Men\", \"Junior Women\"\n [ Category.find_or_create_by(name: race.name) ]\n else\n super race\n end\n end", "def set_categories\n\t\t@categories = Category.all\t\n\tend", "def categories\n response[\"categories\"].map!{|category| Foursquared::Response::Category.new(client, category)} if response[\"categories\"]\n end", "def multiple_categories\n @categories = Category.all\n end", "def get_related_categories(query='', results=10, start=0)\r\n get_related('Categories',query, results, start)\r\n end", "def get_category(subcategory_id)\n category_id = Subcategory.find(subcategory_id).category_id\n Category.find(category_id)\n end", "def categories_items\n all_items = self.all_active_items\n categories_given_items(all_items)\n end", "def scrape_categories(options = {}) # :nodoc:\n if self.class.category_scrapers\n self.class.category_scrapers.inject([]){|cats,scraper| cats + scraper.scrape(options)}\n end\n end", "def categories\n if nodes = @node.xpath(\"category\")\n nodes.map { |node| RSSCategory.new(node) }\n end\n end", "def categories\n taxonomies.all :conditions => { :classification => :category }\n end", "def _category\n @_category ||= if category\n category\n elsif special_category\n special_category\n else\n nil\n end\n end", "def get_categories\n @redis.smembers category_collection_key\n end", "def categories\n data['JobCategory'] || []\n end", "def _build_category_list\n raw_categories = CatAPI.get_categories\n category_list = raw_categories.map {|category| category['name']}.sort\n # the \"kittens\" category is empty, and never returns photos\n category_list.delete(\"kittens\")\n return category_list.unshift(\"your favorites\")\nend", "def categories\n rpg_shop.handled_categories\n end", 
"def categories(args={})\n res = api_request('private.request.getCategories', 'GET')\n res = JSON.parse(res)['category'] rescue []\n\n unless args[:include_deleted] and args[:include_deleted] == true\n res.reject!{|k, v| v['fDeleted'] == '1'} rescue []\n end\n \n return res\n end", "def get_categories( parent = nil )\n\t\t\n\t\tcall = 'GetCategories'\n\n\t\tif parent.nil?\n\t\t\tbody = <<-END\n\t\t\t\t<CategorySiteID>0</CategorySiteID>\n\t \t\t\t<DetailLevel>ReturnAll</DetailLevel>\n\t \t\t\t<LevelLimit>1</LevelLimit>\n\t\t\tEND\n\t\telse\n\t\t\tbody = <<-END\n\t\t\t\t<CategoryParent>#{parent}</CategoryParent>\n\t\t\t\t<DetailLevel>ReturnAll</DetailLevel>\n\t\t\t\t<LevelLimit>2</LevelLimit>\n\t\t\tEND\n\t\tend\n\n\t\[email protected]_trade_call( call, body )\n\n\tend", "def assign_categories=(value)\n @assign_categories = value\n end", "def categories\n Hash[self.class.catalogs.map { |fld, klass|\n name = fld.gsub(/_id$/, '_name');\n [fld, {:id => self.send(fld), :name => self.send(name)}] rescue nil\n }.reject {|cat| cat.nil?}]\n end", "def set_categories\n @categories = Category.order(:name).pluck(:name, :id)\n end", "def index\n @categories = Category.all\n @subcategories = Subcategory.all\n end", "def index\n @suba_categories = SubaCategory.all\n end", "def categories(params={})\n return @categories if (@categories && !params[:force])\n @categories = get_categories\n end", "def subcategories_and_ids_list\n url_no_page = \"#{SUBCATEGORIES_URL}?#{TOKEN_PREFIX}#{OAUTH_TOKEN}&#{PAGE_PREFIX}\"\n subcategory_names_and_ids = []\n (1..4).each do |i|\n url = url_no_page + \"#{i}\"\n subcategories = JSON.parse(open(url).read)\n subcategories[\"subcategories\"].each do|subcategory|\n subcategory_names_and_ids << {name: subcategory[\"name\"], id: subcategory[\"id\"]}\n end\n end\n subcategory_names_and_ids\n end", "def parse_categories_structure(category_id = nil)\n super category_id, { product_link: '.productsArea .productArea .productDetail a',\n next_page_link: '.productsArea .tsk-pageview .next a' }\n end", "def show\n @sub_categories = SubCategory.where(category_id: params[:id]).order(:order)\n end", "def categories=(categories)\n end", "def method_missing(method, args)\n @categories.send(method, *args)\n end", "def categories=(categories)\n @categories = categories\n end", "def categories(arg_)\n @config.lock\n\n objdata_ = _get_objdata(arg_)\n return nil unless objdata_\n hash_ = {}\n objdata_[2].each do |tup_, tupcats_|\n tupcats_.each do |cat_|\n hash_[cat_] = @categories[cat_][1].map{ |elem_| tup_[elem_] }\n end\n end\n hash_\n end" ]
[ "0.7728158", "0.7588042", "0.6880803", "0.68671745", "0.68423116", "0.6797682", "0.67143977", "0.67024696", "0.66994005", "0.6678905", "0.66560155", "0.65864986", "0.6547976", "0.6546187", "0.65269643", "0.6526152", "0.6513359", "0.6513359", "0.64364403", "0.64364403", "0.64364403", "0.64247423", "0.6421208", "0.64084774", "0.6396931", "0.6382752", "0.6335856", "0.6330096", "0.6319579", "0.6319579", "0.6319579", "0.6314099", "0.63065255", "0.6288895", "0.62707126", "0.62623847", "0.6243675", "0.6219486", "0.62151825", "0.6158969", "0.613831", "0.6118813", "0.6103818", "0.60983545", "0.6091787", "0.6066272", "0.60642606", "0.60402954", "0.6038926", "0.60164875", "0.6013277", "0.60088533", "0.59926224", "0.59917825", "0.5989438", "0.598938", "0.59893477", "0.597319", "0.5966911", "0.5965378", "0.5963643", "0.5963643", "0.5945537", "0.5944244", "0.5943442", "0.59417725", "0.5917511", "0.59135", "0.5911838", "0.5909904", "0.5909904", "0.5901575", "0.59006774", "0.5894347", "0.5890091", "0.58875483", "0.5886122", "0.5883229", "0.58794105", "0.587327", "0.5872919", "0.5870916", "0.58689785", "0.58689255", "0.586479", "0.58588326", "0.58573145", "0.5856961", "0.5853548", "0.5852699", "0.58500856", "0.584027", "0.58390945", "0.5838858", "0.58365226", "0.5825648", "0.58242995", "0.58242285", "0.5821325", "0.58108944", "0.5809843" ]
0.0
-1
The transport port to which the media stream is sent.
def transport_port=(port) if port.match /\// end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def port\n @connection.port\n end", "def port\n self.port\n end", "def default_port\n transport.default_port\n end", "def port\n return @port.to_i\n end", "def remote_port\n return @remote_port\n end", "def local_port\n return @local_port\n end", "def port\n @port\n end", "def port\n @port\n end", "def port\n @port\n end", "def port\n request.port\n end", "def getPort()\n return @port\n\tend", "def rtcp_port\n super\n end", "def actual_port; end", "def actual_port; end", "def port\n data[:port]\n end", "def port\n configuration.port\n end", "def port\n configuration.port\n end", "def rport\n\t\t@target_port\n\tend", "def port\n @port ||= target.split(':',2).last.to_i\n end", "def getPort()\n return @uri.port\n end", "def port\n get_value :port\n end", "def port\n @attributes[:port]\n end", "def socket_port; end", "def src_port\n @src_port.length == 1 ? @src_port[0] : @src_port\n end", "def data_channel_port\n super\n end", "def port\n @request_spec_server.port\n end", "def port\n return @forwarded_port || @port\n end", "def transport\n self.connection.transport\n end", "def port\n 7779\n end", "def rtp_port\n super\n end", "def port\n super\n end", "def port\n super\n end", "def port\n nodes[0][1].to_i\n end", "def port\n @uri.port\n end", "def port\n @options[:port]\n end", "def port; end", "def port; end", "def port; end", "def port; end", "def port; end", "def port; end", "def port; end", "def port; end", "def get_transporter\n TCPSocket.open(@host, @port)\n end", "def port_string; end", "def port\n @socket.connect_address.ip_port\n rescue SocketError\n # Not bound to any local port\n rescue IOError\n # Socket has been closed\n end", "def port\n p = attributes['port'].to_i\n (p == 0 ? nil : p)\n end", "def port\n end", "def standard_port; end", "def beef_port\n public_port || local_port\n end", "def port\n @manager.primary_pool.port\n end", "def port\n @port ||= use_ssl ? 
636 : 389\n end", "def port\n @port ||= Port.new(@event.at('@port'), @event.at('@svc_name'), @event.at('@protocol'))\n end", "def tunnel_port\n super\n end", "def preferred_port\n @preferred_port\n end", "def port\n @port || 161\n end", "def port\n @port ||= presenter.port\n end", "def rtp_port=(port)\n @rtp_port = port\n @rtcp_port = @rtp_port + 1\n end", "def serial_port\n\t\t\treturn @sp if port_initialized?\n\t\tend", "def getTransport()\n\t\treturn @transport\n\tend", "def transport\n instance.transport\n end", "def local_port\n socket = Socket.new(:INET, :STREAM, 0)\n socket.bind(Addrinfo.tcp(\"127.0.0.1\", 0))\n port = socket.local_address.ip_port\n socket.close\n port\n end", "def server_port; end", "def port\n connect_address.ip_port\n rescue SocketError\n # Not bound to any local port\n rescue IOError\n # Socket has been closed\n end", "def port_required\n data.port_required\n end", "def port\n if @port == DEFAULT_HTTP_PORT\n DEFAULT_SSL_PORT\n else\n @port\n end\n end", "def port\n @presenter.port\n end", "def host_with_port\n uhost, uport = self.host, self.port\n if port != protocol.default_port\n \"#{uhost}:#{uport}\"\n else\n uhost\n end\n end", "def peer\n \"#{rhost}:#{rport}\"\n end", "def default_port\n data.default_port\n end", "def standard_port?; end", "def remote_port=(value)\n @remote_port = value\n end", "def port\n return @port if @port\n\n @server = TCPServer.new('127.0.0.1', 0)\n @port = @server.addr[1].to_i\n @server.close\n\n return @port\n end", "def socket\n @socket\n end", "def port=(_arg0); end", "def server_port\n AgileProxy.config.server_port\n end", "def listening_port\n @dbi.endpoint.port\n end", "def listening_port\n @dbi.endpoint.port\n end", "def port\n @port ||= opts.fetch(:port, parsed_uri.port)\n end", "def port\n @hash[\"Listen\"].to_i\n end", "def mail_port\n if @yaml[\"mail\"][\"port\"] != 0\n @yaml[\"mail\"][\"port\"]\n end\n end", "def stunnel_port(port)\n port = port.to_i\n if port < 50000\n return port + 10000\n else\n return port - 10000\n end\n end", "def transport\n remote? ? 'remote' : protocol\n end", "def port\n 20000 + ($$ % 40000)\n end", "def port; config[:port]; end", "def get_open_port\n socket = Socket.new(:INET, :STREAM, 0)\n socket.bind(Addrinfo.tcp(\"127.0.0.1\", 0))\n port = socket.local_address.ip_port\n socket.close\n port\n end", "def optional_port; end", "def rport\n datastore['RPORT']\n end", "def port=(_); end", "def raw_host_with_port; end", "def host_with_port\n @context.registers[:host_with_port]\n end", "def local_port\n get('beef.http.port') || '3000'\n end", "def to_s\n\t\treturn \"%s:%d\" % [ self.peer_host, self.peer_port ]\n\tend", "def client_port\n host_settings['client_port']\nend", "def local_port=(value)\n @local_port = value\n end", "def true_port\r\n port = servlet_response.getLocalPort\r\n $log.debug(\"True port is #{port}\")\r\n port\r\n end", "def drb_port\n instance.options[:drb_port]\n end", "def drb_port\n instance.options[:drb_port]\n end", "def trace_observer_port\n port_from_host_entry || NewRelic::Agent.config[:'infinite_tracing.trace_observer.port']\n end", "def socket #:nodoc:\n return @socket\n end" ]
[ "0.7513234", "0.71351516", "0.71020275", "0.7032989", "0.69719315", "0.6951083", "0.6916727", "0.6916727", "0.6916727", "0.68983835", "0.6897157", "0.6877854", "0.6851919", "0.6851919", "0.6841713", "0.6832453", "0.6832453", "0.6766841", "0.6677953", "0.6671478", "0.66423565", "0.6568565", "0.65676695", "0.65517783", "0.65420353", "0.6514259", "0.6493524", "0.64856994", "0.6462433", "0.64572597", "0.64345294", "0.64345294", "0.64226764", "0.6421574", "0.64105135", "0.63672024", "0.63672024", "0.63672024", "0.63672024", "0.63672024", "0.63672024", "0.63672024", "0.63672024", "0.6337126", "0.6331831", "0.6325343", "0.63221765", "0.6280325", "0.62602913", "0.6254488", "0.6245696", "0.62369037", "0.62043667", "0.61778694", "0.61697716", "0.61108905", "0.6099573", "0.6080986", "0.606875", "0.6065267", "0.6064365", "0.6013498", "0.59776247", "0.5943487", "0.5912035", "0.59012926", "0.5881099", "0.5872001", "0.5867747", "0.5859269", "0.58326083", "0.58323425", "0.5824709", "0.58207124", "0.5799233", "0.57989395", "0.5787193", "0.5787193", "0.5784161", "0.578257", "0.57772607", "0.57732445", "0.5773137", "0.57650054", "0.57548267", "0.57542837", "0.57498693", "0.57489836", "0.57474226", "0.57261145", "0.5707937", "0.56995887", "0.5696119", "0.5689214", "0.5675678", "0.567217", "0.56643546", "0.56643546", "0.5656535", "0.5639183" ]
0.68838
11
Checks to see if it has connection fields set.
def has_connection_fields? !!(connection_network_type && connection_address_type && connection_address) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def valid_connection?\n @connection && @connection.valid?\n end", "def checkConnection\n unless connected?\n raise DictError.new(), \"Not connected.\"\n end\n end", "def checkConnection\n unless connected?\n raise DictError.new(), \"Not connected.\"\n end\n end", "def connected?\n !connection.blank?\n end", "def connections?\n @connections.any?\n end", "def connection_in_info?\n info.respond_to?(:ood_connection_info)\n end", "def conn?\n conn != nil\n end", "def connected?\n # FIXME: check if connection is connected\n not @connection.nil?\n end", "def connected?\n @connection.present?\n end", "def connected?\r\n [email protected]?\r\n end", "def check_connection\n logger.debug \"Checking connection (#{is_connected?})\"\n is_connected? || connect_database\n # return connection status\n is_connected?\n end", "def check_connection\n logger.debug \"Checking connection (#{is_connected?})\"\n is_connected? || connect_database\n # return connection status\n is_connected?\n end", "def is_connected?\n\t\tif @connection == nil\n\t\t\treturn false\n\t\telse\n\t\t\treturn true\n\t\tend\n\tend", "def conn_errors?\n [email protected]?\n end", "def check_nosql_connection\n @configuration.no_sql_connection.valid? && @configuration.no_sql_connection.has_connection?\n end", "def connected?\n return !new_record? && !settings.empty?\n end", "def connected?\n return !@@connection.nil?\n end", "def connection_valid?\n begin\n result = client.call(:fe_dummy).body[:fe_dummy_response][:fe_dummy_result]\n @observations << \"app_server: #{result[:app_server]}, db_server: #{result[:db_server]}, auth_server: #{result[:auth_server]}\"\n result[:app_server] == \"OK\" and result[:db_server] == \"OK\" and result[:auth_server] == \"OK\"\n rescue => e\n @errors << e.message\n @backtrace = e.backtrace\n false\n end\n end", "def in_connection?\r\n self.aln_connection_id.nil? ? false : true\r\n end", "def is_connected?\n if @connection == nil\n return false\n else \n return true\n end\n end", "def connected?\n [email protected]?\n end", "def connected?\n @connection && [email protected]?\n end", "def connect?\n connect != false\n end", "def has_connection?\n setup_connection_adapter\n connection.send(:connect) if ActiveRecord::Base.connection.respond_to?(:connect)\n true\n end", "def connected?\r\n\t\t\t\t\treturn false if (@connections.nil?)\r\n\t\t\t\t\treturn false if (@connections.empty?)\r\n\t\t\t\t\treturn true\r\n\t\t\t\tend", "def connected?\r\n @connection && @connection.connected?\r\n end", "def valid?\n @conn.valid?\n end", "def valid_connection?(conn)\n conn.servers.length\n true\n rescue Excon::Errors::Forbidden, Excon::Errors::Unauthorized\n false\n end", "def initialized?\n server.present? && username.present? && api_token.present?\n end", "def connected?\n @connections.any?\n end", "def connected?\n if @connection\n @connection.stat\n @connection.errno.zero?\n else\n false\n end\n end", "def connected?\n\t\[email protected]?\n\tend", "def ensure_connection!\n fail \"Must have active connection\" unless connection\n end", "def ensure_connection!\n fail \"Must have active connection\" unless connection\n end", "def ensure_connection!\n fail \"Must have active connection\" unless connection\n end", "def ensure_connection!\n fail \"Must have active connection\" unless connection\n end", "def ensure_connection!\n fail \"Must have active connection\" unless connection\n end", "def ensure_connection!\n fail \"Must have active connection\" unless connection\n end", "def connected?\n\t\treturn @conn ? 
true : false\n\tend", "def connected?\n @connections.size > 0 # synchronize { @connections.any? }\n end", "def connected?\n if @db_connection\n return true\n else\n return false\n end\n end", "def connected?\n [email protected]? && @connected\n end", "def auto_connecting?\n !!get_inherited_attribute(\"@auto_connecting\")\n end", "def connection_valid?\n if status.untested? or (changed? and (changes.keys & @@status_fields).empty?)\n begin\n test_connection\n status.success!\n rescue => e\n error_str = map_connection_exception_to_error(e)\n status.fail!(error_str)\n end\n end\n status.success?\n end", "def connected?\n not @handle.nil?\n end", "def connected?; connection_state == :connected end", "def connected?\n synchronize { @connections.any? }\n end", "def check_db_connection\n begin\n ActiveRecord::Base.verify_active_connections! if defined?(ActiveRecord)\n rescue Object => bdrb_error\n log_exception(bdrb_error)\n end\n end", "def connected?\n return false unless @connection\n return false unless @connection.started?\n true\n end", "def connected?\n return false if @connection.nil?\n return false if @connection.closed?\n return true\n end", "def check_connected\n raise NotConnected unless @session_id && @sso && @provider\n end", "def connected?(conn)\n conn && conn.active?\n end", "def bound?\n\t\treturn self.conn.bound?\n\tend", "def waited_on_connect?\n @waited_on_connect\n end", "def check_options\n unless @options[:stub]\n STDERR.puts \"Please specify a host to connect to using --host\" unless @options[:host]\n STDERR.puts \"Please specify a model to check using --model\" unless @options[:model]\n return false unless @options[:host] && @options[:model]\n end\n\n true\n end", "def connected?\n !!(@connection && @connection.status == PGconn::CONNECTION_OK)\n rescue PGError\n false\n end", "def setup?\n Crocoduck::Store.server_cluster && \n Crocoduck::Store.server_db && \n Crocoduck::Store.server_collection\n end", "def connected?\n self.connected\n end", "def authentication_set?\n [email protected]? && [email protected]?\n end", "def connected?\n @host && @port\n end", "def connected?\n @connected ||= false\n end", "def outstanding?(config, connection)\r\n db_hash(config, connection) ? false : true\r\n end", "def valid?\n return false if @_mc_connection.nil?\n return false unless @_mc_connection.active?\n return true\n end", "def connection_base?\n @base || @connection_specification\n end", "def connected?\n\t\t\t\t@@connected\n\t\t\tend", "def connected?\n unless @server.nil?\n true\n else\n false\n end\n end", "def connect_error?\n @connerror || false\n end", "def connection_exists?(name)\n @connections.include?(name)\n end", "def connected?\n return false unless @connection\n return false unless @connection.started?\n true\n end", "def stage_over_connection?\n\t\tfalse\n\tend", "def connected?\n connection_handler.connected?(self)\n end", "def connected?\n\t\t@connected\n\tend", "def connected?\n (conn=self.conn) && conn.connected? 
&& @pid == Process.pid || false\n end", "def connected?\n connections = nil\n @lock.lock\n begin\n connections = @connections\n ensure\n @lock.unlock\n end\n connections.any?\n end", "def connected?\n connection_handler.connected?(connection_specification_name, role: current_role, shard: current_shard)\n end", "def connected?\n @connected\n end", "def connected?\n @connected\n end", "def connected?\n @connected\n end", "def connected?\n @connected\n end", "def connected?\n @connected\n end", "def connected?\n @connected\n end", "def connected?\n @connected\n end", "def connected?\n @connected\n end", "def connected?\n @connected\n end", "def connected?\n @connected\n end", "def connected?\n @connected\n end", "def closed?\n @connection.nil?\n end", "def connected?\n raise NotImplementedError\n end", "def is_connected?\n logger.debug \"Checking if DB is selected(#{@database.is_db_selected?})\"\n @database.is_db_selected?\n end", "def is_connected?\n logger.debug \"Checking if DB is selected(#{@database.is_db_selected?})\"\n @database.is_db_selected?\n end", "def test_connection_frame\n assert_not_nil @conn.connection_frame\n end", "def connection_field?(field)\n type = field.type.unwrap\n if type.kind.fields?\n if edges_field = type.get_field('edges')\n edges = edges_field.type.unwrap\n if node_field = edges.get_field('node')\n return node_field?(node_field)\n end\n end\n end\n\n false\n end", "def test_connection_exists\n assert_not_nil @conn\n end", "def connected?\n !!@connected\n end", "def connectable?\n true\n end", "def connection_valid?\n client('v1').api_valid?\n rescue StandardError\n false\n end", "def check_connection_state\n # Has the connection been established in the meantime? If yes, shut\n # down the work queues thread, all work will be done in this thread \n # from now on. \n if cached_connection_established?\n @work_queue.shutdown\n end\n end", "def cached_connection_established?\n @cached_connection_established ||= begin\n # NOTE This will not be called unless we have some messages to send,\n # so no useless connections are made\n @connection.try_connect\n @connection.established?\n end\n end" ]
[ "0.69252634", "0.6743864", "0.6743864", "0.657802", "0.6533757", "0.6485211", "0.6478045", "0.6469435", "0.6467915", "0.6411449", "0.6365215", "0.6365215", "0.63441753", "0.6337181", "0.63170624", "0.63047093", "0.6299431", "0.6288328", "0.6285538", "0.62674135", "0.62661785", "0.62478375", "0.621798", "0.61772025", "0.61590636", "0.6157974", "0.615199", "0.61187786", "0.6090554", "0.60789096", "0.6045924", "0.6022907", "0.5983663", "0.5983663", "0.5983663", "0.5983663", "0.5983663", "0.5983663", "0.59396315", "0.59317464", "0.59270334", "0.59072214", "0.5906803", "0.58956087", "0.5879341", "0.58428425", "0.5837702", "0.5826242", "0.5817566", "0.5807068", "0.5805387", "0.58033615", "0.5796715", "0.5767383", "0.57540816", "0.57509065", "0.57442725", "0.5727969", "0.5722514", "0.572249", "0.5719616", "0.5718628", "0.5713155", "0.5704687", "0.57008547", "0.56993455", "0.56829035", "0.56749207", "0.5672009", "0.56717575", "0.566864", "0.56683826", "0.56652933", "0.5663391", "0.5646395", "0.56446797", "0.56446797", "0.56446797", "0.56446797", "0.56446797", "0.5632259", "0.5632259", "0.5632259", "0.5632259", "0.5632259", "0.5632259", "0.562496", "0.561618", "0.5613026", "0.5613026", "0.5610749", "0.56106997", "0.5608912", "0.5607354", "0.56006515", "0.5597415", "0.559475", "0.5592944" ]
0.79839444
2
=> Methods! Gets Track Metadata from Spotify's Web Metadata API
def get_track(spotify_id) content = HTTParty.get('http://ws.spotify.com/lookup/1/.json?uri=' + URI.escape("spotify:track:#{spotify_id}")) if !content.body.empty? Oj.load(content.body) else flash[:notice] = 'Error with Spotify! Try again in 10 seconds!' end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_info( params )\n xml = LastFM.get( \"track.getInfo\", params )\n LastFM::Track.from_xml( xml )\n end", "def tracks_get_info params = { :track_id => nil }\n json = send_request 'tracks_get_info', params\n if json['success'] == true\n json['data']\n else\n puts \"Error: \" + json['message']\n exit\n end\n end", "def get_fingerprint_metadata( params )\n LastFM.get( \"track.getFingerPrintMetadata\", params )\n end", "def metadata\n puts \"Adding Metadata...\"\n doc = Nokogiri::HTML(open(\"http://www.last.fm/search?q=#{query}&type=track\"))\n url = doc.css(\"span.chartlist-ellipsis-wrap\").first.css(\"a.link-block-target\").first.attr('href')\n ch = url.gsub('/music/', \"\")\n artist, title = ch.split(\"/_/\")\n artist = artist.gsub('+', \" \")\n title = title.gsub('+', \" \")\n doc = Nokogiri::HTML(open(\"http://www.last.fm#{url}\"))\n album = doc.css(\"h3 a\").first\n begin\n Mp3Info.open(\"#{query.gsub(\"+\",\"-\")}.mp3\") do |mp3|\n mp3.tag.title = \"#{URI.unescape(title)}\".strip\n mp3.tag.artist = \"#{URI.unescape(artist)}\".strip\n mp3.tag.album = \"#{URI.unescape(album.content)}\".strip\n end\n puts \"Done\"\n rescue\n puts \"Fail\"\n end\n end", "def get_meta_track \n send_cmd(\"get_meta_track\")\n end", "def get_meta_artist \n send_cmd(\"get_meta_artist\")\n end", "def metadata\n api_get(\"$metadata\").body\n end", "def topTracks(artist)\n\tartistinfo = HTTParty.get(\"http://ws.audioscrobbler.com/2.0/?method=artist.gettoptracks&artist=#{artist}&api_key=#{API_KEY}&format=json\")\n\treturn artistinfo\nend", "def artistInfo(artist)\n\tartistinfo = HTTParty.get(\"http://ws.audioscrobbler.com/2.0/?method=artist.getinfo&artist=#{artist}&api_key=#{API_KEY}&format=json\")\n\treturn artistinfo\nend", "def read_metadata\n @client.get(metadata_path)\n end", "def metadata\n {\n title: flickr_title,\n description: description\n }\n end", "def spotify_track\n unless self.spotify_track_id.nil?\n Rails.cache.fetch(\"spotify_track/#{self.spotify_track_id}\", expires_in: 12.days) do\n RSpotify::Track.find(self.spotify_track_id)\n end\n end\n end", "def info(options={})\n get(:standard, {:method => \"track.getInfo\"}.merge(options))\n end", "def topsongs\n tracks = Array.new\n session[:related].each do |artistName|\n artist = RSpotify::Artist.search(artistName).first\n top_tracks = artist.top_tracks(@userCountry).first\n tracks.push(top_tracks.uri)\n end\n\n uri = URI.parse(\"#{@playlistUrl}/tracks/\")\n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true #this helps with the https\n request = Net::HTTP::Post.new(uri)\n request[\"Authorization\"] = \"Bearer #{@data[\"access_token\"]}\"\n request[\"Content-Type\"] = \"application/json\"\n request.body = \"{\\\"uris\\\": #{tracks}}\"\n response = http.request(request)\n finished = JSON.parse(response.read_body)\n if finished.key?(\"snapshot_id\")\n @playlistCheck = true\n else\n @playlistCheck = false\n end\nend", "def fetch_metadata\n uri = SCALEWAY_METADATA_URL.to_s\n response = http_client.get(uri)\n case response.code\n when \"200\"\n parser = FFI_Yajl::Parser.new\n parser.parse(response.body)\n when \"404\"\n logger.trace(\"Mixin ScalewayMetadata: Encountered 404 response retrieving Scaleway metadata: #{uri} ; continuing.\")\n {}\n else\n raise \"Mixin ScalewayMetadata: Encountered error retrieving Scaleway metadata (#{uri} returned #{response.code} response)\"\n end\n end", "def get_metadata(*args)\n self.metadata.get(*args)\n end", "def search(searchString, token)\n query = {\n \"q\": searchString,\n \"type\": \"track\",\n 
\"limit\": 1\n }\n\n headers = {\n \"Accept\": \"application/json\",\n \"Authorization\": \"Bearer #{token}\"\n }\n\n response = HTTParty.get(\"https://api.spotify.com/v1/search\", :query => query, :headers => headers)\n jBody = JSON.parse(response.body)\n result = {\n \"id\": \"spotify:track:#{jBody[\"tracks\"][\"items\"].first[\"id\"]}\",\n \"name\": jBody[\"tracks\"][\"items\"].first[\"name\"],\n \"artist\": jBody[\"tracks\"][\"items\"].first[\"artists\"].first[\"name\"]\n }\n\n return result\n\nend", "def track_name\n begin\n track = MetaSpotify::Track.lookup(url_spotify)\n return track.name\n rescue Exception => e\n return \"Not a Track\"\n end\n end", "def show\n client_playlist = Soundcloud.new(:client_id => Rails.application.secrets.soundcloud_client_id,\n :client_secret => Rails.application.secrets.soundcloud_secret,\n :username => Rails.application.secrets.soundcloud_username,\n :password => Rails.application.secrets.soundcloud_password)\n client_track = SoundCloud.new(:client_id => Rails.application.secrets.soundcloud_client_id)\n # create an array of track ids\n track_url = Post.find(params[:id]).try(:track_url)\n # puts track_url.inspect\n embed_info = client_track.get('/oembed', :url => track_url)\n @song = embed_info['html']\n @track_id = client_track.get('/resolve', :url => track_url).id\n @playlists = client_playlist.get(\"/me/playlists\")\n end", "def trackgen_info;\treturn @json_data['trackgen_info'];\tend", "def initialize(artist, track)\n @artist = artist\n @track = track\n\n url =(\"https://api.spotify.com/v1/search?q=\" + track + \"&type=track\")\n\n response = HTTParty.get(url).parsed_response\n\n @uri = response['tracks']['items'][0]['uri']\n\n\n # ['artists']['items'][0]['uri']\n end", "def show\n response = Track.get_track_musicians_by_track_id(params[:id])\n @title = response['Results'][0]['Title']\n if response['Results'][0]\n @track = response['Results'][0]['Participations']\n else\n @track = nil\n end\n end", "def extract_metadata!\n unless self.title\n Mp3Info.open(self.server_path) do |mp3|\n self.title = mp3.tag.title\n self.artist = mp3.tag.artist\n self.album = mp3.tag.album\n self.year = mp3.tag.year\n self.comm = mp3.tag2.comm\n self.tcom = mp3.tag2.tcom\n self.tcon = mp3.tag2.tcon\n self.tcop = mp3.tag2.tcop\n self.tit2 = mp3.tag2.tit2\n self.tit3 = mp3.tag2.tit3\n self.tcat = mp3.tag2.tcat\n self.trck = mp3.tag2.trck\n self.tyer = mp3.tag2.tyer\n self.tgid = mp3.tag2.tgid\n self.wfed = mp3.tag2.wfed\n end\n\n self.title = \"Untitled\" unless self.title\n\n self.save\n end\n end", "def metadata\n return @metadata if defined? @metadata\n\n @metadata = Henkei.read :metadata, data\n end", "def metadata\n return @metadata if defined? 
@metadata\n\n @metadata = Henkei.read :metadata, data\n end", "def read_metadata; end", "def retrieve_metadata\n conn = Faraday.new(url: @reference.endpoint)\n conn.get\n rescue Faraday::Error::ConnectionFailed => error\n Geoblacklight.logger.error error.inspect\n nil\n rescue Faraday::Error::TimeoutError => error\n Geoblacklight.logger.error error.inspect\n nil\n end", "def latest_tracks(user, method)\n\n\t# Get the tweets\n\tresponse = make_response(user, method)\n\trecenttracks = JSON.parse(response.body)['recenttracks']\n\trecenttracks['track']\nend", "def extract_metadata\n return unless audio?\n logger.debug(maudio_params[:path])\n logger.debug('It\\'s audio')\n path = maudio_params[:path]\n #url = URI.parse(path) # turn the string into a URI\n #http = Net::HTTP.new(url.host, url.port) \n #req = Net::HTTP::Get.new(url.path) # init a request with the url\n #req.range = (0..4096) # limit the load to only 4096 bytes\n #res = http.request(req) # load the mp3 file\n #child = {} # prepare an empty array to store the metadata we grab\n #open_opts = { :encoding => 'utf-8' }\n #Mp3Info.open( StringIO.open(res.body) ) do |m| #do the parsing\n # child['title'] = m.tag.title \n # child['album'] = m.tag.album \n # child['artist'] = m.tag.artist\n # child['length'] = m.length\n # \n # puts m\n #end\n #logger.debug('*********************')\n \n #logger.debug(child['length'])\n #logger.debug('*********************')\n end", "def extract_metadata; end", "def get_identifier_metadata(identifier)\n request_uri = '/id/' + identifier\n uri = URI(ENDPOINT + request_uri)\n request = Net::HTTP::Get.new uri.request_uri\n response = call_api(uri, request)\nend", "def metadata\n @data[:metadata]\n end", "def extract_metadata_for_video url\n mfile = metadata_file_for(url)\n unless File.file? 
mfile\n\n # self << url\n # self << %w[ skip-download write-info-json ignore-errors ]\n # self << { output: mfile.gsub(/\\.info\\.json$/, '') }\n # self.run\n\n # Run directly:\n command = \"#{url} --skip-download --write-info-json --ignore-errors\"\n command += \" -o '#{mfile.gsub(/\\.info\\.json$/, '')}'\"\n delegator.run command\n end\n JSON.parse File.read(mfile) rescue nil\n end", "def get_analytics_reporting_metadata_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: AnalyticsApi.get_analytics_reporting_metadata ...\"\n end\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n # resource path\n local_var_path = \"/api/v2/analytics/reporting/metadata\".sub('{format}','json')\n\n # query parameters\n query_params = {}\n query_params[:'pageNumber'] = opts[:'page_number'] if opts[:'page_number']\n query_params[:'pageSize'] = opts[:'page_size'] if opts[:'page_size']\n query_params[:'locale'] = opts[:'locale'] if opts[:'locale']\n\n # header parameters\n header_params = {}\n\n # HTTP header 'Accept' (if needed)\n local_header_accept = ['application/json']\n local_header_accept_result = @api_client.select_header_accept(local_header_accept) and header_params['Accept'] = local_header_accept_result\n\n # HTTP header 'Content-Type'\n local_header_content_type = ['application/json']\n header_params['Content-Type'] = @api_client.select_header_content_type(local_header_content_type)\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n \n auth_names = ['PureCloud OAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'ReportMetaDataEntityListing')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: AnalyticsApi#get_analytics_reporting_metadata\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def show\n artist = params[:artist]\n track = params[:track]\n @url = \"http://soundcloud.com/#{artist}/#{track}\"\n @soundcloud_track = Soundcloud.client.get('/resolve', :url => @url)\n end", "def topAlbums(artist)\n\tartistinfo = HTTParty.get(\"http://ws.audioscrobbler.com/2.0/?method=artist.gettopalbums&artist=#{artist}&api_key=#{API_KEY}&format=json\")\n\treturn artistinfo\nend", "def getRecentTrack(username)\r\n connection = Request.new(\"http://ws.audioscrobbler.com/2.0/\")\r\n query =\r\n {\r\n :method => \"user.getrecenttracks\",\r\n :user => username,\r\n :limit => \"1\",\r\n :api_key => @apiKey\r\n }\r\n xml = connection.get(\"\", query)\r\n doc = XmlSimple.xml_in(xml)\r\n\r\n if(doc[\"status\"] == \"ok\")\r\n track = Hash.new\r\n if(doc[\"recenttracks\"][0][\"page\"] == \"0\")\r\n puts username + \" has not listened to any tracks\"\r\n track['artist'] = \"\"\r\n track['song'] = \"\"\r\n else\r\n track['artist'] = doc[\"recenttracks\"][0][\"track\"][0][\"artist\"][0][\"content\"]\r\n track['song'] = doc[\"recenttracks\"][0][\"track\"][0][\"name\"][0]\r\n end\r\n return track\r\n end\r\n puts \"\\tGet recent track failed\"\r\n puts \"\\t\"+doc[\"error\"][0][\"code\"][0]+\": \" +doc[\"error\"][0][\"content\"][0]\r\n return \"\"\r\n end", "def lookup_track_info(track_mbid, artist_name, track_name)\r\n\t\tputs \"looking up info for \" + artist_name + \"|\" + track_name\r\n\t\tbegin\r\n\t\t\ttrack = 
@lastfm.track.get_info(:track_mbid => track_mbid, :artist => artist_name, :track => track_name, :username => @username)\r\n\r\n\t\t\tputs track\r\n\t\t\[email protected]( \"INSERT INTO track_history (time_utc, artist_mbid, artist_name, track_mbid, track_name, album_mbid, album_name) VALUES (?,?,?,?,?,?,?)\", t[\"date\"][\"uts\"], t[\"artist\"][\"mbid\"], t[\"artist\"][\"content\"], t[\"mbid\"], t[\"name\"], t[\"album\"][\"mbid\"], t[\"album\"][\"content\"] )\r\n\t\trescue\r\n\t\t\t# (Lastfm::ApiError)\r\n # Track not found\r\n puts \"Track not found or something - \" + track_mbid + \" \" + track_name\r\n\r\n\t\tend\r\n\r\n\r\n\tend", "def followed_artist_tracks\n results = []\n follows.each do |follow|\n response = HTTParty.get(\"https://api.spotify.com/v1/artists/#{follow.artist_id}/top-tracks?country=US\")\n @hash_version = JSON.parse(response.body)\n @hash_version[\"tracks\"].each do |track|\n results << track[\"id\"]\n end\n end\n\n results.shuffle.join(\",\")\n end", "def top_tracks_for_random_artist\n unless @artist\n @artist = Lineup.select_random_artist\n end\n\n options = { :query => { :api_key => LASTFM_API_KEY, :limit => \"10\", :format => \"json\" } }\n top_tracks_response = self.class.get(\"/2.0/?method=artist.gettoptracks&artist=#{ERB::Util.url_encode(@artist)}\",options)\n\n top_tracks_response = top_tracks_response.try(:[],\"toptracks\").try(:[],\"track\")\n if top_tracks_response\n top_tracks_response\n else\n @artist = nil\n top_tracks_for_random_artist\n end\n end", "def get_metadata_response(metadata)\n if metadata.respond_to?(:has_key?)\n listing = []\n metadata.keys.sort.each do |key|\n value = metadata[key]\n if value.respond_to?(:has_key?)\n listing << key + '/'\n else\n listing << key\n end\n end\n return listing.join(\"\\n\")\n end\n return metadata\n end", "def top_tracks\n lfm_path = \"artist.topTracks&artist=#{@name}\"\n lfm_data = LastFm::fetch_data(lfm_path)\n return Track.create_from_hash(Hash.from_xml(lfm_data)['lfm']['toptracks']['track'])\n end", "def info(options={})\n get(:standard, {:method => \"artist.getInfo\"}.merge(options))\n end", "def get_top_tracks( params )\n LastFM.get( \"chart.getTopTracks\", params )\n end", "def metadata\n @metadata ||= Metaforce::Metadata::Client.new(@options)\n end", "def get_metadata\n doc = download_ais(@program_id)\n streamUri = (doc/\"//streamuri\").text\n @metadata[:fileType] = streamUri[-3..-1]\n @metadata[:programName] = (doc/\"//brandtitle\").text\n @metadata[:episodeId] = (doc/\"//programmenumber\").text\n\n assetInfo = download_asset(@program_id)\n @metadata[:episodeNumber] = (assetInfo/\"//episodenumber\").text\n @metadata[:seriesNumber] = (assetInfo/\"//seriesnumber\").text\n @metadata[:episodeInfo] = (assetInfo/\"//episodeinfo\").text\n @metadata[:episodeTitle] = (assetInfo/\"//episodetitle\").text\n @metadata[:brandTitle] = (assetInfo/\"//brandtitle\").text\n @metadata[:epId] = (assetInfo/\"//programmeid\").text\n @metadata[:imagePath] = (assetInfo/\"//imagepath\").text\n\n @metadata[:title1] = (assetInfo/\"//title1\").text\n @metadata[:title2] = (assetInfo/\"//title2\").text\n\n #progGuideUrl is used to pull out metadata from the CH4 website\n progGuideUrl = (assetInfo/\"//episodeguideurl\").text\n\n begin\n #read program guide to get additional metadata\n seriesInfo = download_progguide(progGuideUrl)\n\n synopsisElem = seriesInfo.at(\"//meta[@name='synopsis']\")\n @metadata[:description] = synopsisElem.nil? ? 
\"\" : synopsisElem['content']\n rescue\n @log.error \"Unable to read program guide data - the video file will not be fully tagged\"\n @log.debug \"Program Guide URL: #{progGuideUrl}\"\n end\n end", "def gettrackdata()\r\n return getvalue(SVTags::TRACK_DATA)\r\n end", "def extract_metadata\n return unless audio?\n path = upload.queued_for_write[:original].path\n open_opts = { :encoding => 'utf-8' }\n Mp3Info.open(path, open_opts) do |mp3info|\n self.metadata = mp3info.tag\n end\n end", "def extract_metadata\n return unless audio?\n path = upload.queued_for_write[:original].path\n open_opts = { :encoding => 'utf-8' }\n Mp3Info.open(path, open_opts) do |mp3info|\n self.metadata = mp3info.tag\n end\n end", "def extract_metadata\n path = audio.queued_for_write[:original].path\n open_opts = { :encoding => 'utf-8' }\n TagLib::FileRef.open(path) do |fileref|\n tag = fileref.tag\n properties = fileref.audio_properties\n self.update_attributes(:artist => tag.artist,:album=> tag.album,:title => tag.title, :genre => tag.genre, :track_number => tag.track, :year_of_release => tag.year, :comments => tag.comment,:bitrate => properties.bitrate,:no_of_channels => properties.channels,:length=> properties.length,:sample_rate=> properties.sample_rate)\n end\n end", "def hotness artist\n url = \"http://developer.echonest.com/api/v4/artist/hotttnesss?api_key=#{ECHONEST_API_KEY}&name=#{artist}&format=json\"\n result = parseURL url\n result[\"response\"][\"artist\"][\"hotttnesss\"]\nend", "def index\n if !params[:artist_name].empty?\n lastfm = Lastfm.new(ENV[\"LASTFM_KEY\"], ENV[\"LASTFM_SECRET\"])\n @videos = Yt::Collections::Videos.new\n artistspotify = RSpotify::Artist.search(params[:artist_name])\n artistlastfm = lastfm.artist.get_info(artist: params[:artist_name], autocorrect: 1)\n \n @artistbio = artistlastfm[\"bio\"][\"summary\"]\n puts artistspotify\n @artist = artistspotify.first\n @tracks = @artist.top_tracks(:US)\n # <% @videoId = @videos.where(q: \"music video for #{@artist.name} #{track.name}\", order: 'relevance').first.id %>\n # @videoId = @videos.where(q: params[:artist_name], safe_search: 'none').first.id\n else\n redirect_to root_path\n end \n end", "def print_latest_song(tracks)\n\t\n\tputs tracks[0]['artist']['#text']\n\tputs tracks[0]['name'] + \" - \" + tracks[0]['album']['#text']\nend", "def get_track_attributes(track_ids)\n params = {\n ids: track_ids.join(',')\n }\n\n raw_attributes = JSON.parse(api_call(\"https://api.spotify.com/\",\"v1/audio-features\",params))['audio_features']\n\n track_ids.each_with_index do |track_id, i|\n track = Track.find_by(spotify_id: track_id)\n track_data = raw_attributes[i].keep_if { |k,v| TrackProfile.new.attributes.keys.include?(k) && k != 'id'}\n track.create_track_profile(track_data)\n end\n\n end", "def metadata\n response = retrieve_metadata\n return response.body unless response.nil? || response.status == 404\n Geoblacklight.logger.error \"Could not reach #{@reference.endpoint}\"\n \"Could not reach #{@reference.endpoint}\"\n end", "def search_tracks(q, page=1)\n \t response_body = get_body(http_get(\"http://ws.spotify.com/search/1/track.json?q=#{CGI.escape(q)}&page=#{page}\"))\n \t json = ActiveSupport::JSON.decode(response_body)\n to_tracks(json[\"tracks\"])\n \tend", "def fetch(query)\n raise ArgumentError unless query.attributes[:title].present?\n\n params = {\n title: query.attributes[:title],\n artist: query.attributes[:artist]\n }.merge(query.options).delete_if { |k, v| v.blank? 
}\n\n response = Response.new(client.get('/ws/1//track', params))\n response.items.map { |item| Result.new(item) }\n end", "def fetch_object_metadata(session_id)\n # Setup params.\n setup_params_logic = lambda do |last_request, params|\n # Do nothing.\n end\n \n # Parse the response.\n response_parse_logic = lambda do |response| \n \n end\n end", "def playlist\n split_and_hash(send_request('playlistinfo'))\n end", "def metadata\n if any? && metadata_schema\n response = api('request',\n :uri => \"hm://metadata/#{resource_name}s\",\n :batch => true,\n :payload => map {|resource| {:uri => resource.metadata_uri}},\n :response_schema => metadata_schema\n )\n response['result']\n else\n []\n end\n end", "def query_metadata filename\n song = File.basename(filename, File.extname(filename))\n track = filename[/[0-9]+ /]\n\n unless track.nil?\n song = song[track.size .. -1]\n track = track.to_i\n end\n\n return track,song\nend", "def top_artists\n \n artists=[]\n spotify_artists = JSON.parse(api_call(\"https://api.spotify.com/\",\"v1/me/top/artists\"))\n\n spotify_artists['items'].each do |item|\n new_artist = Artist.find_or_create_by(name: item['name'], spotify_id: item['id'])\n new_artist.update(genre_list: item['genres'])\n artists << new_artist\n end\n\n artists\n\n end", "def to_track(raw_track)\n Models::Track.new( \n {\n :url => raw_track[\"href\"],\n :title => raw_track[\"name\"],\n :album => nested_lookup(raw_track, \"album\",\"name\"),\n :artists => to_artists(raw_track[\"artists\"])\n }\n )\n end", "def get_metadata(resource_uri)\n log \"get_metadata for #{resource_uri}\"\n get \"#{resource_uri}/fcr:metadata\", \"text/plain\" \n end", "def raw_info\n # This is a public API and does not need signing or authentication\n request = \"/services/rest/?format=json&method=flickr.people.getInfo&nojsoncallback=1&user_id=#{uid}\"\n @raw_info ||= MultiJson.decode(access_token.get(request).body)\n rescue ::Errno::ETIMEDOUT\n raise ::Timeout::Error\n end", "def fetch_metadata_item(item)\n full_url = \"#{SOFTLAYER_API_QUERY_URL}/#{item}\"\n u = URI(full_url)\n net = ::Net::HTTP.new(u.hostname, u.port)\n net.ssl_version = :TLSv1_2\n net.use_ssl = true\n net.ca_file = ca_file_location\n res = net.get(u.request_uri)\n if res.code.to_i.between?(200, 299)\n res.body\n else\n logger.error(\"Mixin Softlayer: Unable to fetch item #{full_url}: status (#{res.code}) body (#{res.body})\")\n nil\n end\n rescue => e\n logger.error(\"Mixin Softlayer: Unable to fetch softlayer metadata from #{u}: #{e.class}: #{e.message}\")\n raise e\n end", "def metadata_get(id, api_version)\n path = \"/#{api_version}/meta-data/#{id}\"\n logger.trace(\"Mixin EC2: Fetching http://#{EC2_METADATA_ADDR}#{path}\")\n response = http_client.get(path, { 'X-aws-ec2-metadata-token': v2_token })\n case response.code\n when \"200\"\n response.body\n when \"404\"\n logger.trace(\"Mixin EC2: Encountered 404 response retrieving EC2 metadata path: #{path} ; continuing.\")\n nil\n else\n raise \"Mixin EC2: Encountered error retrieving EC2 metadata (#{path} returned #{response.code} response)\"\n end\n end", "def metadata\n output = shell!(\"ffprobe -v quiet -print_format json -show_format -show_streams #{file.path.shellescape}\")\n json = JSON.parse(output)\n json.with_indifferent_access\n end", "def metadata\n output = shell!(\"ffprobe -v quiet -print_format json -show_format -show_streams #{file.path.shellescape}\")\n json = JSON.parse(output)\n json.with_indifferent_access\n end", "def getLastFmSimilarTrackData artist_name, album_name, track_name\n 
url = getLastFmSimilarTrackDataUrl(artist_name, album_name, track_name);\n proxy = getProxy();\n puts url+\"-[\"+proxy+\"]\";\n #url = \"http://www.google.com\" \n begin\n html = open(url, \"User-Agent\" => getUseragent(), :proxy=>proxy) \n document = Hpricot(html)\n #document = Nokogiri::HTML(html)\n #ar = document.css('div.skyWrap table.candyStriped') \n\n ar = document.search(\"//div[@class='skyWrap']\").search(\"//table[@class='candyStriped chart']\");\n #puts ar \n return ar\n rescue Exception => e\n return \"\"\n end \n end", "def metadata\n @meta_data\n end", "def artists\n if RESPONSE.code == 200\n # Return data to page\n JSON.parse(RESPONSE.to_s)['topartists']['artist']\n else\n # print error message\n \"Error Code #{RESPONSE.code}\"\n end\n end", "def similar artist\n url = \"http://developer.echonest.com/api/v4/artist/similar?api_key=#{ECHONEST_API_KEY}&bucket=years_active&name=#{artist}&format=json&start=0&results=#{RESULTS}\"\n parseURL url\n #also get their hotness?\nend", "def store_metadata\n html = HTMLParse.new(url)\n self.meta_title = html.title\n self.meta_desc = html.desc\n end", "def metadata\n unless @metadata\n\n unless cached?\n begin\n Zip::ZipFile.open(@path) do |zip|\n zip.extract('iTunesMetadata.plist', Cache.path_to(plist))\n end\n rescue Zip::ZipError => e\n raise Invalid, e.message\n end\n end\n\n @metadata = CFPropertyList.native_types(CFPropertyList::List.new(:file => Cache.path_to(plist)).value)\n end\n\n @metadata\n end", "def top_tracks(artist)\n get(:standard, {:method => \"artist.getTopTracks\", :artist => artist})\n end", "def response_metadata=(_); end", "def metadata_url\n \"#{service_url}/$metadata\"\n end", "def metadata_url\n \"#{service_url}/$metadata\"\n end", "def load_streaming_history(session)\n \n file = File.read(attachment.file.file)\n streaming_history = JSON.parse(file)\n \n streaming_history.each do |hash|\n \n track_name = hash[\"trackName\"]\n artist_name = hash[\"artistName\"]\n \n # NOTE: Below request to Spotify may need to be re-run several times for it to be successful\n query_tracks = RSpotify::Track.search(track_name)\n query_track = query_tracks.select do |t|\n t.artists.any?{|a| a.name == artist_name}\n end\n\n # Query artists in addition to tracks (WARNING: SLOW)\n # query_tracks = RSpotify::Track.search(track_name)\n # query_artists = RSpotify::Artist.search(artist_name)\n # query_artist_names = query_artists.map{|a| a.name}\n \n # query_track = query_tracks.select do |t|\n # t.artists.any? 
do |a|\n # query_artist_names.include?(a.name)\n # end\n # end\n \n track = query_track.first\n\n if track.nil?\n self.missing_songs += 1\n save\n next\n end\n\n # Convert information from simple to full\n # track.complete!\n # track.album.complete!\n # track.artists.each{|a| a.complete!}\n\n song = Song.find_by(spotify_id: track.id)\n \n if !song.present?\n \n artist_ids = []\n track.artists.each do |a|\n artist = Artist.find_or_create_by(spotify_id: a.id)\n artist.update(name: a.name)\n artist_ids << artist.id\n end\n \n genre_ids = []\n track.artists.each do |a|\n a.genres.each do |g|\n genre = Genre.find_or_create_by(name: g)\n genre_ids << genre.id unless genre_ids.include?(genre.id)\n end\n end\n \n album = Album.find_or_create_by(spotify_id: track.album.id)\n album.update(title: track.album.name, image_url: track.album.images[1][\"url\"]) # Numbers correspond to image size (0 -> 640x640, 1 -> 300x300, 2 -> 64x64)\n \n song = album.songs.create(spotify_id: track.id, title: track.name, preview_url: track.preview_url, external_url: track.external_urls[\"spotify\"], artist_ids: artist_ids, genre_ids: genre_ids)\n \n end\n \n self.song_reports.create(song_id: song.id, end_time: hash[\"endTime\"].to_datetime, ms_played: hash[\"msPlayed\"])\n \n end\n \n end", "def metadata\n value_of(:metadata, JSON.method(:pretty_generate), '{}')\n end", "def get_similar( params )\n xml = LastFM.get( \"track.getSimilar\", params )\n xml.find('similartracks/track').map do |track|\n LastFM::Track.from_xml( track )\n end\n end", "def inspect\n \"#{@artist}: #{@track} (#{@album})\"\n end", "def images artist\n url = \"http://developer.echonest.com/api/v4/artist/images?api_key=#{ECHONEST_API_KEY}&name=#{artist}&format=json&results=#{RESULTS}&start=0&license=unknown\"\n result = parseURL url\n result[\"response\"][\"images\"]\nend", "def metadata\n parse_tarball_metadata do |parsing_errors, metadata|\n if parsing_errors.any?\n Metadata.new\n else\n metadata\n end\n end\n end", "def fetch_metadata(**options)\n request = Protocol::TopicMetadataRequest.new(**options)\n\n @connection.send_request(request)\n end", "def play(track, token)\n body = {\"uris\": [track[:id]]}\n headers = {\n \"Accept\": \"application/json\",\n \"Authorization\": \"Bearer #{token}\"\n }\n putResponse = HTTParty.put(\"https://api.spotify.com/v1/me/player/play\", :body => body.to_json, :headers => headers)\n\n # 204 indicates a success\n if putResponse.code == 204\n return \"Playing #{track[:name]} by #{track[:artist]}\"\n else\n return \"Error!\"\n end\nend", "def get_tracks!\n tracks = RSpotify::Track.search(self.keyword, limit: 50, offset: 0).sort_by(&:popularity)\n\n tracks.delete_if { |t| t.popularity < 45 }\n tracks = tracks.uniq { |t| t.artists.first.name } \n\n raise 'The playlist could not be generated' if tracks.size < 4\n return tracks.reverse\n end", "def metadata_by_id(file_id)\n if file_id.is_a? 
String\n client = google_api_client\n metadata = client.get_file(\n file_id,\n fields: 'id, name, thumbnailLink, webContentLink, webViewLink, trashed'\n )\n validate_metadata(metadata)\n metadata\n end\n end", "def biography artist\n url = \"http://developer.echonest.com/api/v4/artist/biographies?api_key=#{ECHONEST_API_KEY}&name=#{artist}&format=json\"\nend", "def stats\n @stats = time_data Track.all\n @cloud = word_cloud Track.pluck(:artist), split: false, limit: 60\n\n respond_to do |format|\n format.html # stats.html.erb\n format.json { render json: time_data(Track.all, :hash), callback: params[:callback] }\n format.xml { render xml: time_data(Track.all, :hash) }\n end\n end", "def parse_json(json)\n case json[\"type\"]\n when \"track\"\n {\n title: json[\"name\"],\n artist: json[\"artists\"].map { |i| i[\"name\"] }.join(\", \"),\n album: json[\"album\"][\"name\"],\n thumbnail: json[\"album\"][\"images\"].sort_by { |i| i[\"height\"] }.last[\"url\"],\n length: json[\"duration_ms\"].to_i / 1000.0,\n preview_url: json[\"preview_url\"],\n }\n when \"album\"\n {\n\n }\n end\nend", "def info(options={})\n get(:standard, {:method => \"album.getInfo\"}.merge(options))\n end", "def get_metadata(url, timeout = 5)\n stripped_url = url.strip\n\n # If there is an error, we just return nil\n begin\n if doc = Nokogiri::HTML(open(stripped_url, {\"User-Agent\" => DEFAULT_USER_AGENT, :read_timeout => 10}))\n metadata = {}\n metadata[:url] = stripped_url\n metadata[:title] = title(doc)\n metadata[:description] = description(doc)\n metadata[:canonical_url] = canonical_url(doc)\n metadata[:language] = language(doc)\n metadata\n end\n rescue\n nil\n end\n end", "def object_definition_metadata\n version = self.api_version.nil? ? 'latest' : self.api_version\n response = ap_client(version).metadata.fetch\n\n Rails.logger.info \"response: \" + response.inspect\n parsed_json = []\n case response.status\n when 200\n begin\n parsed_json = ActiveSupport::JSON.decode(response.body)\n rescue MultiJson::DecodeError\n raise \"Unable to decode the JSON message.\"\n end\n else\n raise \"Unable to get a response.\"\n end\n\n parsed_json\n end", "def metadata\n metadata = {}\n @file.data.each { |key, value| metadata[key.to_sym] = value }\n\n metadata[:type] = @file.class.name.split('::')[1].downcase\n metadata[:url] = @file.url\n\n metadata[:slug] = slug\n\n metadata[:posted_at] = @file.date.to_time.to_i if @file.respond_to? :date\n metadata[:tags] = tags\n\n metadata\n end", "def metadata; end", "def metadata; end", "def metadata; end", "def metadata; end" ]
[ "0.68926036", "0.6781096", "0.6730962", "0.6594536", "0.65565485", "0.65055186", "0.649459", "0.64723015", "0.6459251", "0.643704", "0.6416832", "0.63851583", "0.6374228", "0.6330928", "0.6301184", "0.6259826", "0.6255222", "0.6243315", "0.61468714", "0.61323196", "0.61313915", "0.61270523", "0.60606676", "0.60601145", "0.60601145", "0.60473555", "0.60218817", "0.6003274", "0.5971732", "0.5971103", "0.5877299", "0.5874142", "0.5872673", "0.587086", "0.5868045", "0.5841148", "0.5840673", "0.58322096", "0.58231133", "0.5818723", "0.58108073", "0.5805689", "0.58037907", "0.5796284", "0.57883185", "0.57855827", "0.57844687", "0.5783813", "0.5783813", "0.57822275", "0.577681", "0.5772366", "0.5761921", "0.57616764", "0.5760189", "0.575542", "0.5754408", "0.57499087", "0.57485914", "0.574632", "0.5746281", "0.573555", "0.57296497", "0.57286114", "0.572615", "0.57182634", "0.5716078", "0.5712161", "0.5712161", "0.5709672", "0.5704997", "0.57043755", "0.5698821", "0.56988156", "0.56853694", "0.5673513", "0.5671417", "0.56532395", "0.56532395", "0.5650972", "0.56448865", "0.5641261", "0.56277925", "0.56274736", "0.56176156", "0.5608676", "0.56016093", "0.5598798", "0.5576703", "0.55735594", "0.5573393", "0.55730766", "0.5562088", "0.55543077", "0.55500734", "0.5547035", "0.5542938", "0.5542938", "0.5542938", "0.5542938" ]
0.64166534
11
Finds the song on YouTube. It does this by searching in the format "Song Name Artist" on YouTube, then it parses the XML data with Nokogiri and picks the first video
def youtube
  download = HTTParty.get("https://gdata.youtube.com/feeds/api/videos?q=#{URI.escape(@track)}")
  if !download.body.empty?
    doc = Nokogiri::HTML(download.body)
    vids = doc.xpath('//link[contains(@href, "https://www.youtube.com/watch")]').to_a
    video = vids.first
    # Extracting the Video-ID
    if video
      query_string = URI.parse(video['href']).query
      Hash[URI.decode_www_form(query_string)]
    else
      "Can't find a decent YouTube mirror."
    end
  else
    flash[:notice] = 'Error with Youtube! Try again in 30 seconds!'
  end
end
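For context, a minimal standalone sketch of the same ID-extraction step; the extract_video_id helper name and the sample feed markup are assumptions, not part of the record above, and the legacy gdata search endpoint the method calls has since been retired:

require 'nokogiri'
require 'uri'

# Hypothetical standalone helper mirroring the extraction above: find the first
# <link> element pointing at a YouTube watch URL and return its "v" query parameter.
def extract_video_id(feed_body)
  doc  = Nokogiri::XML(feed_body)
  link = doc.xpath('//link[contains(@href, "youtube.com/watch")]').first
  return nil unless link

  query = URI.parse(link['href']).query
  Hash[URI.decode_www_form(query)]['v']
end

sample = '<feed><entry><link href="https://www.youtube.com/watch?v=dQw4w9WgXcQ"/></entry></feed>'
puts extract_video_id(sample) # => dQw4w9WgXcQ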
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_id\n url = \"https://www.youtube.com/results?search_query=#{@artist_name.gsub(\" \",\"+\")}+#{@song_name.gsub(\" \",\"+\")}\"\n#pp url\n web_page = HTTParty.get(url)\n\n @parsed = Nokogiri::HTML(web_page)\n ref = @parsed.css(\"a\").collect{|link| link[\"href\"]}.select{|href| href && href.include?(\"/watch?v=\")}.uniq[0]\n return ref.gsub(\"/watch?v=\", \"\") unless !ref\n end", "def search\n http, request = frame_request @keyword\n\n # Getting response from YouTube\n response = http.request(request)\n\n # Parsing for serialization of the response\n json_response = JSON.parse(response.body)\n\n # Any mishaps inside the code-block will return an empty array.\n begin\n json_response['feed']['entry'].map{ |rsp| \n rsp['media$group']['media$content'].first.send(:[],'url')\n }\n rescue\n []\n end\n end", "def get_video_title(youtube_url)\n doc = Hpricot(open(youtube_url))\n (doc/\"title\").each do |title|\n return $1 if title.inner_text =~ %r{YouTube - (.+)}\n end\nend", "def video_xml\n xml = Builder::XmlMarkup.new(:indent => 2)\n xml.instruct! :xml, :version => '1.0', :encoding => nil\n xml.entry :xmlns => 'http://www.w3.org/2005/Atom',\n 'xmlns:media' => 'http://search.yahoo.com/mrss/',\n 'xmlns:yt' => 'http://gdata.youtube.com/schemas/2007' do\n xml.media :group do\n xml.media :title, @opts[:title], :type => 'plain'\n xml.media :description, @opts[:description], :type => 'plain'\n xml.media :category, @opts[:category], :scheme => 'http://gdata.youtube.com/schemas/2007/categories.cat'\n @opts[:developer_tags].each do |developer_tag|\n xml.media :category, developer_tag, :scheme => 'http://gdata.youtube.com/schemas/2007/developertags.cat'\n end\n xml.tag! 'media:keywords', @opts[:keywords].join(\",\")\n end\n end\n xml.target!\n end", "def video_xml\n xml = Builder::XmlMarkup.new(:indent => 2)\n xml.instruct! :xml, :version => '1.0', :encoding => nil\n xml.entry :xmlns => 'http://www.w3.org/2005/Atom',\n 'xmlns:media' => 'http://search.yahoo.com/mrss/',\n 'xmlns:yt' => 'http://gdata.youtube.com/schemas/2007' do\n xml.media :group do\n xml.media :title, @opts[:title], :type => 'plain'\n xml.media :description, @opts[:description], :type => 'plain'\n xml.media :category, @opts[:category], :scheme => 'http://gdata.youtube.com/schemas/2007/categories.cat'\n @opts[:developer_tags].each do |developer_tag|\n xml.media :category, developer_tag, :scheme => 'http://gdata.youtube.com/schemas/2007/developertags.cat'\n end\n xml.tag! 'media:keywords', @opts[:keywords].join(\",\")\n end\n end\n xml.target!\n end", "def show\n @song = Song.find(params[:id])\n @yt_id = YoutubeSearch.search(\"#{@song.name} #{@song.artist.name}\").first['video_id']\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @song }\n end\n end", "def parse_song(song_url)\r\n\t\t\thtml_doc = get_html_doc(song_url)\r\n\t\t\tsong_info = html_doc.css('table table').css('tr td').map{ |data| data.text.split(':')[1].sub!(\" \", \"\") }\r\n\t\t\tsong_url = get_128kbps_mp3_url(html_doc)\r\n\t\t\tSong.parse(song_info, song_url)\r\n\t\tend", "def video_by(vid)\n video_id = vid =~ /^http/ ? vid : \"http://gdata.youtube.com/feeds/api/videos/#{vid}\"\n parser = YouTubeG::Parser::VideoFeedParser.new(video_id)\n parser.parse \n end", "def video_by(video)\n vid = nil\n vid_regex = /(?:youtube.com|youtu.be).*(?:\\/|v=)([\\w-]+)/\n if video =~ vid_regex\n vid = $1\n else\n vid = video\n end\n video_id =\"http://gdata.youtube.com/feeds/api/videos/#{vid}?v=2#{@dev_key ? 
'&key='+@dev_key : ''}\"\n parser = YouTubeIt::Parser::VideoFeedParser.new(video_id)\n parser.parse\n end", "def parse_youtube(url)\n source = `#{curl_path} \"http://kej.tw/flvretriever/\" -d \"videoUrl=#{url}\" -A \"foo\"`\n raise \"Cannot parse youtube URL\" unless(source =~ /<textarea id=\"outputfield\">([^<]+)<\\/textarea>/)\n $1\n end", "def parse_video_id_for_youtube\n parse_video_id_for_regexp_and_index(YOUTUBE_REGEXP, 6)\n end", "def find_artist(your_genre)\n if your_genre ==\"rnb2\"\n rnb = [%{<iframe width=\"800\" height=\"508\" src=\"https://www.youtube.com/embed/IPfJnp1guPc\" frameborder=\"0\" allowfullscreen></iframe>},%{<iframe width=\"800\" height=\"508\" src=\"https://www.youtube.com/embed/nS_V9iXBtxw\" frameborder=\"0\" allowfullscreen></iframe>}]\n return rnb.sample\n elsif your_genre ==\"pop2\"\n pop = [%{<iframe width=\"800\" height=\"508\" src=\"https://www.youtube.com/embed/1nydxbGhgv8\" frameborder=\"0\" allowfullscreen></iframe>},%{<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/fuyVJYP7GJk\" frameborder=\"0\" allowfullscreen></iframe>},%{<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/tCX2axvbE4o\" frameborder=\"0\" allowfullscreen></iframe>}]\n return pop.sample\n elsif your_genre ==\"hiphop2\"\n hiphop = [%{<iframe width=\"800\" height=\"508\" src=\"https://www.youtube.com/embed/3j8ecF8Wt4E\" frameborder=\"0\" allowfullscreen></iframe>},%{<iframe width=\"800\" height=\"508\" src=\"https://www.youtube.com/embed/MZZkURXq5Y4\" frameborder=\"0\" allowfullscreen></iframe>},%{<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/rfgbzlEYhIM\" frameborder=\"0\" allowfullscreen></iframe>}]\n return hiphop.sample\n end\nend", "def index\n if !params[:artist_name].empty?\n lastfm = Lastfm.new(ENV[\"LASTFM_KEY\"], ENV[\"LASTFM_SECRET\"])\n @videos = Yt::Collections::Videos.new\n artistspotify = RSpotify::Artist.search(params[:artist_name])\n artistlastfm = lastfm.artist.get_info(artist: params[:artist_name], autocorrect: 1)\n \n @artistbio = artistlastfm[\"bio\"][\"summary\"]\n puts artistspotify\n @artist = artistspotify.first\n @tracks = @artist.top_tracks(:US)\n # <% @videoId = @videos.where(q: \"music video for #{@artist.name} #{track.name}\", order: 'relevance').first.id %>\n # @videoId = @videos.where(q: params[:artist_name], safe_search: 'none').first.id\n else\n redirect_to root_path\n end \n end", "def youtube_code\n code = self.link.match(/v=(.*)/)\n code.captures.first\n end", "def youtube_id\n\t\tif youtube?\n\t\t\treturn path[\"stoffi:track:youtube:\".length .. 
-1]\n\t\telse\n\t\t\treturn \"\"\n\t\tend\n\tend", "def get_video_urls(feed_url)\n Youtube.notify \"Retrieving videos...\"\n urls_titles = {}\n result_feed = Nokogiri::XML(open(feed_url))\n urls_titles.merge!(grab_urls_and_titles(result_feed))\n\n #as long as the feed has a next link we follow it and add the resulting video urls\n loop do\n next_link = result_feed.search(\"//feed/link[@rel='next']\").first\n break if next_link.nil?\n result_feed = Nokogiri::HTML(open(next_link[\"href\"]))\n urls_titles.merge!(grab_urls_and_titles(result_feed))\n end\n\n filter_urls(urls_titles)\n end", "def video\n YouTubeApi.find_video(youtube_id)\n end", "def youtube_tracks_from_top_tracks(top_tracks)\n top_tracks.each do |top_track|\n title = top_track[\"name\"]\n\n top_track_url = top_track[\"url\"]\n html = self.class.get(top_track_url)\n youtube_urls = html.scan(/embed src=\\\"http:\\/\\/www.youtube.com\\/v\\/([-\\w\\.]+)?/)\n\n if youtube_urls.count > 0\n track_youtube_id = youtube_urls.first.first\n song = Song.new(@artist, title, track_youtube_id)\n @tracks.push(song)\n end\n end\n end", "def scrape\n @videos = []\n\n @search_result.search(\"//div[@class='vEntry']\").each do |video_html|\n video = Youtube::Video.new\n video.id = scrape_id(video_html)\n video.author = scrape_author(video_html)\n video.title = scrape_title(video_html)\n video.length_seconds = scrape_length_seconds(video_html)\n video.rating_avg = scrape_rating_avg(video_html)\n video.rating_count = scrape_rating_count(video_html)\n video.description = scrape_description(video_html)\n video.view_count = scrape_view_count(video_html)\n video.thumbnail_url = scrape_thumbnail_url(video_html)\n video.tags = scrape_tags(video_html)\n video.upload_time = scrape_upload_time(video_html)\n video.url = scrape_url(video_html)\n\n check_video video\n\n @videos << video\n end\n\n @video_count = scrape_video_count\n @video_from = scrape_video_from\n @video_to = scrape_video_to\n\n raise \"scraping error\" if (is_no_result != @videos.empty?)\n\n @videos\n end", "def youtube_url\n return \"https://www.youtube.com/results?search_query=#{CGI.escape(self.name)}\"\n end", "def getYahooMusicData altnet_name\n url = getYahooMusicDataUrl(altnet_name)\n puts url\n \n begin\n html = open(url, \"User-Agent\" => getUseragent(), :proxy=>getProxy()) \n document = Hpricot(html)\n ar = document.search(\"//ul[@id='artistPgSimArtists']\");\n return ar\n rescue Exception => e\n return \"\"\n end \n end", "def youtube_id\n rexp = /v=(.*)/ or /embed\\/(.*)/\n rexp.match(url).captures.first\n end", "def searchSpotify(inArtist)\n tArtist = CGI.escape(inArtist)\n \n outstring = open('http://ws.spotify.com/search/1/artist?q='+tArtist, 'User-Agent' => 'Ruby-Wget').read\n\n outdata = outstring.split(\"<opensearch:totalResults>\")\n\n outdata.delete_at(0);\n\n if outdata[0].split(\"</opensearch:totalResults>\")[0].to_i > 0\n outinfo = outstring.split(\"<artist href=\");\n return outinfo[1].split(\">\")[0];\n else\n return 0\n end\n end", "def find_video_for_track(track)\n query = track[:name] + ' - ' + track[:artist]\n results = @yt_client.videos_by({query: query, video_format: 5, categories: {either: [:music, :entertainment]}})\n results = beat_match_rank(track, results.videos)\n results[0]\n end", "def find_song(artist, url_slug)\n\n\t\tartist.songs.uniq.each do |song|\n\t\t\tif song.song_url_slug == url_slug\n\t\t\t\t@song = song\n\n\t\t\telse\n\n\t\t\tend\n\t\tend\n\tend", "def show_youtube\n @youtube = YoutubeMovie.find(params[:id])\n s = @youtube.url.index(\"?v=\")\n e 
= @youtube.url.index(\"&\")\n @id = @youtube.url[s+3..e-1] rescue 1\n end", "def index \n @pos = 0\n if params[:name] != nil && (params[:name][0..3] == \"http\" || params[:name][0..2] == \"www\")\n client = YouTubeIt::Client.new(:dev_key => \"AI39si4IGrrB9qyNNgKqtW7YKqTSxpG54pBcyVZ8wQANOPcKeDVgGDorcP9DkrxFcPsI_gW3pJ0T2oAFB3stXtaWG_hbAsdNfA\")\n playlist_id = params[:name].split('=').last\n new_playlist = Playlist.new\n new_playlist.update_attributes(:name => client.playlist(playlist_id).title, :position => 1)\n new_playlist.save\n @isprelist = true\n @agent ||= init_agent\n page = @agent.get(params[:name])\n len = page.at(\"#watch7-playlist-length\").text.to_i rescue nil\n if len == nil\n len = page.at(\".first .stat-value\").text.to_i\n end\n startindex = 1\n maxresults = 0\n while len - startindex > 0\n if len - startindex > 50\n maxresults = 50\n else\n maxresults = len - maxresults\n end\n playlist_vids = client.playlist(playlist_id, {'start-index' => startindex, 'max-results' => maxresults}).videos\n for vid in playlist_vids\n new_vid = new_playlist.videos.build\n title = vid.title\n norm_url = \"www.youtube.com/watch?v=\" + vid.media_content[0].url.split('?')[0].split('/').last\n url = vid.media_content[0].url rescue nil\n thumb_url = vid.thumbnails[0].url\n width = 560.to_s\n height = 315.to_s\n embed = '<iframe width=' + width + ' height=' + height + ' src=' + url + ' frameborder=\"0\" allowfullscreen></iframe>'\n new_vid.copy_vids(title, norm_url, thumb_url, \"Youtube\", embed)\n startindex += 1\n end\n end\n @playlists = new_playlist\n else\n current_user.update_attribute(:last_psearch, params[:name])\n @isprelist = false\n @playlists = Playlist.order(:name).where(\"name like ?\", \"%#{current_user.last_psearch}%\")\n @playlists = @playlists.order(\"cached_votes_up DESC\")\n @playlists = @playlists.where(\"privacy = 'Public'\")\n @playlists = @playlists.paginate(:page => params[:page], :per_page => 2)\n end\n respond_to do |format|\n format.html\n format.js\n end\n end", "def video_doc(xml)\n Nokogiri::XML.parse(\"<root xmlns:video='#{SitemapGenerator::SCHEMAS['video']}'>#{xml}</root>\")\n end", "def youtube_movie_embed\n str1 = \"http://www.youtube.com/v/\" \n str2 = movie_home_url.split('v=',2).last unless movie_home_url.nil?\n str3 = \"&hl=en_US&fs=1&\"\n return [str1,str2,str3].join \n end", "def metadata\n puts \"Adding Metadata...\"\n doc = Nokogiri::HTML(open(\"http://www.last.fm/search?q=#{query}&type=track\"))\n url = doc.css(\"span.chartlist-ellipsis-wrap\").first.css(\"a.link-block-target\").first.attr('href')\n ch = url.gsub('/music/', \"\")\n artist, title = ch.split(\"/_/\")\n artist = artist.gsub('+', \" \")\n title = title.gsub('+', \" \")\n doc = Nokogiri::HTML(open(\"http://www.last.fm#{url}\"))\n album = doc.css(\"h3 a\").first\n begin\n Mp3Info.open(\"#{query.gsub(\"+\",\"-\")}.mp3\") do |mp3|\n mp3.tag.title = \"#{URI.unescape(title)}\".strip\n mp3.tag.artist = \"#{URI.unescape(artist)}\".strip\n mp3.tag.album = \"#{URI.unescape(album.content)}\".strip\n end\n puts \"Done\"\n rescue\n puts \"Fail\"\n end\n end", "def youtube_embed(youtube_url)\n\t # Regex from # http://stackoverflow.com/questions/3452546/javascript-regex-how-to-get-youtube-video-id-from-url/4811367#4811367\n\t youtube_url.to_s[/^.*((v\\/)|(embed\\/)|(watch\\?))\\??v?=?([^\\&\\?]*).*/]\n\t youtube_id = $5\n\t youtube_id\n\tend", "def call\n video = search_youtube\n if video.present?\n context.playlist.add_video!(\n title: video.title,\n url: \"https://www.youtube.com/watch?v=#{video.id}\",\n 
user: context.user\n )\n context.dj.new_video_added!\n context.message = \"Success! #{video.title} was added to the playlist.\"\n else\n context.errors = \"Sorry but couldn't find any vides for #{query}.\"\n context.fail!\n end\n end", "def youtube_video_id\n video_id = link.match(/\\?v=/) ? link.split('?v=')[1] : link.split('/').last\n video_id = video_id.split('&')[0] if video_id =~ /&/\n self.vid = video_id\n end", "def extractYouTubeID(url)\n YoutubeVideoId.extract(url)\n end", "def youtube_embed(youtube_url)\n if youtube_url[/youtu\\.be\\/([^\\?]*)/]\n youtube_id = $1\n else\n # Regex from # http://stackoverflow.com/questions/3452546/javascript-regex-how-to-get-youtube-video-id-from-url/4811367#4811367\n youtube_url[/^.*((v\\/)|(embed\\/)|(watch\\?))\\??v?=?([^\\&\\?]*).*/]\n youtube_id = $5\n end\n end", "def song\n fetch('cowboy_bebop.song')\n end", "def search_video(aTitle)\n result = catalogue.find { |video| video.title.strip == aTitle.strip }\n if result.nil?\n msg = \"Video with title '#{aTitle}' isn't in the catalogue.\"\n $stderr.puts msg\n end\n\n return result\n end", "def now_playing\n doc = request(@user, \"nowplaying\")\n station = []\n doc.xpath('//rss/channel/item').each do |node|\n station << { :title => node.xpath('title').text.strip,\n :link => node.xpath('link').text.strip,\n :description => node.xpath('description').text.strip,\n :date => node.xpath('pubDate').text.strip,\n :artwork => node.xpath('pandora:stationAlbumArtImageUrl').text.strip,\n :songSeed_song => node.xpath('pandora:seeds/pandora:songSeed/pandora:song').text.strip,\n :songSeed_artist => node.xpath('pandora:seeds/pandora:songSeed/pandora:artist').text.strip,\n :composerSeed => node.xpath('pandora:seeds/pandora:composerSeed/pandora:composer').text.strip,\n :artistSeed => node.xpath('pandora:seeds/pandora:artistSeed/pandora:artist').text.strip }\n end\n station\n end", "def search(artist, song)\n RSpotify.authenticate(ENV['SPOTIFY_CLIENT_ID'], ENV['SPOTIFY_CLIENT_SECRET'])\n result = RSpotify::Track.search(\"#{song} #{artist}\", limit: 1, market: 'US').first\n if !!result\n format_result(result)\n else\n nil\n end\n end", "def get_youtube_video_id(url)\n # find id\n result = url.match /https*\\:\\/\\/.*youtube\\.com\\/watch\\?v=(.*)/\n # return id or nil\n result ? result[1] : nil\n end", "def where(data={})\n if data[:id]\n return self.find(data[:id])\n\n elsif (artists = (data[:artist] || data[:artists]))\n artists = [artists] unless artists.kind_of? Array\n artists.collect!{|a| \"\\\"#{a}\\\"\" }\n songs = Request.get('songs/byartists', :artists => artists.join(','))\n\n elsif data[:pattern]\n songs = Request.get('songs', :pattern => data[:pattern])\n end\n\n songs = songs.collect {|s| self.new(s) }\n songs.select! {|s| s.title =~ data[:title] } if data[:title]\n songs\n end", "def parse_youtube_id(url)\n url =~ /[v]=([^&]*)/\n id = $1\n \n if id.nil?\n # when there is no match for v=blah, then maybe they just \n # provided us with the ID the way the system used to work... 
\n # just \"E4Fbk52Mk1w\"\n return url \n else\n # else we got a match for an id and we can return that ID...\n return id\n end\n end", "def extract_video_page_urls(webpage,options)\r\n puts \"Extracting data from html5 data\"\r\n webpage.css('li.regularitem').each do |post|\r\n link = post.css('h4.itemtitle').css('a').first\r\n description = post.css('div.itemcontent').first.text\r\n download_episode(link.child.text,link['href'],description, options)\r\n end\r\nend", "def youtube_decscription\n regex_string = WebStat::Configure.get[\"id_extraction_regexs\"][\"youtube\"]\n if @url.match(regex_string)\n id = @url.gsub(%r{#{regex_string}}, '\\1')\n youtube = Google::Apis::YoutubeV3::YouTubeService.new\n youtube.key = WebStat::Configure.get[\"api_keys\"][\"youtube\"]\n response = youtube.list_videos(:snippet, id: id)\n response.items.first.snippet.description\n end\n end", "def first_movie\n doc.search(\"a[title='Первый фильм']\").text\n end", "def load_youtube\n Video\n YouTube\n end", "def load_youtube\n Video\n YouTube\n end", "def search( params )\n xml = LastFM.get( \"track.search\", params )\n xml.find('results/trackmatches/track').map do |track|\n LastFM::Track.from_xml( track )\n end\n end", "def similar artist\n url = \"http://developer.echonest.com/api/v4/artist/similar?api_key=#{ECHONEST_API_KEY}&bucket=years_active&name=#{artist}&format=json&start=0&results=#{RESULTS}\"\n parseURL url\n #also get their hotness?\nend", "def find_song_by_title(songs, title)\n songs.find do |song|\n song[:title] == title\n end\n end", "def youtube?\n\t\treturn path && path.starts_with?(\"stoffi:track:youtube:\")\n\tend", "def video_by_status_url(status_url)\n parser = YouTubeG::Parser::VideoFeedParser.new(status_url)\n video = parser.parse \n return video \n end", "def next_song(set_as_playing = false)\n @mode = :user\n \n play_id, filename, title, song_id = nil\n begin\n # this query will get a song which was cut off while playing first, and failing that, will get the next song on the queue which hasn't been played\n play_id, filename, title, song_id = db.get_first_row \"select id, filename, title, song_id from rubedo_plays where queued_at IS NOT NULL order by queued_at asc limit 1\"\n return nil unless play_id\n rescue\n log.error \"Error at some point during finding #{'(and setting) ' if set_as_playing}next song. 
Filename was: #{filename}\\n#{$!}\" if log\n return nil\n end\n\n mark_start!(play_id, song_id) if set_as_playing\n \n [play_id, filename, title]\n end", "def youtube_embed(youtube_url)\r\n\tif youtube_url[/youtu\\.be\\/([^\\?*)/]\r\n\t\tyoutube_id = $1\r\n\telse \r\n\t# Regex from # http://stackoverflow.com/questions/3452546/javascript-regex-how-to-get-youtube-video-id-from-url/4811367#4811367\r\n\t\tyoutube_url[/^.*((v\\/)|(embed\\/)|(watch\\?))\\??v?=?([^\\&\\?]*).*/]\r\n\t\tyoutube_id = $5 \r\n\tend\r\n\t#Look into a different method for this \r\n\t%Q{<iframe title=\"YouTube video player\" width=\"600px\" height=450px\" src=\"http://www.youtube.com/embed/#{ youtube_id }\" frameborder=\"0\" allowfullscreen></iframe>}\r\n end", "def get_youtube_video(id)\n uri = URI(\"https://noembed.com/embed?url=https://www.youtube.com/watch?v=#{id}\")\n begin\n resp_body = Net::HTTP.get(uri)\n title = JSON.parse(resp_body)['title']\n rescue\n title = \"Title wasn't found\"\n end\n # render html for youtube video embed\n \"<div class='video-title'>#{title}</div><iframe width='420' frameborder='0' height='315'\"+\n \" src='https://www.youtube-nocookie.com/embed/#{id.to_s}' allowfullscreen></iframe>\"\n end", "def item_find\n find(playlist_url(@response.old_index))\n end", "def download_all_videos_from_pl id,d_name\n\tmy_directory = \"#{@dir}#{d_name}\"\n\n\t videos_already_saved_array = get_all_titles_from_dir my_directory\n\n\t videos_already_saved_titles, videos_already_saved_paths = \n\t \t\t\t\t\tvideos_already_saved_array.map{|e| e[0]}, videos_already_saved_array.map{|e| e[2]}\n\n\t@current_playlist_video_titles.each do |v|\n\t\t\tsource = 'youtube'\n\t\t\tindex = @current_playlist_video_titles.index(v)\n\t\t\tp index\n\t\t\tvid = @current_playlist_video_ids[index]\n\t\t\tp vid\n\n\t\tif !videos_already_saved_titles.include?(v)\t\n\t\t\t\n\t\t\tvideo_string = \"http://www.youtube.com/watch?v=#{vid}\"\n\t\t\tdownload_video = \"viddl-rb #{video_string} -d 'aria2c' -s '#{my_directory}'\"\n\n\t\t\tcaptured_stdout = ''\n\t\t\tcaptured_stderr = ''\n\t\t\tstdin, stdout, stderr, wait_thr = Open3.popen3(\"#{download_video}\")\n\t\t\tpid = wait_thr.pid\n\t\t\tstdin.close\n\t\t\tcaptured_stdout = stdout.gets(nil)\n\t\t\taborted = captured_stdout.include? \"Download aborted\"\n \t\t\t# captured_stderr = stderr.read\n\t\t\twait_thr.value # Process::Status object returned\n\n\t# extract the info we need\n\t\t\tputs \"STDOUT: \" + captured_stdout\n\t\t\t# puts \"STDERR: \" + captured_stderr\n\n\t\t\t# go to Vimeo to download if it doesnt work\n\t\t\tif aborted\n\t\t\t\tartist_match_results = match_best v, @rule_artists\n\t\t\t\t@song_artist = artist_match_results[0]\n\t\t\t\tremaining_words = artist_match_results[1]\n\t\t\t\tsong_match_results = match_best remaining_words, @rule_titles\n\t\t\t\t@song_title = song_match_results[0]\n\n\t\t\t\tsource='vimeo'\n\t\t\t\tget_vimeo_manually @song_artist,@song_title,@mydir,\"vimeo\"\n\t\t\t\t# Process.kill(\"KILL\", stream.pid)\n\t\t\t\t# get_vimeo_manually v,my_directory,source \n\t\t\tend\n\n\t\t\tp \"already have it\" if videos_already_saved_titles.include?(v)\n\t\tend\n\n\n\tend\n\n\t\nend", "def video_source(str)\n if str.include? 'www.youtube.com'\n return youtube_embed(str)\n elsif str.include? 
'youtu.be'\n return youtube_embed(str)\n else\n return nil\n end\n end", "def youtube_video_id\t\t\n\t\tif self.video_url.nil?\n\t\t\tnil\n\t\telse\n\t\t\tself.video_url.rpartition('/').last\n\t\tend\n\n\tend", "def youtube\n @data['social']['youtube']\n end", "def getEpisodeToPlay(client)\n\n\t\tbegin\n\t\t\tclient.click(\"NATIVE\", \"xpath=//*[@contentDescription='上へ移動']\", 0, 1)\n\t\t\tclient.sleep(2000)\n\t\t\tclient.click(\"NATIVE\", \"xpath=//*[@text='海外ドラマ' and @id='textView']\", 0, 1)\n\t\t\tclient.sleep(2000)\n\t\t\tif client.isElementFound(\"NATIVE\", \"xpath=//*[@text='見放題で楽しめる厳選良作!海外ドラマ編']\")\n\t\t\t\tclient.click(\"NATIVE\", \"xpath=(//*[@id='recyclerView' and ./preceding-sibling::*[./*[@text='見放題で楽しめる厳選良作!海外ドラマ編']]]/*/*/*[@id='imageView' and ./parent::*[@id='maskLayout']])[1]\", 0, 1)\n\t\t\t\tclient.sleep(2000)\n\t\t\telse\n\t\t\t\tclient.click(\"NATIVE\", \"xpath=//*[@id='searchButton']\", 0, 1)\n\t\t\t\tclient.sleep(2000)\n\t\t\t\tclient.click(\"NATIVE\", \"text=国内ドラマ一覧\", 0, 1)\n\t\t\t\tclient.sleep(2000)\n\t\t\t\tclient.click(\"NATIVE\", \"text=すべての作品\", 0, 1)\n\t\t\t\tclient.sleep(2000)\n\t\t\t\tclient.click(\"NATIVE\", \"text=見放題\", 0, 1)\n\t\t\t\tclient.sleep(3000)\n\t\t\t\tclient.click(\"NATIVE\", \"xpath=(//*[@id='recycler_view']/*/*/*[@id='thumbnail'])\", 0, 1)\n\t\t\t\tclient.sleep(2000)\n\t\t\tend\n\t\t\tclient.swipe2(\"Down\", 250, 2000)\n\t\t\tclient.sleep(3000)\n\t\trescue Exception => e\n\t\t\t$errMsgEpsdp = \"::MSG:: Exception occurrred while finding ELEMENT \" + e.message\n\t\tend\t\t\n\t\tbegin\n\t\t\tif client.isElementFound(\"NATIVE\", \"xpath=//*[@text='エピソードを選択']\")\n\t\t\t\tclient.click(\"NATIVE\", \"text=エピソードを選択\", 0, 1)\n\t\t\t\tclient.sleep(2000)\n\t\t\t\tclient.click(\"NATIVE\", \"xpath=(//*[@id='recycler_view']/*/*/*/*[@id='download_indicator' and ./parent::*[@id='image_container']])[1]\", 0, 1)\n\t\t\t\tclient.sleep(10000)\n\t\t\t\tHistoryPlay.new.playbackCheck(client)\n\t\t\t\tHistoryPlay.new.leavingPlayer(client)\n\t\t\telse\n\t\t\t\tputs \"::MSG:: This contents does not have episode list!!!\"\n\t\t\tend\n\t\trescue Exception => e\n\t\t\t$errMsgEpsdp = \"::MSG:: Error occurred while episode playing..\" + e.message\n\t\t\t$obj_rtnrs.returnNG\n\t\tend\t\t\n\t\tclient.sleep(2000)\n\tend", "def youtube_embed(youtube_url)\n if youtube_url[/youtu\\.be\\/([^\\?]*)/]\n youtube_id = $1\n else\n # Regex from # http://stackoverflow.com/questions/3452546/javascript-regex-how-to-get-youtube-video-id-from-url/4811367#4811367\n youtube_url[/^.*((v\\/)|(embed\\/)|(watch\\?))\\??v?=?([^\\&\\?]*).*/]\n youtube_id = $5\n end\n\n %Q{<iframe id=\"player\" type=\"text/html\" width=\"640\" height=\"390\"\n src=\"//www.youtube.com/embed/#{youtube_id}?enablejsapi=1&origin=*\" frameborder=\"0\"></iframe>}\n end", "def getLastfmArtistPopularity altnet_name\n url = getLastfmArtistPopularityUrl(altnet_name)\n puts url \n begin\n html = open(url, \"User-Agent\" => getUseragent(), :proxy=>getProxy()) \n document = Hpricot(html)\n #ar = document.search(\"//div[@id='catalogueHead']\");\n ar = document.search(\"//div[@id='catalogueHead']\").search(\"//p[@class='stats']\"); \n return ar\n rescue Exception => e\n puts \"html grab error\"\n return \"\"\n end \n end", "def search_videos(search_string)\n url = \"http://www.blip.tv/search/?search=#{search_string}&skin=json\"\n request = open(url,{\"UserAgent\" => \"Ruby-Wget\"}).read\n json = JSON.parse(request[16...-3])\n parse_json_videos_list(json)\n end", "def getLastFmSimilarTrackData artist_name, album_name, track_name\n url 
= getLastFmSimilarTrackDataUrl(artist_name, album_name, track_name);\n proxy = getProxy();\n puts url+\"-[\"+proxy+\"]\";\n #url = \"http://www.google.com\" \n begin\n html = open(url, \"User-Agent\" => getUseragent(), :proxy=>proxy) \n document = Hpricot(html)\n #document = Nokogiri::HTML(html)\n #ar = document.css('div.skyWrap table.candyStriped') \n\n ar = document.search(\"//div[@class='skyWrap']\").search(\"//table[@class='candyStriped chart']\");\n #puts ar \n return ar\n rescue Exception => e\n return \"\"\n end \n end", "def youtube_embed(youtube_url)\n if youtube_url[/youtu\\.be\\/([^\\?]*)/]\n youtube_id = $1\n else\n # Regex from # http://stackoverflow.com/questions/3452546/javascript-regex-how-to-get-youtube-video-id-from-url/4811367#4811367\n youtube_url[/^.*((v\\/)|(embed\\/)|(watch\\?))\\??v?=?([^\\&\\?]*).*/]\n youtube_id = $5\n end\n %Q{<iframe title=\"YouTube video player\" width=\"640\" height=\"390\" src=\"http://www.youtube.com/embed/#{ youtube_id }\" frameborder=\"0\" allowfullscreen></iframe>}\n end", "def youtube_embed(youtube_url)\n if youtube_url[/youtu\\.be\\/([^\\?]*)/]\n youtube_id = $1\n else\n # Regex from # http://stackoverflow.com/questions/3452546/javascript-regex-how-to-get-youtube-video-id-from-url/4811367#4811367\n youtube_url[/^.*((v\\/)|(embed\\/)|(watch\\?))\\??v?=?([^\\&\\?]*).*/]\n youtube_id = $5\n end\n %Q{<iframe title=\"YouTube video player\" width=\"640\" height=\"390\" src=\"http://www.youtube.com/embed/#{ youtube_id }\" frameborder=\"0\" allowfullscreen></iframe>}\n end", "def search_songs(query)\n search('Songs', query)\n end", "def next_video\n if episode?\n episode = content.next_ep\n unless episode.nil?\n return episode.video\n end\n end\n end", "def main\n doc = Nokogiri::HTML(open(START_URI))\n dirs_to_search = doc.css('a').map{|x|x[:href]}.grep(%r{/SolaceSounds/SolaceSounds})\n\n streams_to_rip = []\n\n dirs_to_search.each do |dir|\n puts \"Searching #{HOST}#{dir}\"\n\n # don't overwhelm the server\n sleep 1\n\n subdir = Nokogiri::HTML(open(HOST + dir))\n streams_to_grab = subdir.css('a').map{|x|x[:href]}.grep(%r{\\.mp3})\n streams_to_rip.concat(streams_to_grab)\n end\n\n # we've found all of the mp3 streams, now we grab them\n streams_to_rip.each do |url|\n if stream_already_downloaded(url)\n puts \"skipping url: #{url}\"\n next\n end\n rip_stream(url)\n move_ripped_file(url)\n set_mp3_tags(url)\n end\nend", "def search_by_tags(tags,page,per_page)\n begin\n\t args = { 'dev_id'=>@api_key, \n\t 'tag' => tags,\n\t 'per_page' => per_page,\n 'page' => page }\n videos = xmlrpc_get_xml('youtube.videos.list_by_tag',args,'//video') \n rvideos = []\n for video in videos\n html = html_code(video.elements[\"id\"].text,\n video.elements[\"thumbnail_url\"].text ) \n v = { :title => video.elements[\"title\"].text,\n :thumb => video.elements[\"thumbnail_url\"].text,\n :thumb_width => 80,\n :thumb_height => 60,\n :creator => video.elements[\"author\"].text,\n\t :html => html }\t \n\t rvideos << v\n\t end\n\t return rvideos\n \t rescue Exception\n return nil\n\t end \t \t \n\tend", "def find_playlists_for_song(song_name)\n track_array = self.find_track_ids_for_song_name(song_name)\n # This array contains matching playlists id for track_ids\n matching_playlists_array = []\n # Enumerate through each track id for the song name \n track_array.each do |a_track_id|\n\n # Enumerate through playlists\n self.lib.playlists.each_value do |playlist|\n if playlist.track_ids.include?(a_track_id)\n matching_playlists_array << playlist.metadata['playlist_id']\n end\n 
end \n end \n puts matching_playlists_array\n matching_playlists_array\n end", "def youtube_embed_url\n VideoInfo.new(self.video_url).embed_url if self.video_url?\n end", "def source\n\t\tif youtube?\n\t\t\treturn \"http://www.youtube.com/v/\" + youtube_id + \"?fs=1\"\n\t\telse\n\t\t\treturn \"\"\n\t\tend\n\tend", "def search genre, startyear, endyear\n url =\"http://developer.echonest.com/api/v4/artist/search?api_key=#{ECHONEST_API_KEY}&sort=hotttnesss-desc&format=json&style=#{genre}&artist_start_year_after=#{startyear}&artist_end_year_before=#{endyear}&results=#{RESULTS}\"\n result = parseURL url\n\nend", "def video_by(params)\n params = {:video_id => params} if !params.is_a?(Hash)\n url = \"http://gdata.youtube.com/feeds/api/\"\n video_id = params[:video_id].split(\"/\").last\n if params[:user]\n url << \"users/#{params[:user]}/uploads/#{video_id}\"\n else\n url << \"videos/#{video_id}\"\n end\n parser = YouTubeG::Parser::VideoFeedParser.new(url, request_headers, request_options)\n parser.parse\n end", "def get_channnel_xml( url )\n path = url.sub(/http:\\/\\/gdata\\.youtube\\.com/,'')\n xml = \"\"\n\n Net::HTTP.version_1_2\n Net::HTTP.start(\"#{@url}\", \"#{@port}\") do |http|\n response = http.get(\"/#{path}\")\n xml = response.body\n end\n\n return xml\n end", "def parse(library_file)\n xml = Nokogiri::XML(File.open(library_file))\n \n # parsing songs:\n # Using 'class community' contributed snippet, \n # couple of ways I came up with were not nearly as elegant to say the least\n (xml/\"/plist/dict/dict/dict\").each do |tnode|\n metadata = Hash[*tnode.children.reject {\n |e| e.kind_of? Nokogiri::XML::Text}.collect {\n |e| e.text}]\n\n song = Track.new(metadata)\n @tracks << song\n end\n \n @albums = @tracks.collect {|s| s.album}.uniq\n @artists = @tracks.collect {|s| s.artist}.uniq\n \n # parsing playlists:\n (xml/\"/plist/dict/array/dict\").each do |pnode| \n # excluding 'special' playlists based on a 'special' string in a key \n next if (pnode/\"key\").any? 
{|x| x.to_s =~ /Visible|Smart|Distinguished/}\n \n # the rest is slightly modified version provided by Ben\n playlist_name = (pnode/\"key[text()='Name']\").first.next.text\n playlist_tracks = (pnode/\"array//integer\").collect {|e| e.text.to_i}\n \n playlist = Playlist.new(playlist_name, playlist_tracks)\n @playlists << playlist\n end\n end", "def song\n fetch('bossa_nova.songs')\n end", "def search_song\n song_query = @prompt.ask('What is the name of the song?'.colorize(:light_green))\n tracks = RSpotify::Track.search(song_query, limit: 5)\n cleaned_results = []\n tracks.each { |t| cleaned_results << \"#{t.name} by #{t.artists[0].name}\" }\n system('clear')\n cleaned_results << 'Back'\n selection = @prompt.select('Please select one of the search results:', cleaned_results).split(' by ')\n add_to_list if selection[0] == 'Back'\n store_song(selection)\n end", "def playlist_title;\treturn @json_data['playlist_title'];\tend", "def youtube_json(query)\n full_results = HTTParty.get(\"https://www.googleapis.com/youtube/v3/search?part=snippet&q=#{query.gsub(\" \", \"%20\")}&maxResults=50&key=\"+YOUTUBE_API_KEY)\n return full_results\n end", "def retrieve_links(query, page = 1)\n\t\tyoutube = Nokogiri::HTML(open(URI.encode(\"#{self.query_url}#{query}&page=#{page}\")))\n\t\tself.stats = retrieve_youtube_result_stats(youtube, query)\n\t\t(youtube/\"h3[@id^='video-long-title-']/a\").each do |res|\t\t\t\t\t\t\t# 'a' tag has id which starts with \"video-long-title-\"\n\t\t\turl = res['href']\n\t\t\tdescription = res.inner_text\n\t\t\tvideo_id = url.split('=').last\t\t\t\t\t\t\t\t\t\t\t\t# /watch?v=WwojCsQ3Fa8 => WwojCsQ3Fa8 => video_id\n\t\t\tthumb_url = \"http://i4.ytimg.com/vi/#{video_id}/default.jpg\"\n\t\t\tself.links << ThumbnailedLink.new(\"#{self.base_url}#{url}\", description, thumb_url, self.name)\n\t\tend\n\t\tself.links\n\tend", "def youtube_url\n \"https://www.youtube.com/watch?v=#{@data['youtubeID']}\" if @data['youtubeID']\n end", "def have_song(room, song)\n for track in room.get_song_list\n if track == song\n return song\n end\n end\n return \"Song not available.\"\n end", "def set_youtube_video\n\t\t\t@youtube_video = YoutubeVideo.find(params[:id])\n\t\tend", "def if_guest_fav_song_played(room)\n room_song_title_list =room.title_of_songs()\n guest_fav_song_list = room.guests_fav_song_list()\n for song in guest_fav_song_list\n room_song_title_list.find{|room_song| room_song =\"song\"}\n return \"whoo!\"\n end\n end", "def parse_activity(entry)\n # Figure out what kind of activity we have\n video_type = nil\n parsed_activity = nil\n entry.css(\"category\").each do |category_tag|\n if category_tag[\"scheme\"] == \"http://gdata.youtube.com/schemas/2007/userevents.cat\"\n video_type = category_tag[\"term\"]\n end\n end\n\n if video_type\n case video_type\n when \"video_rated\"\n parsed_activity = YouTubeIt::Model::Activity.new(\n :type => \"video_rated\",\n :time => entry.at(\"updated\") ? entry.at(\"updated\").text : nil,\n :author => entry.at(\"author/name\") ? entry.at(\"author/name\").text : nil,\n :videos => parse_activity_videos(entry),\n :video_id => entry.at_xpath(\"yt:videoid\") ? entry.at_xpath(\"yt:videoid\").text : nil\n )\n when \"video_shared\"\n parsed_activity = YouTubeIt::Model::Activity.new(\n :type => \"video_shared\",\n :time => entry.at(\"updated\") ? entry.at(\"updated\").text : nil,\n :author => entry.at(\"author/name\") ? entry.at(\"author/name\").text : nil,\n :videos => parse_activity_videos(entry),\n :video_id => entry.at_xpath(\"yt:videoid\") ? 
entry.at_xpath(\"yt:videoid\").text : nil\n )\n when \"video_favorited\"\n parsed_activity = YouTubeIt::Model::Activity.new(\n :type => \"video_favorited\",\n :time => entry.at(\"updated\") ? entry.at(\"updated\").text : nil,\n :author => entry.at(\"author/name\") ? entry.at(\"author/name\").text : nil,\n :videos => parse_activity_videos(entry),\n :video_id => entry.at_xpath(\"yt:videoid\") ? entry.at_xpath(\"yt:videoid\").text : nil\n )\n when \"video_commented\"\n # Load the comment and video URL\n comment_thread_url = nil\n video_url = nil\n entry.css(\"link\").each do |link_tag|\n case link_tag[\"rel\"]\n when \"http://gdata.youtube.com/schemas/2007#comments\"\n comment_thread_url = link_tag[\"href\"]\n when \"http://gdata.youtube.com/schemas/2007#video\"\n video_url = link_tag[\"href\"]\n else\n # Invalid rel type, do nothing\n end\n end\n\n parsed_activity = YouTubeIt::Model::Activity.new(\n :type => \"video_commented\",\n :time => entry.at(\"updated\") ? entry.at(\"updated\").text : nil,\n :author => entry.at(\"author/name\") ? entry.at(\"author/name\").text : nil,\n :videos => parse_activity_videos(entry),\n :video_id => entry.at_xpath(\"yt:videoid\") ? entry.at_xpath(\"yt:videoid\").text : nil,\n :comment_thread_url => comment_thread_url,\n :video_url => video_url\n )\n when \"video_uploaded\"\n parsed_activity = YouTubeIt::Model::Activity.new(\n :type => \"video_uploaded\",\n :time => entry.at(\"updated\") ? entry.at(\"updated\").text : nil,\n :author => entry.at(\"author/name\") ? entry.at(\"author/name\").text : nil,\n :videos => parse_activity_videos(entry),\n :video_id => entry.at_xpath(\"yt:videoid\") ? entry.at_xpath(\"yt:videoid\").text : nil\n )\n when \"friend_added\"\n parsed_activity = YouTubeIt::Model::Activity.new(\n :type => \"friend_added\",\n :time => entry.at(\"updated\") ? entry.at(\"updated\").text : nil,\n :author => entry.at(\"author/name\") ? entry.at(\"author/name\").text : nil,\n :username => entry.at_xpath(\"yt:username\") ? entry.at_xpath(\"yt:username\").text : nil\n )\n when \"user_subscription_added\"\n parsed_activity = YouTubeIt::Model::Activity.new(\n :type => \"user_subscription_added\",\n :time => entry.at(\"updated\") ? entry.at(\"updated\").text : nil,\n :author => entry.at(\"author/name\") ? entry.at(\"author/name\").text : nil,\n :username => entry.at_xpath(\"yt:username\") ? entry.at_xpath(\"yt:username\").text : nil\n )\n else\n # Invalid activity type, just let it return nil\n end\n end\n\n return parsed_activity\n end", "def set_youtube_playlist\n @youtube_playlist = YoutubePlaylist.find(params[:id])\n end", "def good_xml\n \"<PLAY><TITLE>My Simple Play</TITLE></PLAY>\"\n end", "def get_similar( params )\n xml = LastFM.get( \"track.getSimilar\", params )\n xml.find('similartracks/track').map do |track|\n LastFM::Track.from_xml( track )\n end\n end", "def youtube_id(youtube_url)\n regex = %r{(?:youtube(?:-nocookie)?\\.com/(?:[^/\\n\\s]+/\\S+/|(?:v|e(?:mbed)?)/|\\S*?[?&]v=)|youtu\\.be/)([a-zA-Z0-9_-]{11})}\n match = regex.match(youtube_url)\n match[1] if match\n end", "def get_album_songs_url(album_url)\r\n\t\t\thtml_doc = get_html_doc(album_url)\r\n\t\t\tlinks = html_doc.css('a')\r\n\t\t\tlinks.map {|link| (link.attribute('href').to_s =~ /mp3-song.html/) ? 
\"http://pzmp3.com/\"+link.attribute('href').to_s : \"\"}.sort.delete_if{|href| href.empty?}\r\n\t\tend", "def best_movies\n doc.search('#BestFilmList a').map(&:text)\n end", "def atom()\r\n atom = \"<?xml version=\\\"1.0\\\"?><entry xmlns=\\\"http://www.w3.org/2005/Atom\\\" xmlns:media=\\\"http://search.yahoo.com/mrss/\\\" \"\r\n atom << \"xmlns:yt=\\\"http://gdata.youtube.com/schemas/2007\\\"> <media:group> <media:title type=\\\"plain\\\">#{@name}\"\r\n atom << \"</media:title> <media:description type=\\\"plain\\\">#{@description}</media:description>\"\r\n atom << \"<media:category scheme=\\\"http://gdata.youtube.com/schemas/2007/developertags.ca\\\">#{@category}</media:category>\"\r\n atom << \"<media:keywords>#{@keywords.join(', ')}</media:keywords></media:group></entry>\"\r\n end", "def youtube_embed(youtube_url)\n VideoPlayer::player(youtube_url,\"640\", \"390\", true)\n end", "def song_plays\n SongPlay.where(:song_path => path)\n end", "def play\n\t\treturn \"stoffi:track:youtube:#{youtube_id}\" if youtube?\n\t\treturn \"stoffi:track:soundcloud:#{soundcloud_id}\" if soundcloud?\n\t\treturn url\n\tend", "def play_next\n\t\t# locate now-playing and next-playing tracks\n\t\tnow_playing_track = Track.find_by(status: 'now playing')\n\t\tnext_track = Track.where(status: 'waiting').order('created_at').first\n\n\t\t# remove now-playing track(s)\n\t\tunless now_playing_track.nil?\n\t\t\t# remove the users songs from users song list\n\t\t\tlast_user_song = Song.find_by(title: now_playing_track.title, user_id: now_playing_track.user_id)\n\t\t\tlast_user_song.destroy unless last_user_song.nil?\n\n\t\t\t# remove track from player list\n\t\t\tnow_playing_track.destroy # TODO, (tbd) may want to archive this, move to a play history\n\t\tend\n\n\t\t# check to make sure there is at least one song in q to play\n\t\tunless next_track.nil?\n\t\t\t# set now-playing track to next track in q\n\t\t\tnext_track.update(status: 'now playing')\n\n\t\t\t# update the user's song status\n\t\t\tuser_song = Song.find_by(title: next_track.title, user_id: next_track.user_id)\n\t\t\tuser_song.update(status: 'now playing') unless user_song.nil?\n else\n flash[:error] = 'No music to play! Go gets some requests!'\n\t\tend\n\n \t# redisplay player page\n redirect_to '/player'\n\tend", "def artist_show_song\r\n\r\n\t\t#@artist = Artist.find(params[:id])\r\n\t\tsearchString = params[:url_slug]\r\n\t\t@artist = Artist.find_by_url_slug(searchString)\r\n\t\t@song = @artist.song.find.by_url_slug(params[:song_name])\r\n\r\n\t\trespond_to do |format|\r\n\t\t\tformat.html # show.html.erb\r\n\t\t\tformat.xml { render :xml => @artist }\r\n\t\tend\r\n\tend" ]
[ "0.7095424", "0.63789326", "0.6267256", "0.62182397", "0.62182397", "0.6133526", "0.594888", "0.5894755", "0.58848035", "0.5850666", "0.57705325", "0.5742889", "0.57082736", "0.56800807", "0.5664901", "0.5637378", "0.56163484", "0.56132054", "0.56091475", "0.55646855", "0.5557971", "0.55550754", "0.55455834", "0.55376124", "0.5536632", "0.55365056", "0.5506281", "0.5493057", "0.5489032", "0.5477871", "0.5477049", "0.54721254", "0.54647267", "0.54440355", "0.5434014", "0.54156905", "0.5406342", "0.5391444", "0.5385932", "0.53527176", "0.5329011", "0.5319089", "0.53133196", "0.5309851", "0.53074366", "0.52928245", "0.52928245", "0.52907073", "0.5287118", "0.52823734", "0.528138", "0.5276847", "0.5276327", "0.5275051", "0.5259727", "0.52594656", "0.52541405", "0.5250127", "0.5248637", "0.5244572", "0.52320844", "0.5224594", "0.52230954", "0.52176", "0.5215737", "0.5205471", "0.5205471", "0.5205121", "0.52014023", "0.5195148", "0.51809907", "0.51723963", "0.5171746", "0.51707923", "0.5150773", "0.51323247", "0.5120842", "0.51124704", "0.50969076", "0.5084908", "0.50653833", "0.50651586", "0.50651366", "0.50595164", "0.5058799", "0.5053723", "0.50350845", "0.5035083", "0.5030191", "0.5011734", "0.50114536", "0.49984512", "0.49929374", "0.4982047", "0.49684379", "0.49608523", "0.49553457", "0.49519545", "0.4950467", "0.49476406" ]
0.6912803
1
This method extracts the Spotify track ID if the URI/URL is in the correct format. If it isn't, then it returns false.
def extract_spotify_id(query) if query.start_with?('http://open.spotify.com/track/', 'https://open.spotify.com/track/', 'https://play.spotify.com/track/', 'http://play.spotify.com/track/') URI.split(query)[5].split('/')[2] elsif query.start_with?('spotify:track:') query.split(':')[2] else false end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def track_id\n @url.gsub(_url_regex, '')\n end", "def extract_id_from_youtube_url\n if self.youtube_url.present?\n url = self.youtube_url\n self.external_id = Youtube.id_from_url(url) if url.present?\n self.errors.add(:youtube_url, 'is not a valid Youtube URL') unless self.external_id.present?\n else\n true\n end\n end", "def lookup_track_info(orig_url)\n uri = URI.parse(orig_url)\n \n split_url = uri.path.split('/')\n\n puts uri.host\n \n track_info = Hash.new();\n\n #This needs to be better, include only track urls of specific services\n if uri.host == 'open.spotify.com'\n puts 'spotify lookup'\n track_info = spotify_lookup(split_url[2])\n elsif uri.host.ends_with? 'last.fm'\n puts 'lastfm lookup'\n track_info = last_fm_lookup(split_url[2], split_url[4])\n elsif uri.host.ends_with? 'grooveshark.com'\n track_info = grooveshark_lookup('', split_url[2])\n elsif uri.host == 'rd.io'\n print 'rdio lookup'\n else\n # Return an empty track_info hash\n puts 'non-valid url'\n end\n\n return track_info\n end", "def parse_youtube_id(url)\n url =~ /[v]=([^&]*)/\n id = $1\n \n if id.nil?\n # when there is no match for v=blah, then maybe they just \n # provided us with the ID the way the system used to work... \n # just \"E4Fbk52Mk1w\"\n return url \n else\n # else we got a match for an id and we can return that ID...\n return id\n end\n end", "def parse_playlist_reference_url(url)\n playlist, index = split_url(url)\n [to_valid_id(playlist), Integer(index)]\n end", "def track_name\n begin\n track = MetaSpotify::Track.lookup(url_spotify)\n return track.name\n rescue Exception => e\n return \"Not a Track\"\n end\n end", "def giphy_id\n GIPHY_MEDIA_URL.match(@url)['id'] if match?\n end", "def find_vimeo_id url\n url = sanitize url\n matches = VIMEO_REGEX.match url.to_str\n matches[2] if matches\n end", "def parse_url_for_item_id\n return nil if not self.item_id \n if self.item_id.match(/\\D+/).to_s.length != 0\n self.item_id.match(/item=\\d+\\D/).to_s.gsub!(/\\D+/, \"\") ||\n self.item_id.match(/-\\/\\d+/).to_s.gsub!(/\\D+/, \"\")\n else\n return self.item_id\n end\n end", "def extract_work_id( url )\n /^.*\\/(\\d+-\\d+-.*)$/.match( url )[ 1 ]\nend", "def goodreads_id\n @url.match(/\\/(\\d+)\\./)[1] rescue nil\n end", "def extractYouTubeID(url)\n YoutubeVideoId.extract(url)\n end", "def id_link?\n @url[0] == '#'\n end", "def get_url_id(url)\n if !url.nil?\n url.split('/').last\n end\nend", "def youtube_id\n\t\tif youtube?\n\t\t\treturn path[\"stoffi:track:youtube:\".length .. 
-1]\n\t\telse\n\t\t\treturn \"\"\n\t\tend\n\tend", "def video_id(url)\n url.scan(/\\?v=(.{11})|youtu.be\\/(.{11})/)[0].find {|e| not e.nil?}\n end", "def fake_url?\n url =~ /^\\d\\d\\d\\d-/\n end", "def get_id\n url = \"https://www.youtube.com/results?search_query=#{@artist_name.gsub(\" \",\"+\")}+#{@song_name.gsub(\" \",\"+\")}\"\n#pp url\n web_page = HTTParty.get(url)\n\n @parsed = Nokogiri::HTML(web_page)\n ref = @parsed.css(\"a\").collect{|link| link[\"href\"]}.select{|href| href && href.include?(\"/watch?v=\")}.uniq[0]\n return ref.gsub(\"/watch?v=\", \"\") unless !ref\n end", "def valid_remote_identifier?(value)\n RemoteRecord.valid?(value) && RemoteRecord.retrieve(value).success?\n rescue URI::InvalidURIError\n false\n end", "def steam_id_from_url(url)\n url.slice!('https://store.steampowered.com/app/')\n m = url.match(/(\\d+)\\/?/)\n return nil if m.nil?\n m.captures.first.to_i\nend", "def spotify?; spotify.to_s != \"\" end", "def valid_remote_identifier?(value)\n RemoteRecord.valid?(value) && RemoteRecord.retrieve(value).success?\n rescue URI::InvalidURIError\n false\n end", "def get_id_from_short_url shortURL\n\t id = 0 # initialize result \n\n\t # A simple base conversion logic \n\t (0...shortURL.size).each do |i| \n\t if ('a' <= shortURL[i] && shortURL[i] <= 'z') \n\t id = id*62 + shortURL[i].ord - 'a'.ord; \n\t end\n\n\t if ('A' <= shortURL[i] && shortURL[i] <= 'Z') \n\t id = id*62 + shortURL[i].ord - 'A'.ord + 26; \n\t end\n\n\t if ('0' <= shortURL[i] && shortURL[i] <= '9') \n\t id = id*62 + shortURL[i].ord - '0'.ord + 52;\n\t end \n\t end\n\n\t id\n\tend", "def video_id\n match = uri.path.match(/\\/v_show\\/id_([a-zA-Z0-9_=\\-]+)(\\.html)?.*/)\n return match[1] if match && match[1]\n\n nil\n rescue\n return nil\n end", "def video_id\n match = uri.path.match(/\\/v_show\\/id_([a-zA-Z0-9]*)(\\.html)*/)\n return match[1] if match && match[1]\n\n nil\n rescue\n return nil\n end", "def uriish? str\n (str.include? ':') && (UriSniffRx.match? str)\n end", "def extract_id(link)\n link.split('/').last if link.split('/')\n end", "def exists? purl_path\n record = get(purl_path)\n return nil if record.nil?\n return record[\"purlId\"].to_s if record[\"status\"] == 1\n return nil\n end", "def trackable?(uri)\n uri && uri.absolute? 
&& %w(http https).include?(uri.scheme)\n end", "def trip_id\n url.to_s =~ /information\\/[a-z]+\\/[^\\/]+\\/([^\\/]+)/\n $1\n end", "def valid_id(major)\n unless major =~ /^[a-zA-Z0-9]*$/\n puts \"#{major} is not a valid imgur ID or URL\"\n return false\n end\n return true\n end", "def youtube?\n\t\treturn path && path.starts_with?(\"stoffi:track:youtube:\")\n\tend", "def uri\n @uri ||= uri_id && \"spotify:user:#{user.username}:playlist:#{uri_id}\"\n end", "def extract_id_uri\n @id_uri = @meta.at_xpath('./a:identification/a:FRBRWork/a:FRBRuri', a: NS)['value']\n empty, @country, @nature, date, @num = @id_uri.split('/')\n\n # yyyy-mm-dd\n @year = date.split('-', 2)[0]\n end", "def uid_from_path(url, base_store_url)\n return [:main, 0] if url.include?(base_store_url)\n\n return [:category, url.match(/viewGenre\\?id\\=(\\d+)/)[1]] if url.include?(\"viewGenre?id=\")\n return [:category, url.match(/viewGrouping\\?id\\=(\\d+)/)[1]] if url.include?(\"viewGrouping?id=\")\n return [:room, url.match(/viewMultiRoom\\?fcId\\=(\\d+)/)[1]] if url.include?(\"viewMultiRoom?fcId=\")\n return [:app, url.match(/.+id(\\d+)/)[1]] if url =~ /\\/id\\d+/\n \n [nil, nil]\n end", "def spotify_url; \"http://open.spotify.com/artist/#{spotify}\" end", "def track?\n type == \"track\"\n end", "def get_youtube_video_id(url)\n # find id\n result = url.match /https*\\:\\/\\/.*youtube\\.com\\/watch\\?v=(.*)/\n # return id or nil\n result ? result[1] : nil\n end", "def extract_id_from_response(resp)\n resp.response.headers['location'].split('/').last\n end", "def youtube_id\n rexp = /v=(.*)/ or /embed\\/(.*)/\n rexp.match(url).captures.first\n end", "def normalize_for_artist_finder\n profile_url.presence || url\n end", "def guid_from_url\n # get the last large number from the url, if there is one\n url.to_s.scan(/https:\\/\\/firstlook.org\\/theintercept\\/[0-9]{4}\\/[0-9]{2}\\/[0-9]{2}\\/[a-z0-9-]+/).last\n end", "def get_track(spotify_id)\n content = HTTParty.get('http://ws.spotify.com/lookup/1/.json?uri=' + URI.escape(\"spotify:track:#{spotify_id}\"))\n if !content.body.empty?\n Oj.load(content.body)\n else\n flash[:notice] = 'Error with Spotify! 
Try again in 10 seconds!'\n end\n end", "def youtube_id(youtube_url)\n regex = %r{(?:youtube(?:-nocookie)?\\.com/(?:[^/\\n\\s]+/\\S+/|(?:v|e(?:mbed)?)/|\\S*?[?&]v=)|youtu\\.be/)([a-zA-Z0-9_-]{11})}\n match = regex.match(youtube_url)\n match[1] if match\n end", "def id_from_response(response)\n response['Location'][/\\/([^\\/]*?)(\\.\\w+)?$/, 1]\n end", "def parse_url(input)\n if input.success?\n video_id = youtube_id(input[:youtube_url])\n Success(video_id: video_id)\n else\n Failure(\"URL #{input.errors.messages.first}\")\n end\n end", "def parse_video_id_for_youtube\n parse_video_id_for_regexp_and_index(YOUTUBE_REGEXP, 6)\n end", "def extract_record_id(record)\n url = record.xpath('//tei:facsimile/tei:graphic/@url', NS).map(&:text).first\n url.gsub!('http://cudl.lib.cam.ac.uk/content/images/', '')\n url.gsub!(%r{-\\d+-\\d+_files\\/8\\/0_0.jpg}, '')\n end", "def has_track_with_id(spotify_id)\n self.tracks.exists?(spotify_id: spotify_id)\n end", "def id_found?\n token && decoded_token && decoded_token[:id]\n end", "def get_tracks_url(album)\n tracks_url = ''\n if album.first == 'Twist and Shout' || album.first == 'A Hard Day\\'s Night' || album.first == 'Yellow Submarine'\n tracks_url = \"https://en.wikipedia.org/wiki/#{album.first} (album)\"\n elsif album.first == 'Something New' || album.first == 'Revolver' || album.first == 'Let It Be'\n tracks_url = \"https://en.wikipedia.org/wiki/#{album.first} (Beatles album)\"\n elsif album.first == 'The Beatles (\"The White Album\")'\n tracks_url = \"https://en.wikipedia.org/wiki/The_Beatles_(album)\"\n else\n tracks_url = \"https://en.wikipedia.org/wiki/#{album.first}\"\n end\n return tracks_url\nend", "def can_track?(id)\n return false if credentials.nil?\n package_id_matchers.each {|m| return true if id =~ m }\n false\n end", "def offer_id_from_url(url)\n encrypted_params = CGI::parse(url.query)\n @params = ObjectEncryptor.decrypt(encrypted_params['data'].first)\n\n if url.path.include?('/videos/')\n url.path.split('/')[2]\n elsif url.path.include?('/click/generic') ||\n url.path.include?('/click/mraid') ||\n url.path.include?('/click/app') ||\n url.path.include?('/click/survey') ||\n url.path.include?('/click/action')\n # entity_id and offer_id are the same thing, is this always the case?\n @params[:entity_id]\n else\n nil\n end\n end", "def is_track_key\n return ( 'tracks' == @top_level_key ) &&\n \"/plist/dict/dict/dict/key\" == @breadcrumb\n end", "def thumb_id\n regex_url = /pictures[\\/][\\d]+[\\/]/ \n string_url = self.content.scan(regex_url).to_s \n regex_id = /[\\d]+/ \n picture_id = string_url.scan(regex_id)[0]\n return picture_id\n end", "def thumb_id\n regex_url = /pictures[\\/][\\d]+[\\/]/ \n string_url = self.content.scan(regex_url).to_s \n regex_id = /[\\d]+/ \n picture_id = string_url.scan(regex_id)[0]\n return picture_id\n end", "def guid_from_url\n # get the last large number from the url, if there is one\n url.to_s.scan(/[0-9]{6,12}/).last\n end", "def query_metadata filename\n song = File.basename(filename, File.extname(filename))\n track = filename[/[0-9]+ /]\n\n unless track.nil?\n song = song[track.size .. 
-1]\n track = track.to_i\n end\n\n return track,song\nend", "def id_from_href(href)\r\n if href.include?(\"?\")\r\n href = href.partition(\"?\")[0]\r\n end\r\n id = href.split(\"/\").last\r\n id\r\n end", "def initialize(artist, track)\n @artist = artist\n @track = track\n\n url =(\"https://api.spotify.com/v1/search?q=\" + track + \"&type=track\")\n\n response = HTTParty.get(url).parsed_response\n\n @uri = response['tracks']['items'][0]['uri']\n\n\n # ['artists']['items'][0]['uri']\n end", "def real_url?\n url && url.present? && url != \"#\"\n end", "def local_part_uri\n self.uri.match(/^http:\\/\\/.+?(\\/[^#]+)/)\n return $1\n end", "def set_url_video_id\r\n # Get the URL and put in this variable that will be filtered\r\n # down to the Video ID\r\n url_video_id = self.url\r\n \r\n # Remove the http:// part of the URL\r\n if (url_video_id = url_video_id.split(/^http[s]?:\\/\\//i)[1]) != nil\r\n \r\n #Remove the www part if it exists\r\n url_video_id = url_video_id.split(/^www./i)[1] unless url_video_id.match(/^www./i) == nil\r\n \r\n # Go through each of the filters for the source of this story and\r\n # find one that will return the ID\r\n for filter in self.story_source.story_source_id_filters\r\n \r\n # Determine if this filter is usable for the URL provided\r\n if url_video_id.match(/^#{filter.pre_id_regex}/i) != nil\r\n # Remove the first part of the URL\r\n url_video_id = url_video_id.split(filter.pre_id_url)[1]\r\n \r\n # Remove the end of the URL\r\n url_video_id = url_video_id.split(filter.post_id_url)[0]\r\n \r\n # Set the ID and return it\r\n self.url_video_id = url_video_id\r\n return url_video_id\r\n end\r\n end\r\n end\r\n \r\n # The ID could not be found\r\n # Return nil and don't set the ID\r\n return nil\r\n end", "def extract_pid(uri)\n URI(uri).path.split('/').last\n end", "def is_a_real_url?\n begin\n URI.parse(long_url)\n rescue URI::InvalidURIError\n errors.add(:message, \"must be a valid URL\")\n end \n end", "def parse_attachment_reference_id_from_url(url)\n # TODO: Attachments from a third party domain with the same path should not be returned.\n result = url.match(ATTACHMENT_ID_REGEX)\n result ? result[1] : nil\n end", "def spotify_track\n unless self.spotify_track_id.nil?\n Rails.cache.fetch(\"spotify_track/#{self.spotify_track_id}\", expires_in: 12.days) do\n RSpotify::Track.find(self.spotify_track_id)\n end\n end\n end", "def checkURL(twitter_user)\n\tchecker = twitter_user.to_s\n\tif checker.start_with?(\"http://\") or checker.start_with?(\"https://\") or checker.start_with?(\"twitter.\")\n\t\treturn checker[checker.rindex('/')+1..checker.length]\n\telse \n\t\treturn checker\n\tend\nend", "def tracking_url\n if has_tracking_information?\n return \"#{shipping_carrier.tracking_base_url}#{tracking_number}\"\n else\n return false\n end\n end", "def play\n\t\treturn \"stoffi:track:youtube:#{youtube_id}\" if youtube?\n\t\treturn \"stoffi:track:soundcloud:#{soundcloud_id}\" if soundcloud?\n\t\treturn url\n\tend", "def url?(uri)\n /\\w+\\:\\/\\// =~ uri\n end", "def url?(uri)\n /\\w+\\:\\/\\// =~ uri\n end", "def spotify_playlist_url=(url)\n if url.present?\n url = url[\"spotify:\"] ? 
url[url.rindex(\"spotify:\")..-1] : url #\n write_attribute(:spotify_playlist_url, url)\n end\n end", "def is?(url)\n return true if url.match('https?://twitter\\.com.*status/(\\d+)')\n end", "def sierra_856_perfect?\n @url == self.proper.proper_856_content\n end", "def pull_ssid(url)\n url =~ /\\?h3fileid\\=(\\d+)/\n return $1\n end", "def find_id(uri)\n Addressable::URI.parse(uri).basename\n end", "def embed_id\n match = @node.inner_text.strip.match(self.class.url_regexp)\n match[:id] if match\n end", "def getAudioUri\n return @data.scan(/https:\\/\\/.*?\\.mp3/)[0]\nend", "def _parse_image_uri row\n row./('td[2]/a').first[:href] =~ /multiverseid=(\\d+)/\n $1\n end", "def extract_session_id!(url)\n url.gsub!(/#{::Rails.application.config.session_options[:key]}=([^&]+)&?/, '')\n url.chomp!('?')\n $1\n end", "def snipe_listing_from_url(listing_url)\n snipe = listing_url.match(/\\/listing(\\/[\\w\\-]+){4}|\\/listings\\/(\\d{7,})\\/gallery(\\?refer=map)?/)\n if snipe\n result1, result2, result3 = snipe[1], snipe[2], snipe[3]\n if result1\n listingid = result1.sub(\"/\",\"\")\n elsif result2\n listingid = result2\n elsif result3\n listing = result3\n end\n end\n end", "def youtube_embed(youtube_url)\n\t # Regex from # http://stackoverflow.com/questions/3452546/javascript-regex-how-to-get-youtube-video-id-from-url/4811367#4811367\n\t youtube_url.to_s[/^.*((v\\/)|(embed\\/)|(watch\\?))\\??v?=?([^\\&\\?]*).*/]\n\t youtube_id = $5\n\t youtube_id\n\tend", "def term_id_expects_uri?\n return false if term_config.nil? || !(term_config.key? :term_id)\n term_config[:term_id] == \"URI\"\n end", "def searchSpotify(inArtist)\n tArtist = CGI.escape(inArtist)\n \n outstring = open('http://ws.spotify.com/search/1/artist?q='+tArtist, 'User-Agent' => 'Ruby-Wget').read\n\n outdata = outstring.split(\"<opensearch:totalResults>\")\n\n outdata.delete_at(0);\n\n if outdata[0].split(\"</opensearch:totalResults>\")[0].to_i > 0\n outinfo = outstring.split(\"<artist href=\");\n return outinfo[1].split(\">\")[0];\n else\n return 0\n end\n end", "def pull_gameid(url)\n url =~ /\\?gameid=(\\d+)\\&/\n return $1\n end", "def unique_id\n playlist_id[/playlists\\/([^<]+)/, 1]\n end", "def ok_to_extract?(remote_url)\n true\n end", "def get_store_number_from_url(url)\n $tracer.trace(\"GameStopAnalyticsFunctions: #{__method__}, Line: #{__LINE__}\")\n $tracer.report(\"Should #{__method__}.\")\n store = url.split(\"store=\")\n\t\tstore_number = store[1].split(\"&\")\n\t\t$tracer.report(\"Store Number from URL :::: #{store_number[0]}\")\n\t\treturn store_number[0]\n end", "def parse_id(e)\n return e.css('a')[0].attribute('data-play').content.split(':')[1].to_i\n end", "def id\n case self.service\n when :youtube then parse_video_id_for_youtube\n when :vimeo then parse_video_id_for_vimeo\n end\n end", "def uri_parse(uri)\n\t\t\tif uri == ''\n\t\t\t\tprint_error(\"URI required\")\n\t\t\t\treturn\n\t\t\tend\n\n\t\t\tregexstr = '^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?'\n\n\t\t\tregexurl = Regexp.new(regexstr, false)\n\t\t\tret = regexurl.match(uri)\n\n\t\t\treturn ret\n\t\tend", "def valid_tweet(tweet)\n\t\t@url = tweet.text\n\n\t\tif @url.include? 
\"#radio\"\n\t\t\treturn true\n\t\telse\n\t\t\treturn false\n\t\tend\n\tend", "def parse_url(url)\n regex = /(?:.be\\/|\\/watch\\?v=|\\/(?=p\\/))([\\w\\/\\-]+)/\n url.match(regex)[1] unless url.match(regex).nil?\n end", "def has_userinfo?\n ImpURI.has_userinfo?(@uri)\n end", "def valid_url?\n\t\t# http:// or not http://\n\t\tx = self.long_url.start_with?(\"http://\", \"https://\")\n\t\tif x == false\n\t\t\treturn \"http://\" + self.long_url\n\t\telse\n\t\t\treturn self.long_url\n\t\tend\n\tend", "def has_better_id?\n slug and found_using_numeric_id? || found_using_outdated_friendly_id?\n end", "def extract_identity(uri)\n return $1.downcase if uri =~ %r[/user/([a-zA-Z_1-9-]*)] || uri =~ %r[://([a-zA-Z_1-9-]*)?\\.#{AppConfig.host(request.host)}] || uri =~ %r[://(.*?)/?$]\n return nil\n end", "def check_params(longurl)\n if longurl==\"\"\n return false\n else \n short_domain = ShortDomain.where(domain: Domainatrix.parse(longurl).domain + '.' + Domainatrix.parse(longurl).public_suffix).first\n if short_domain == nil\n return false\n else\n return true\n end\n end\n end", "def youtube_video_id\t\t\n\t\tif self.video_url.nil?\n\t\t\tnil\n\t\telse\n\t\t\tself.video_url.rpartition('/').last\n\t\tend\n\n\tend" ]
[ "0.6885513", "0.6647702", "0.6527046", "0.62536675", "0.61541885", "0.60888875", "0.599258", "0.5950304", "0.5941533", "0.59147716", "0.588949", "0.58433044", "0.5798079", "0.57464063", "0.5733345", "0.5707705", "0.5699513", "0.568366", "0.56762314", "0.56740475", "0.56514555", "0.56482637", "0.5616818", "0.5574492", "0.55616415", "0.55602765", "0.55550736", "0.5552354", "0.5540452", "0.55261403", "0.55257255", "0.5519526", "0.5519028", "0.5516815", "0.5516289", "0.551331", "0.5485331", "0.54840237", "0.54787725", "0.54689336", "0.54570246", "0.5435726", "0.54261774", "0.5403842", "0.5388587", "0.53699046", "0.5360175", "0.5349816", "0.53351635", "0.5317965", "0.5307343", "0.5297697", "0.5287366", "0.5281359", "0.5273968", "0.5273968", "0.52702934", "0.52503246", "0.52479714", "0.52443326", "0.52438277", "0.521876", "0.5218088", "0.52083504", "0.5208218", "0.52065074", "0.5191286", "0.5187234", "0.5171817", "0.51699585", "0.5168287", "0.51676816", "0.5162164", "0.5160279", "0.51584125", "0.5156204", "0.51548356", "0.51455915", "0.5140773", "0.5135454", "0.5124237", "0.5123114", "0.5117764", "0.5109613", "0.5108226", "0.5106664", "0.5102521", "0.5096087", "0.5081599", "0.50785106", "0.5075637", "0.50706273", "0.5067669", "0.50470024", "0.50442386", "0.5039581", "0.5036115", "0.5032034", "0.5028635", "0.5024069" ]
0.81502295
0
that is evenly divisible by 4, unless the year is also divisible by 100. If the year IS divisible by 100, then it is not a leap year unless the year is divisible by 400. Write a method that takes any year greater than 0 as input, and returns true if the year is a leap year, or false if it is not a leap year. Examples: Pseudocode: Given any year greater than 0, determine if the year is evenly divisible by 400. If yes, return true. If no, determine if the year is divisible by 100. If yes, return false. If no, determine if the year is divisible by 4. If yes, return true; if no, return false.
def leap_year?(year) if year < 1752 && year % 4 == 0 true elsif year % 400 == 0 true elsif year % 100 == 0 false else year % 4 == 0 end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def leap_year?(year)\n #PSEUDOCODE\n #every four years is a leap year\n #2012,2016,2020 are leap years\n #check if input is divisble 4\n #2000 is a leap year because it is divisible by 400\n #1900 is NOT a leap year because its divisble by 100 but not 400\n\n year % 4 == 0 && (year % 100 !=0 || year % 400 ==0)\n\nend", "def leap_year?(year)\n year.divisible_by?(400) || year.not_divisible_by?(100) && year.divisible_by?(4)\n # if (year % 100 == 0)\n # return year % 400 == 0\n # end\n # year % 4 == 0\nend", "def leap_year?(year)\n year % 4 == 0 && !((year % 100 == 0) && (year % 400 != 0)) # True if divisble by 4 && NOT((Divisble by 100) && (NOT divisible by 400))\nend", "def leap_year?(year)\n if divisible?(year, 400)\n true\n elsif divisible?(year, 100)\n false\n else\n divisible?(year, 4)\n end\nend", "def leap_year?(year)\n divisible?(year, 400) || divisible?(year, 4) && !divisible?(year, 100)\nend", "def leap_year? (x)\n if x % 4 == 0\n if x % 100 == 0 && x % 400 != 0\n return false\n end\n return true\n else\n return false\n end\nend", "def leap_year?(num)\n if num%4 == 0 && num%100 !=0\n true\n elsif num%400 == 0\n true\n elsif num%4 == 0 && num%100 == 0 && num%400 != 0\n false\n elsif num%4 != 0\n false\n end\nend", "def leap_year? (int)\n if int%400==0\n return true\n elsif int%100==0\n return false\n elsif int%4 ==0\n return true\n else\n return false\n end\nend", "def leap_year?(x)\n if x.to_i % 4 == 0\n if x.to_i % 100 == 0\n if x.to_i % 400 == 0\n return true\n else\n return false\n end\n else\n return true\n end\n else\n return false\n end\nend", "def leap_year?(year)\n if year%4 == 0\n if year%100 != 0 || year%400 == 0\n puts true\n else\n puts false\n end\n end\nend", "def leap_year?(year)\n if (year % 400).zero?\n true\n elsif (year % 100).zero?\n false\n else\n (year % 4).zero?\n end\nend", "def leap_year?(x)\n if x % 4 === 0 or x % 100 == 0\n if x % 100 === 0 && x % 400 != 0\n false\n else\n true\n end\n else\n false\n end\nend", "def leap_year?(x)\n if x % 100 == 0\n if x % 400 == 0\n return true\n else\n return false\n end\n elsif x % 4 == 0\n return true\n else\n return false\n end\nend", "def leap_year?(year)\n if (year%4 == 0 ) && !(year%100 == 0)\n return true\n elsif (year%400 == 0)\n return true\n elsif (year%4 == 0 ) && (year%100 == 0) && !(year%400 == 0)\n return false\n elsif !(year%4 == 0 )\n return false\n end\nend", "def leap_year?(year)\n return false if year % 4 != 0\n \n if year % 100 != 0\n true\n elsif year % 400 == 0\n true\n elsif year % 100 == 0 && year % 400 != 0\n false\n end\nend", "def leap_year?(year)\n return 'That is not a valid year.' if year <= 0\n ((year % 4).zero? 
&& (year % 100 != 0)) || (year % 400).zero?\nend", "def leap_year?(yr1)\r\n if yr1 % 4 > 0\r\n false\r\n elsif yr1 % 100 == 0\r\n if yr1 % 400 == 0\r\n true\r\n else\r\n false\r\n end\r\n elsif yr1 % 4 == 0\r\n true\r\n end\r\nend", "def leap_year?(year)\n return false unless (year % 4).zero?\n\n if (year % 100).zero?\n return false unless (year % 400).zero?\n end\n\n true\nend", "def leap_year?(integer)\n if integer % 400 == 0 \n return true\n elsif\n integer % 100 == 0\n return false\n elsif \n integer % 4 == 0\n return true\n else\n return false\n end\nend", "def leap_year?(year)\n if year % 4 == 0 && year % 100 !=0\n return true\n elsif year % 400 == 0\n return true\n elsif year % 4 == 0 && year % 100 == 0 && year % 400 != 0\n return false\n elsif year % 4 != 0\n return false\n end\nend", "def leap_year?(year)\n return true if (year % 400).zero?\n return false if (year % 100).zero?\n return true if (year % 4).zero?\n false\nend", "def leap_year?(input_int)\n if input_int % 400 == 0 && input_int % 100 == 0 then true\n elsif input_int % 100 == 0 then false\n elsif input_int % 4 == 0 then true\n else false\n end\nend", "def leap_year?(year)\n if year%4 == 0 && ( year%100 != 0 || year%400 == 0 )\n true\n else\n false\n end\nend", "def leap_year?(year)\n \n if year % 4 == 0 and year % 100 != 0 and \n return true\n elsif \n year % 400 == 0\n return true\n else \n return false\n end\nend", "def leap_year?(year)\n return false if year % 4 > 0\n return true if (year % 400).zero?\n return false if (year % 100).zero?\n true\nend", "def leap_year? (years)\n leap = years % 4\n leaps = years % 100\n leaped = years % 400\n if leap == 0 && leaps == 0 && leaped != 0\n return false\n elsif leap == 0 && leaps != 0\n return true\n elsif leap == 0 && leaps == 0 && leaped == 0\n return true\n else\n return false\n end\nend", "def leap_year?(input_year)\n ((input_year % 4 == 0) && (input_year % 100 > 0)) || (input_year % 400 == 0)\nend", "def leap_year?(year)\n if year % 100 == 0 and year % 400 != 0\n return false\n else\n return year % 4 == 0\n end\nend", "def leap_year(year)\n if year % 100 == 0\n if year % 400 == 0\n return true\n end\n elsif year % 4 == 0 \n return true \n end \n return false\n end", "def leap_year?(year)\n if year%400 == 0\n return true\n elsif year%100 == 0\n return false\n elsif year%4 == 0\n return true\n else\n return false\n end\nend", "def leap_year?(year)\n if year%400==0\n return true\n elsif year%4==0 && year%100!=0\n return true\n elsif year%4==0 && year%100==0 && year%400!=0\n return false\n else return false\n end\nend", "def leap_year?(year = nil)\n return false if year.nil?\n\tdivisible_by?(year, 400) || !divisible_by?(year, 100) && divisible_by?(year, 4)\nend", "def leap_year? (year)\n if year % 4 == 0 || year % 100 == 0 || year % 400 == 0\n puts true\n else\n puts false\n end\nend", "def leap_year?(year)\n if year % 4 == 0 && year % 100 != 0\n true\n elsif year % 400 == 0\n true\n elsif year % 4 == 0 && year % 100 == 0 && year % 400 != 0\n false\n else\n false\n end\nend", "def leap_year?(n)\n if n%4==0 && n%100!=0\n return true\n elsif n%400==0\n return true\n else\n return false\n end\nend", "def leap_year? 
(year) \nif year % 4 == 0 && year % 100 != 0\nreturn true\nelsif year % 400 == 0\n\treturn true \nelsif year % 4 == 0 || year % 100 == 0 && year % 400 != 0\n\treturn false\nelsif year % 4 != 0\n\treturn false\nelse\nreturn false\t\n\tend\nend", "def leap_year?(year)\n\nif year % 4 == 0 && year % 100 != 0 \n\treturn true;\n\nelsif year % 400 == 0\n\treturn true;\n\nelsif year % 4 == 0 && year % 100 == 0 && year % 400 != 0\n\treturn false;\n\nelsif year % 4 != 0\n\treturn false;\n\n end\nend", "def leap_year?(year)\n if year%4 != 0\n return false\nelsif (year%100 != 0 || year%400 == 0)\n return true\n else\n return false\n end\n end", "def leap? year\n ((year % 4).zero? && year % 100 != 0) || (year % 400).zero?\nend", "def leap_year?(year)\n if year % 400 == 0\n true\n elsif year % 100 != 0 && year % 4 == 0\n true\n else\n false\n end\nend", "def leap_year?(year)\n if year % 400 == 0\n res = true\n elsif year % 100 == 0\n res = false\n elsif year % 4 == 0\n res = true\n else\n res = false\n end\nend", "def leap_year? (year)\n if year % 4 == 0\n if year % 100 != 0\n return true\n else\n if year % 400 == 0\n return true\n else return false\n end\n end\n else return false\n end\nend", "def leap_year?(year)\r\n if year % 4 == 0 && !(year % 100 == 0)\r\n true\r\n else\r\n year % 400 == 0\r\n end\r\nend", "def leap_year?(year)\n return true if year % 400 == 0\n return false if year % 4 == 0 && year % 100 == 0\n if year % 4 == 0\n true\n else\n false\n end\nend", "def leap?(year)\n (year % 400).zero? ||\n year % 100 != 0 && (year % 4).zero?\nend", "def leap_year?(years)\r\n\tleap = years % 4\r\n\tleaps = years % 100\r\n\tleaped = years % 400\r\n\tif leap == 0 && leaps == 0 && leaped != 0\r\n\t\treturn false\r\n\telsif leap == 0 && leaps != 0\r\n\t\treturn true\r\n\telsif leap == 0 && leaps == 0 && leaped == 0\r\n\t\treturn true\r\n\telse\r\n\t\treturn false\r\n\tend\r\nend", "def leap_year?(year)\n if (year%4 == 0) and (year%100 != 0)\n return true\n elsif (year%400 == 0)\n return true\n else\n return false\n end\nend", "def leap_year?(year)\n if year % 4 == 0\n if year % 100 == 0\n return false unless year % 400 == 0\n end\n return true\n end\n false\nend", "def leap_year?(year)\n if year % 400 == 0\n true\n elsif year % 100 == 0\n false\n else\n year % 4 == 0\n end\nend", "def leap_year?(year)\n if year % 400 == 0\n true\n elsif year % 100 == 0\n false\n else\n year % 4 == 0\n end\nend", "def leap_year?(n)\n if n.to_i % 4 == 0\n puts \"true\"\n else\n puts \"false\"\n end\nend", "def leap_year?(year)\n if year % 400 == 0\n true\n elsif year % 100 == 0\n false\n else \n year % 4 == 0\n end\nend", "def leap_year?(year)\n if year % 400 == 0\n true\n elsif year % 100 == 0\n false\n else \n year % 4 == 0\n end\nend", "def leap_year?(year)\nif (year % 4 == 0 ) && !(year % 100 == 0)\n\treturn true\nelsif (year % 400 == 0)\n\treturn true\nelsif (year % 4 == 0) && (year % 100 == 0) && !(year % 400 ==0)\n\treturn false\n\nelsif !(year % 4 ==0)\n\treturn false\n\t\nend\nend", "def leapyear?(year)\n year % 4 == 0 && year % 400 !=0\nend", "def leap_year?(year)\nif (year % 4 == 0 ) && !(year % 100 == 0)\n return true\nelsif (year % 400 == 0)\n return true\nelsif (year % 4 == 0) && (year % 100 == 0) && !(year % 400 ==0)\n return false\n\nelsif !(year % 4 ==0)\n return false\n \nend\nend", "def leap_year?(year)\n if year % 400 == 0\n true\n elsif year % 100 == 0\n false\n elsif year % 4 == 0\n true\n else\n false\n end\nend", "def leap_year?(year)\n if year % 400 == 0\n true\n elsif year % 100 == 0\n false\n 
elsif year % 4 == 0\n true\n else\n false\n end\nend", "def leap_year?(year)\n if year % 4 == 0 and year % 100 != 0\n return true\n elsif year % 4 ==0 and year % 400 == 0\n return true\n else\n return false\n end\nend", "def leap_year?(year)\n if year % 100 == 0\n return true if year % 400 == 0\n else\n return true if year % 4 == 0\n end\n return false\nend", "def leap_year?(year)\n if year % 4 == 0\n true\n elsif year % 100 == 0\n false\n elsif year % 400 == 0\n true\n end\nend", "def leap_year?(int)\n case \n when int % 400 == 0 && int % 100 == 0 then true\n when int % 4 == 0 && int % 100 != 0 then true\n else false\n end\nend", "def leap_year?(year)\n if year % 4 == 0\n if year % 100 ==0\n if year % 400 ==0\n return true\n end\n return false\n end\n return true\n end\n return false\nend", "def leap_year?(year)\n if year % 100 == 0 && year % 400 == 0\n true\n elsif year % 4 == 0 && !(year % 100 == 0)\n true\n else\n false\n end\nend", "def leap_year?(year)\n if year % 400 == 0\n true\n elsif year % 4 ==0 && year % 100 != 0\n true\n elsif year % 100 == 0\n false\n else\n false\n end\nend", "def leap_year_further?(year)\n if year % 4 == 0\n if year % 100 == 0\n if year % 400 == 0\n true\n else\n false\n end\n else\n if year % 4 == 0\n true\n else\n false\n end\n end\n else\n false\n end\nend", "def leap_year?(year)\n if year % 100 == 0\n false\n elsif year % 400 == 0\n true\n else\n year % 4 == 0\n end\nend", "def leap_year?(year)\n if year % 100 == 0\n false\n elsif year % 400 == 0\n true\n else\n year % 4 == 0\n end\nend", "def is_leap_year?(year)\n if year % 400 == 0 #check for full century leap yr\n true\n elsif year % 100 == 0 # exception to the 400 rule\n false\n elsif year % 4 == 0 # its a leap year\n true\n else\n false # else its not.\n end\nend", "def leap_year?(year)\n if year % 400 == 0\n return true\n elsif year % 100 == 0\n return false\n elsif year % 4 == 0\n return true\n else\n return false\n end\n end", "def leap_year?(year)\n if (year % 100 == 0)\n if (year % 400 == 0)\n true\n else\n false\n end\n elsif (year % 4 == 0)\n true\n else\n false\n end\nend", "def leap_year?(year)\n\t\n\tif year%4 == 0 && year%100 != 0\n\t\t\n\t\tp true\n\t\n\telsif year%400 == 0\n\n\t\tp true\n\n\telsif year%4 == 0 && year%100 == 0 && year%400 != 0\n\n\t\tp false\n\n\telse\n\n\t\tp false\n\t\t\t\n\tend\n\nend", "def leap_year?(year)\n if (year.to_i % 4 == 0 && year.to_i % 100 != 0) || (year.to_i % 400 == 0)\n return true\n else\n return false\n end\nend", "def leap_year?(year)\n if (year % 4) == 0\n if (year % 100) == 0\n if (year % 400) == 0\n true\n else\n false\n end\n else\n true\n end\n else\n false\n end\nend", "def leap_year?(year)\n if year % 100 == 0\n year % 400 == 0 ? true : false\n else\n year % 4 == 0 ? true : false\n end\nend", "def leap_year?(year)\n if year % 4 == 0 && year % 100 != 0\n true\n elsif year % 100 == 0 && year % 400 == 0\n true\n else\n false\n end\nend", "def isLeapYear?(year)\n if (year % 4 != 0)\n return false\n elsif (year % 100 != 0)\n return true\n elsif (year % 400 != 0)\n return false\n else\n return true\n end\nend", "def arithmetic_leap_year?(f_year = self.year)\n (f_year % 4) == 0 && \n ![100, 200, 300].include?(f_year % 400) &&\n (f_year % 4000) != 0\n end", "def leap?\n\t\t# \t#leap years are divisible by 4 but not 100 unless they are divisible by 400\n\t\tdivides_by_4? && ( does_not_divide_by_100? || divides_by_400? 
)\n\n\t\tdef divides_by_4?\n\t\t\t(@year % 4) == 0\n\t\tend\n\n\t\tdef does_not_divide_by_100?\n\t\t\t@year %100 != 0\n\t\tend\n\n\t\tdef divides_by_400?\n\t\t\t@year % 400 == 0\n\t\tend\n\tend", "def leap_year?(year)\n if (year % 4) == 0\n if (year % 400) == 0\n return true\n elsif (year % 100) == 0\n return false\n else\n return true\n end\n else\n return false\n end\nend", "def leap_year? year\n year % 4 == 0 && (year % 400 == 0 || year % 100 != 0)\nend", "def leap_year? (year)\n case\n when year % 400 == 0\n return true\n when year % 100 == 0\n return false\n when year % 4 == 0\n return true\n else\n return false\n end\nend", "def leap_year?(year)\n \n if year < 1752\n year % 4 == 0 \n else \n (year % 4 == 0 && year % 100 != 0) || (year % 400 == 0)\n end\n\nend", "def leap_year?(year)\n if year % 4 == 0 && year % 100 > 0\n return true\n elsif year % 100 && year % 400 == 0\n return true\n else\n return false\n end\nend", "def leap_year? year\n year % 4 == 0 && year % 100 != 0 || year % 400 == 0\nend", "def leap_year?(year)\n if year < 1752\n if year % 4 == 0 then true\n else false\n end\n elsif year % 400 == 0\n true\n elsif year % 4 == 0 && year % 100 == 0 \n false\n elsif year % 4 == 0\n true\n else false\n end\nend", "def leap_year?(year)\n if year < 1752 && year % 4 == 0\n true\n elsif year % 400 == 0\n true\n elsif year % 100 == 0\n false\n else \n year % 4 == 0\n end\nend", "def leap_year?(year)\n if year < 1752 && year % 4 == 0\n true\n elsif year % 400 == 0\n true\n elsif year % 100 == 0\n false\n else \n year % 4 == 0\n end\nend", "def is_leap_year?(year)\n return false if year % 4 != 0\n return true if year % 400 == 0\n return false if year % 100 == 0\n true\nend", "def leap_year?(yr)\n\tyr % 4 == 0 && (yr % 100 != 0 || yr % 400 == 0)\nend", "def leap_year?(year)\n \tif year % 400 == 0\n \t\treturn true\n \telsif year % 100 == 0\n \t\treturn false\n \telsif year % 4 == 0\n\t\treturn true\n\tend\n\tfalse\nend", "def leap_year?(year)\n year % 4 == 0 && year % 100 != 0 || year % 400 == 0\nend", "def leap_year?(year)\n year % 4 == 0 && year % 100 != 0 || year % 400 == 0\nend", "def leap_year?(year)\n\tif year%4 == 0\n\t\tif year%100 == 0 && year%400 != 0\n\t\t\treturn false\n\t\telse\n\t\t\treturn true\n\t\tend\n\telse\n\t\treturn false\t\t\n\tend\nend", "def leap_year?(year)\n if (year % 4 == 0)\n if (year % 100 == 0)\n if (year % 400 == 0)\n return true\n end\n return false\n end\n return true\n end\n return false\nend", "def leap_year?(year)\nyear % 4 ==0 && (year %100 !=0 || year % 400 ==0)\n\nend", "def leap_year?(year)\n\n if year%400 == 0\n\n p true\n\n elsif year%100 == 0\n\n p false\n\n elsif year%4 == 0\n\n p true\n\n else\n\n p false\n\n end\n\nend", "def leap_year?(year)\n if year < 1752 && year % 4 == 0\n true\n elsif year % 400 != 0 && year % 100 == 0\n false\n elsif year % 4 == 0\n true\n else\n false\n end\nend", "def leap_year?(year)\n\tif (year%4 == 0) and ((year%100 != 0) or (year%400 == 0))\n\t\treturn true\n\telse\n\t\treturn false\n\tend\n\nend", "def leap_year?(year)\n if year >= 1752\n if year % 400 == 0\n return true\n elsif year % 4 == 0 && !(year % 100 == 0)\n return true\n else\n false\n end\n \n elsif year % 4 == 0\n true\n else\n false\n end\nend" ]
[ "0.906098", "0.88388515", "0.87225306", "0.8712026", "0.86061156", "0.85405546", "0.8539625", "0.84958094", "0.84656996", "0.8424928", "0.8423182", "0.8422723", "0.84201634", "0.84133", "0.84062076", "0.8398416", "0.8396825", "0.8389221", "0.8377899", "0.8365543", "0.83591807", "0.83470047", "0.83457947", "0.83438313", "0.83434355", "0.8343321", "0.8343282", "0.83432376", "0.8341595", "0.83333254", "0.8328593", "0.8326218", "0.8326119", "0.83216065", "0.83191746", "0.8317761", "0.83093446", "0.8308288", "0.8306381", "0.8306203", "0.83058983", "0.83024806", "0.830088", "0.829823", "0.8297011", "0.82968974", "0.8292198", "0.8290549", "0.8288167", "0.8288167", "0.82878697", "0.82873416", "0.82873416", "0.82832724", "0.8274969", "0.82749677", "0.8271874", "0.8271874", "0.8267699", "0.82663983", "0.8266147", "0.8260078", "0.825856", "0.8258475", "0.82562506", "0.82489645", "0.8248459", "0.8248459", "0.8245955", "0.8240113", "0.82240057", "0.821897", "0.8211695", "0.82113373", "0.82104003", "0.8209574", "0.8204303", "0.82021105", "0.8201368", "0.81960064", "0.8193161", "0.81851023", "0.81846994", "0.81801945", "0.81755555", "0.8175538", "0.8170775", "0.8170775", "0.8164151", "0.8161908", "0.8160877", "0.8159846", "0.8159846", "0.815382", "0.8136", "0.8132826", "0.81327623", "0.81303376", "0.8122208", "0.8119188" ]
0.81626004
89
adds repository hash to ENC global parameters
def params_with_repositories # convert all repos to a format that puppet create_resource with yumrepo can consume repos = Hash[attached_repositories.map { |repo| [repo.to_label, format_repo(repo)] }] # adds a global parameter called repositories contain all repos params_without_repositories.merge('repositories' => repos) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def repository_params\n params.require(:repository).permit(:title, :path, :vcs_type, :weblink_to_commit, :hook_enabled, :hook_login, :hook_password)\n end", "def register(params)\n @fields = params[\"fields\"]\n @tag = params[\"tag\"]\n @key = params[\"key\"]\n @digest = OpenSSL::Digest::SHA256.new\nend", "def set_git(params)\n [:type, :source, :scm_name, :scmtoken, :scmowner,\n :scmbranch, :scmtag ].each do |repo_setting|\n params[repo_setting] = self.send(\"#{repo_setting}\") if self.send(\"#{repo_setting}\")\n end\n params\n end", "def repository_params\n params.require(:repository).permit(:name, :url, :acronym, :status, :institutional_LSID, :is_index_herbarioum_record, :created_by_id, :updated_by_id)\n end", "def secret_key_base=(_arg0); end", "def secret_key_base=(_arg0); end", "def authentication_hash=(_arg0); end", "def params=(hash); end", "def params=(hash); end", "def passwordrepository_params\n params.require(:passwordrepository).permit(:system_name, :system_password, :active, :user_name)\n end", "def update_params\n permitted = [:name, :use_ssl, (:hostname unless Repository.any?), :external_hostname].compact\n params.require(:registry).permit(permitted)\n end", "def update_params\n permitted = [:name, :use_ssl, (:hostname unless Repository.any?), :external_hostname].compact\n params.require(:registry).permit(permitted)\n end", "def store_encryption_key_sha\n self.encryption_key_sha = ENCRYPTION_KEY_SHA\n end", "def git_hub_repo_params\n params.require(:git_hub_repo).permit(:repo, :node_id, :user_id, :state, :code)\n end", "def kernel_source_params\n params.require(:kernel_source).permit(:git_repo, :git_ref)\n end", "def base_credential; end", "def repository_params\n params.require(:repository).permit(:url, :framework)\n end", "def add(hash, opts = {})\n uuid = opts[:id] || SecureRandom.uuid\n thash = Treet::Hash.new(hash)\n repos[uuid] = thash.to_repo(\"#{root}/#{uuid}\", opts.merge(:repotype => repotype))\n end", "def global_setting_params\n params.require(:global_setting).permit(:name, :url, :base64_key, :active, :region)\n end", "def repository_params\n params.require(:repository).permit(:repo_id, :name, :user, :description, :stars)\n end", "def repository_params\n params.permit(:user, :name, :private, :new_name, :description)\n end", "def default_key \n Digest::SHA1.hexdigest(\"riaque:#{name}\")\n end", "def setup\n @commit_data = {\n :id => \"a-real-deal-id-#{get_iterator}\",\n :commit_message => 'Creating Git Resource',\n :author_name => 'Collin',\n :author_email => '[email protected]',\n :attributes => \"{ \\\"title\\\" : \\\"test #{get_iterator}\\\" }\",\n :rendered => \"<h1>Test #{get_iterator}</h1>\"\n }\n @commit_sha = Regulate::Git::Interface.save(@commit_data)\n end", "def extra_fields_to_set\n {\n decrypted_secret_key: decrypted_secret_key\n }\n end", "def add_crypto_variables(crypto_salt, crypto_iterations, crypto_key)\n @crypto_salt = crypto_salt\n @crypto_iterations = crypto_iterations\n @crypto_key = crypto_key\n end", "def assign_version_params\n resource_params.first.merge!(version_author: committer)\n end", "def repository_params\n params.require(:repository).permit :profile_name, :name, :url, :public,\n :description\n end", "def snapshots_transport_auth_key=(_arg0); end", "def repository_params\n params.permit(:organization_id, :repo_id, :name)\n end", "def extension_parameters\n @extension_parameters ||= {}\n end", "def create_params\n herbarium_params.merge(\n name: \" Burbank <blah> Herbarium \",\n code: \"BH \",\n place_name: \"Burbank, California, 
USA\",\n email: \"[email protected]\",\n mailing_address: \"New Herbarium\\n1234 Figueroa\\nBurbank, CA, 91234\\n\\n\\n\",\n description: \"\\nSpecializes in local macrofungi. <http:blah>\\n\"\n )\n end", "def populate_hash\n self.orig_image_url_hash = Digest::SHA1.hexdigest orig_image_url\n end", "def additional_parameters\n @additional_parameters ||= {}\n end", "def repository_key\n @repository_name ? @repository_name.to_sym : derived_name.to_sym\n end", "def configuration_for_repository\n {\n \"name\" => self.configuration_name,\n \"namespace\" => self.configuration_namespace,\n \"methodConfigVersion\" => self.configuration_snapshot,\n \"methodRepoMethod\" => {\n \"methodName\" => self.name,\n \"methodNamespace\" => self.namespace,\n \"methodVersion\" => self.snapshot\n },\n \"inputs\" => self.repository_parameter_list(:inputs),\n \"outputs\" => self.repository_parameter_list(:outputs),\n \"prerequisites\" => {},\n \"rootEntityType\" => self.entity_type,\n \"deleted\" => false\n }\n end", "def home_params\n params.fetch(:crypto, {})\n end", "def params_auth_hash; end", "def git_hub_account_params\n params.require(:git_hub_account).permit(:oauth, :user_id, :state)\n end", "def repo=(_arg0); end", "def repo=(_arg0); end", "def repo=(_arg0); end", "def repo=(_arg0); end", "def repository_params\n params.require(:repository).permit(:name, :project, :repo_location, languages: [])\n end", "def git_init\n git.config('user.name', ENV['MACHINE_USER_NAME'])\n git.config('user.email', ENV['MACHINE_USER_EMAIL'])\n end", "def make_hash_repo_test_value(data = 'foo')\n data\n end", "def repository\n @repository ||= Hash.new\n end", "def params\n { version: ::EasyManageClient.configuration(profile).extension }\n end", "def common_params\n {\n magento_url: magento_url,\n magento_username: magento_username,\n magento_api_key: magento_api_key\n }\n end", "def extintor_params\n params.fetch(:extintor, {})\n end", "def repo_params\n params.require(:repo).permit(:test_commands, :test_setup_commands)\n end", "def ruby_gem_params\n params.require(:ruby_gem).permit(:name, :version, :author, :information, :project_uri, :homepage_uri)\n end", "def repository_name\n config['image']\n end", "def add_credential(source_id, tenant: user_tenant_account)\n {'id' => source_id.to_s, 'username' => 'admin', 'password' => 'smartvm', 'resource_id' => source_id.to_s, 'resource_type' => 'Endpoint', 'tenant' => tenants[tenant]['external_tenant']}\n end", "def lcb_registry_params\n params.require(:lcb_registry).permit(:visible, :summary, :val, :regex, :owner_id, :create_uid, :write_uid, :id)\n end", "def repo_params\n params.require(:repo).permit(:repo_type, :name, :api_url)\n end", "def version_attributes\n super.merge(:commit_label => commit_label)\n end", "def params_auth_hash\n params[scope].merge(ip_address: remote_ip)\n end", "def repo_params\n params.require(:repo).permit(:name, :user_id)\n end", "def initialize(*)\n super\n\n @encrypted_env_key = \"ENCRYPTED_#{env_key}\"\n @encrypted_key_path = \"#{key_path}.enc\"\n end", "def image_params\n params[:image].permit(:content).merge(repository: @repository)\n end", "def add_some_extra_params\n @params['size'] = @file.size\n @params['md5sum'] = @file.md5sum\n end", "def add_param(p)\n @install_command.add_param(p)\n end", "def mercurial_params\n params.require(:mercurial).permit(:ezii_os_global_path, :detected_app_type, :level)\n end", "def params_digest\n # return Digest::SHA1.hexdigest(params.sort.flatten.join(\"_\"))\n Digest::SHA1.hexdigest(params.to_s)\n end", "def 
update_signature!\n result = Overcommit::Utils.execute(\n %w[git config --local] + [signature_config_key, signature]\n )\n\n unless result.success?\n raise Overcommit::Exceptions::GitConfigError,\n \"Unable to write to local repo git config: #{result.stderr}\"\n end\n end", "def set_outgoing_hash\n \n \t\tdata = \tself.class.get_payumoney_key + \"|\" + self.id.to_s + \"|\" + self.amount.to_s + \"|\" + get_product_info + \"|\" + get_first_name + \"|\" + get_email + \"|||||\" + get_udf5 + \"||||||\" + self.class.get_payumoney_salt\n\n \t\tself.outgoing_hash = Digest::SHA512.hexdigest(data)\n \tend", "def repo_params\n params.require(:repo).permit(:name, :owner, :html_url, :description, :language, :size, :user_id, :repo_ids => [])\n end", "def params_digest\n return Digest::SHA1.hexdigest(params.sort.flatten.join(\"_\"))\n end", "def extension_parameters= new_extension_parameters\n if new_extension_parameters.respond_to? :to_hash\n @extension_parameters = new_extension_parameters.to_hash\n else\n raise TypeError,\n \"Expected Hash, got #{new_extension_parameters.class}.\"\n end\n end", "def registration_params\n hash = params.permit(:object_type, :admin_policy, :metadata_source, :rights,\n :collection, :other_id, tag: [])\n hash[:source_id] = params.require(:source_id)\n hash[:label] = params.require(:label)\n hash\n end", "def passphrase\n super\n end", "def require_master_key=(_arg0); end", "def require_master_key=(_arg0); end", "def base_params\n {\n v: PROTOCOL_VERSION,\n # Client ID\n cid: @user_id,\n # Tracking ID\n tid: TRACKING_ID,\n # Application Name\n an: APPLICATION_NAME,\n # Application Version\n av: Bolt::VERSION,\n # Anonymize IPs\n aip: true,\n # User locale\n ul: Locale.current.to_rfc,\n # Custom Dimension 1 (Operating System)\n cd1: @os\n }\n end", "def to_hash\n result = super\n result[:source_type] = :git\n result[:location] = location\n result[:commitish_type] = commitish_type\n result[:commitish] = commitish\n result\n end", "def repository_params\n params.require(:repository).permit(:title, :staff_id, :data, :uploaded, :uploaded_file_name, :uploaded_content_type, :uploaded_file_size)\n end", "def hash\n [uri, parameters, username, password, verify_mode].hash\n end", "def public_repository_params\n params.require(:public_repository).permit(:name, :url, :record_id)\n end", "def repo_cred_params\n params.require(:repo_cred).permit(:username, :token)\n end", "def hr_config_contract_params\n params.require(:hr_config_contract).permit(:code, :title, :khtitle, :description, :khdescription, :usercreate, :userupdate)\n end", "def bridgepay_config\n Hash(\n username: ::BridgePay.username,\n password: ::BridgePay.password,\n merchant_code: ::BridgePay.code,\n merchant_account: ::BridgePay.account\n ) \n end", "def set_dep(requested_version, whitelisted_repo_key, sha)\n if requested_version.include?(\"git+ssh\") # Preserve URL\n return \"#{ requested_version.split(\"#\").first }##{ sha }\"\n else\n return \"#{ whitelisted_repo_key }##{ sha }\"\n end\nend", "def strong_params\n params.require(:installer_new_app).permit(\n :repository_id,\n :label, :container_name, :host_name, :domain_name, :http_protocol,\n :icon_url, :license_label, :license_sourceurl,\n :memory, :required_memory, :recommended_memory, :license_accept,\n service_connections_attributes: [\n :publisher_namespace, :type_path,\n :create_type, :existing_service, :orphan_service ],\n environment_variables_attributes: [\n :mandatory, :immutable,\n :ask_at_build_time, :build_time_only,\n field_attributes: [\n :value, 
:method_name, :as,\n :label, :title,\n :horizontal, :compact,\n :left, :width, :right,\n :collection,\n :placeholder, :comment, :tooltip, :hint,\n :validate_regex, :validate_invalid_message,\n :depends_on_field, :depends_on_regex,\n :required, :read_only ] ] )\n end", "def commit_params\n params.require(:commit).permit(:sha, :description, :author, :commit_date)\n end", "def relevant_options_digest(options); end", "def salt_params\n params.require(:salt).permit(:name, :description)\n end", "def config_hash\n digest = Digest::MD5.hexdigest(\n \"#{@x}-#{@y}-#{@hires_factor}-#{@render_type}-#{@format}-#{CONVERTER_VERSION}\")\n digest\n end", "def app_environment_credential_params\n params.require(:app_environment_credential).permit(:credential_id, :app_environment_id, :encrypted_value, :text_value)\n end", "def hash\n [additional_line_item_data, allow_invoice_download, allowed_payment_method_configurations, currency, id, integrated_payment_form_enabled, language, login_name, name, payment_app_version, payment_installed, payment_proxy_path, planned_purge_date, replace_payment_method_image, shop_name, show_payment_information, show_subscription_information, space_id, space_view_id, state, subscription_app_version, subscription_installed, subscription_proxy_path, version].hash\n end", "def external_credential_params\n params.require(:external_credential).permit(:name, :description, :username, :password)\n end", "def append_api_credentials(params)\n is_param_defined(params) do\n params = Struct::ApiCredentials.new\n params.version = Struct::Version.new\n end \n if params.merchant_id == nil \n params.merchant_id = self.config.merchant_id\n end\n if params.merchant_key == nil\n params.merchant_key = self.config.merchant_key\n end\n if params.version.client == nil\n params.version.client = ZipMoney::Configuration::API_NAME + \" Version:\" + ZipMoney::Configuration::API_VERSION\n end\n if params.version.platform == nil\n params.version.platform = ZipMoney::Configuration::API_PLATFORM\n end\n params \n end", "def secret_key_base; end", "def secret_key_base; end", "def secret_key_base; end", "def secret_key_base; end", "def get_base_args(pr)\n {\n :clone => CONFIG[\"local\"][\"clone\"],\n :remote => CONFIG[\"local\"][\"remote\"],\n :org => CONFIG[\"repo\"][\"org\"],\n :repo => CONFIG[\"repo\"][\"repo\"],\n :base => pr[\"base\"][\"ref\"],\n :head => pr[\"head\"][\"ref\"],\n :user => pr[\"head\"][\"user\"][\"login\"],\n :branch => pr[\"head\"][\"user\"][\"login\"] + \"-\" + pr[\"head\"][\"ref\"]\n }\n end", "def hash(option_json)\n\t\tif(File.exist?(GROWTH::GROWTH_REPOSITORY))then\n\t\t\tg = Git.open(GROWTH::GROWTH_REPOSITORY)\n\t\t\treturn {status: \"ok\", hash: g.log()[-1].sha}\n\t\telse\n\t\t\treturn {status: \"error\", message: \"GRWOTH git repository '#{GROWTH::GROWTH_REPOSITORY}' not found \"}\n\t\tend\n\tend", "def local\n return Global.git_repository_directory + @address_digest\n end", "def ezii_delta_git_params\n params.require(:ezii_delta_git).permit(:git_commit_created_at, :github_commit_link, :git)\n end", "def ruby_application_params\n if CONFIG['GITHUB_INTEGRATION']\n params.require(:ruby_application).permit(:name)\n else\n params.require(:ruby_application).permit(:name, :filename, :gems_url)\n end\n end" ]
[ "0.6254725", "0.61494255", "0.59811825", "0.5894692", "0.58299", "0.58299", "0.5826589", "0.5807352", "0.5807352", "0.5777314", "0.56591165", "0.56591165", "0.5653479", "0.56063724", "0.56062937", "0.5558191", "0.55314964", "0.5517735", "0.5511326", "0.55107707", "0.5458556", "0.5444362", "0.5428277", "0.5427207", "0.5419122", "0.5416006", "0.54118544", "0.540711", "0.53962386", "0.53932554", "0.5391565", "0.5390122", "0.5368886", "0.5366877", "0.53564274", "0.5352519", "0.53431284", "0.5334967", "0.53346545", "0.53346545", "0.53346545", "0.53346545", "0.53207093", "0.52715826", "0.52472794", "0.524726", "0.52451575", "0.5229086", "0.52098143", "0.5209638", "0.52065074", "0.5203949", "0.5192588", "0.5185029", "0.517998", "0.51794374", "0.5178783", "0.517868", "0.51707345", "0.51663417", "0.51658183", "0.5159586", "0.5151083", "0.5151081", "0.5145817", "0.514422", "0.5143109", "0.51354796", "0.5125525", "0.510852", "0.50993234", "0.50969344", "0.50969344", "0.5096096", "0.5092603", "0.50902563", "0.50875694", "0.5085394", "0.5083882", "0.5083581", "0.50824815", "0.5081657", "0.5081015", "0.5070009", "0.5065122", "0.5064142", "0.5062528", "0.5058289", "0.50520444", "0.50470847", "0.50467557", "0.5043157", "0.5043157", "0.5043157", "0.5043157", "0.5038905", "0.5015664", "0.50108725", "0.5001901", "0.49960107" ]
0.6163446
1
product_ids from the os default and hostgroup.
def inherited_product_ids products = [] products += operatingsystem.product_ids if operatingsystem products += Content::HostgroupProduct.where(:hostgroup_id => hostgroup.path_ids).pluck(:product_id) if hostgroup_id products.uniq end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_ids products\n products.each do |k, sp|\n size= Size.find_by_name sp[:size]\n color = Color.find_by_name sp[:color]\n if !size.nil? && !color.nil?\n p= Product.where(\"reference = ? and size_id = ? and color_id = ?\", sp[:reference], size.id, color.id)[0]\n sp[:product_id] = p.id if !p.nil?\n end\n end\n end", "def product_groups\n @product_groups ||= ProductGroupProxy.new(self)\n end", "def product_locations\n case Aspera::Environment.os\n when Aspera::Environment::OS_WINDOWS; return [{\n :expected =>PRODUCT_CONNECT,\n :app_root =>File.join(ENV['LOCALAPPDATA'],'Programs','Aspera','Aspera Connect'),\n :log_root =>File.join(ENV['LOCALAPPDATA'],'Aspera','Aspera Connect','var','log'),\n :run_root =>File.join(ENV['LOCALAPPDATA'],'Aspera','Aspera Connect')\n },{\n :expected =>PRODUCT_CLI_V1,\n :app_root =>File.join('C:','Program Files','Aspera','cli'),\n :log_root =>File.join('C:','Program Files','Aspera','cli','var','log'),\n },{\n :expected =>PRODUCT_ENTSRV,\n :app_root =>File.join('C:','Program Files','Aspera','Enterprise Server'),\n :log_root =>File.join('C:','Program Files','Aspera','Enterprise Server','var','log'),\n }]\n when Aspera::Environment::OS_X; return [{\n :expected =>PRODUCT_CONNECT,\n :app_root =>File.join(Dir.home,'Applications','Aspera Connect.app'),\n :log_root =>File.join(Dir.home,'Library','Logs','Aspera_Connect'),\n :run_root =>File.join(Dir.home,'Library','Application Support','Aspera','Aspera Connect'),\n :sub_bin =>File.join('Contents','Resources'),\n },{\n :expected =>PRODUCT_CLI_V1,\n :app_root =>File.join(Dir.home,'Applications','Aspera CLI'),\n :log_root =>File.join(Dir.home,'Library','Logs','Aspera')\n },{\n :expected =>PRODUCT_ENTSRV,\n :app_root =>File.join('','Library','Aspera'),\n :log_root =>File.join(Dir.home,'Library','Logs','Aspera'),\n },{\n :expected =>PRODUCT_DRIVE,\n :app_root =>File.join('','Applications','Aspera Drive.app'),\n :log_root =>File.join(Dir.home,'Library','Logs','Aspera_Drive'),\n :sub_bin =>File.join('Contents','Resources'),\n }]\n else; return [{ # other: Linux and unix family\n :expected =>PRODUCT_CONNECT,\n :app_root =>File.join(Dir.home,'.aspera','connect'),\n :run_root =>File.join(Dir.home,'.aspera','connect')\n },{\n :expected =>PRODUCT_CLI_V1,\n :app_root =>File.join(Dir.home,'.aspera','cli'),\n },{\n :expected =>PRODUCT_ENTSRV,\n :app_root =>File.join('','opt','aspera'),\n }]\n end\n end", "def products(params = {})\n @products ||= product_ids.map do |product_id|\n client.products.find(product_id)\n end\n end", "def get_variants_used_by_rpms\n return {} if is_pdc?\n brew_build_map = HashList.new\n mappings = self.build_mappings.for_rpms\n product_version_ids, brew_build_ids, package_ids = collect_id_lists(mappings)\n\n ThreadLocal.with_thread_locals(\n :cached_restrictions => Package.prepare_cached_package_restrictions(package_ids),\n :cached_arches => Arch.prepare_cached_arches,\n :cached_files => BrewBuild.prepare_cached_files(brew_build_ids),\n :cached_listings => ProductListingCache.prepare_cached_listings(product_version_ids, brew_build_ids)\n ) do\n mappings.group_by(&:product_version).each_pair do |product_version,mappings|\n mappings.each do |mapping|\n package = mapping.package\n mapping.build_product_listing_iterator({:cache_only => true}) do |file, variant, brew_build, arch_list|\n # Some packages are restricted to push to certain dists.\n # Exclude the package if it is not pushing to the dists.\n next if (package.supported_push_types_by_variant(variant) & self.supported_push_types).empty?\n\n # If the 
errata is multi-product supported then we need to find the\n # mapped channels by given the product listing variant and arches.\n mapped_variants = self.supports_multiple_product_destinations? ?\n self.get_mapped_variants(product_version, package, variant, arch_list) :\n []\n brew_build_map[package].concat([variant] + mapped_variants)\n end\n end\n end\n end\n\n brew_build_map.values.each {|v| v.uniq!}\n brew_build_map\n end", "def product_property_and_property_version_ids(product_id:)\n if status[:product_properties]\n version_ids = status[:product_properties].collect do |item|\n item.select {|key, value| key.match /_version_id$/} if item[:product_id] == product_id\n end.compact\n\n version_ids\n else\n []\n end\n end", "def get_product_ids_from_config(key)\n # get product ids from configuration\n return [] if key.nil?\n\n # takes in a regexp and looks it up in the content settings\n products_hash = SETTINGS[:fusor][:content].select { |k, v| k.to_s.match(key) }\n pids = []\n products_hash.values.each do |products|\n pids.concat(products)\n end\n return pids.map { |p| p[:product_id] }.uniq\n end", "def products\n @products ||= [].tap { |prods| each { |prod| prods << prod } }\n end", "def products\n @products ||= hash[\"Products\"].map { |p| Merchant::Product.new p }\n end", "def product_properties_version_ids(product_id:)\n if status[:product_properties]\n status[:product_properties].collect do |item|\n item[:product_property_version_id] if item[:product_id] == product_id\n end.compact\n else\n []\n end\n end", "def product_ids\n new_record? ? (@products ? @products.map(&:id) : []) : self.root.products.map(&:id)\n end", "def get_ids(host)\n node_uid, site_uid, grid_uid, _tdl = host.split('.')\n cluster_uid, node_num = node_uid.split('-')\n ids = { 'node_uid' => node_uid, 'site_uid' => site_uid, 'grid_uid' => grid_uid, 'cluster_uid' => cluster_uid, 'node_num' => node_num }\n return ids\nend", "def product_list\n a = selectable_products\n l = products a\n l\n end", "def product_list\n current_user.products.order('name').collect {|p| [ p.name, p.ticket_project_id, p.id ]}\n end", "def find_products\n\n product_ids = session[:compare_products] || []\n if product_ids.length > 4\n flash[:notice] = I18n.t('compare_products.limit_is_4')\n product_ids = product_ids[0..3]\n elsif product_ids.length < 1\n flash[:error] = I18n.t('compare_products.insufficient_data')\n redirect_to \"/t/#{@taxon.permalink}\"\n end\n @products = Spree::Product.find(:all, :conditions => { :id => product_ids},\n :include => { :product_properties => :property },\n :limit => 4)\n end", "def product_ids\n new_record? ? (@products_cache ? 
@products_cache.map(&:id) : []) : self.products.map(&:id)\n end", "def properties_version_ids(product_id:)\n if status[:product_properties]\n status[:product_properties].collect do |item|\n item[:property_version_id] if item[:product_id] == product_id\n end.compact\n else\n []\n end\n end", "def products\n Product.find_all_by_vendor_id(@id)\n end", "def group_items\n @group_items ||= Product.includes(:pictures)\n .where(group: group.presence || '_ZZZZ_', active: true, hidden: false)\n .order(:option_sort, :option_title)\n end", "def real_variants\n Product.where(openerp_id: product_variants.collect(&:openerpid)).order('id ASC').uniq\n end", "def set_product_product_group\n @product_product_group = ProductProductGroup.find(params[:id])\n end", "def products\n @products ||= rules.of_type('Spree::Promotion::Rules::Product').map(&:products).flatten.uniq\n end", "def products\n Product.find_by_vendor(@id)\n end", "def compute_list_ids(mixins = nil)\n # TODO: impl filtering with mixins\n backend_compute_pool = ::OpenNebula::VirtualMachinePool.new(@client)\n rc = backend_compute_pool.info_all\n check_retval(rc, Backends::Errors::ResourceRetrievalError)\n\n compute = []\n backend_compute_pool.each do |backend_compute|\n compute << backend_compute['ID']\n end\n\n compute\n end", "def production_partnerships_shared_products\n#\t\tMoved to production products\n\tend", "def products\n products_this_vendor_sells = []\n products_to_check = FarMar::Product.all\n products_to_check.each do |product_to_check|\n if self.id == product_to_check.vendor_id\n products_this_vendor_sells << product_to_check\n end#of if\n end#of do\n return products_this_vendor_sells\n end", "def product_info\n {\n product_name: {\n default_value: 'similarity detection reference tool'\n },\n product_version: '1.0',\n description: {\n default_value: 'LTI 2.1 tool provider reference implementation'\n },\n product_family: {\n code: 'similarity detection reference tool',\n vendor: {\n code: 'Instructure.com',\n vendor_name: {\n default_value: 'Instructure'\n },\n description: {\n default_value: 'Canvas Learning Management System'\n }\n }\n }\n }\n end", "def get_products_selected\n raise \"expecting knowledge to be set\" unless @current_knowledge\n unless @products_selected\n @products = @current_knowledge.get_products\n if session[:product_ids_selected]\n @products_selected = @products.select { |p| session[:product_ids_selected].include?(p.id) }\n else\n session[:product_ids_selected] = (@products_selected = @products.first(5)).collect(&:id)\n end\n end\n [@products, @products_selected]\n end", "def products_list\n @products = Haiwet::Product.list\n @products.each.with_index(1) do |prod, i|\n puts \"#{i}- #{prod.name}\"\n end\n end", "def known_omnibus_projects\n # iterate through min/max versions for all product names\n # and collect the name for both versions\n projects = %w{ 0.0.0 1000.1000.1000 }.collect do |v|\n @version = v\n omnibus_project\n end\n # remove duplicates and return multiple known names or return the single\n # project name\n projects.uniq || projects\n end", "def option_types_version_ids(product_id:)\n if status[:option_types_and_values]\n status[:option_types_and_values].collect do |item|\n item[:option_type_version_id] if item[:product_id] == product_id\n end.compact\n else\n []\n end\n end", "def products\n FarMar::Product.all.select { |product| product.vendor_id == id }\n end", "def products\n @products ||= begin\n items = []\n\n for i in (1..params[\"NumItens\"].to_i)\n items << {\n :id => 
params[\"ProdID_#{i}\"],\n :description => params[\"ProdDescricao_#{i}\"],\n :quantity => params[\"ProdQuantidade_#{i}\"].to_i,\n :price => to_price(params[\"ProdValor_#{i}\"]),\n :shipping => to_price(params[\"ProdFrete_#{i}\"]),\n :fees => to_price(params[\"ProdExtras_#{i}\"])\n }\n end\n\n items\n end\n end", "def mk_products\n # create some test products\n @first_product = Product.create!(:name => \"Test product 1\")\n @second_products = Product.create!(:name => \"Test product 2\") \n @product_ids = Product.all.map(&:id)\n assert_equal(2,Product.all.size)\n [@first_product,@second_product]\n end", "def index\n @product_product_groups = ProductProductGroup.all\n end", "def config_item_group_and_defaults\n @host_type_item = ConfigItem.find_by_name('host_type')\n if @host_type_item\n @host_type_children = ConfigItem.find_all_by_parent_id @host_type_item.id\n end\n @config_groups = ConfigItem.find_all_by_parent_id nil\n \n @config_groups.each { |item|\n if item.name == 'defaults'\n @default_group = item\n @config_groups.delete(item)\n break\n end\n }\n if @default_group\n @default_items = ConfigItem.find_all_by_parent_id_and_configurable(@default_group.id, true)\n end\n \n end", "def os_group\n @os_group\n end", "def products\n @product_map.keys\n end", "def set_product_group\n @product_group = ProductGroup.find(params[:id])\n end", "def get_enterable_products\n return call('Product.get_enterable_products')\n end", "def products\n @products ||= rules.of_type('Promotion::Rules::Product').map(&:products).flatten.uniq\n end", "def taxons_version_ids(product_id:)\n if status[:taxons]\n status[:taxons].collect do |item|\n item[:version_id] if item[:product_id] == product_id\n end.compact\n else\n []\n end\n end", "def get_selectable_products\n return call('Product.get_selectable_products')\n end", "def product_ids=(ids)\n self.products = Product.find(:all, :conditions => [\"id IN (?)\", ids.map(&:to_i)])\n end", "def product_ids=(ids)\n self.products = Product.find(:all, :conditions => [\"id IN (?)\", ids.map(&:to_i)])\n end", "def products\n vendor_ids = vendors.collect { |vendor| vendor.vendor_id}\n CSV.read(PRODUCT_CSV).collect do |line|\n FarMar::Product.new(line) if vendor_ids.include? 
line[2].to_i\n end\n end", "def products(ven_id)\n FarMar::Product.all.find_all { |product| product.vendor_id == ven_id }\n end", "def products\n if params[:ids]\n arraycheck = JSON.parse(params[:ids])\n if arraycheck.length > 0\n @products = SupplierRecord.where(contractnumber: arraycheck).joins(:product).select([\"products.id\" , \"products.name AS description\"]).uniq\n end\n else\n \n end\n end", "def get_products_colletion_select\n model = self.controller_name.singularize\n collection_select model, :product_ids, Product.find(:all), :id, :name, { }, { :multiple => true, :size => '10', :style => \"width:240px\" }\n end", "def installed_identities\n available = request_valid_identities\n ids = {}\n available.split(\"\\n\").each do |current|\n begin\n sha1 = current.match(/[a-zA-Z0-9]{40}/).to_s\n name = current.match(/.*\\\"(.*)\\\"/)[1]\n ids[sha1] = name\n rescue\n nil\n end # the last line does not match\n end\n\n ids\n end", "def all_hosts_in(group)\n inventory.all_hosts_in(group).map { |i| server(i.to_sym) }\nend", "def set_products_grupo\n @products_grupo = ProductsGrupo.find(params[:id])\n end", "def get_random_product_id\r\n arr_id = []\r\n\r\n # get all id of product on catalog page\r\n catalog_product_all_results_div.each do |product|\r\n if product['id']\r\n arr_id.push(product['id']) unless has_xpath?(\"//*[@class='resultList']//div[@id='#{product['id']}']//button[@class='btn btn-block ajax btnDisbaleOutOfStock text-inherit']\", wait: 1)\r\n end\r\n end\r\n\r\n arr_id[rand(arr_id.count - 1)]\r\n end", "def platform_list; ['all','xbox-360']; end", "def get_variants_by_color(products)\n variants = []\n\n products.each do |product|\n optionType = product.option_types.select { |type| type.presentation == \"Color\" }\n optionTypeId = optionType.length > 0 ? optionType.first.id : nil\n colors = []\n\n if optionTypeId\n product.variants.each do |var|\n var.option_values.each do |opt|\n if opt.option_type_id == optionTypeId\n unless colors.include?(opt.presentation)\n isFeatured = product.property(\"featured\") && (product.property(\"featured\") == opt.presentation) ? 1 : 0\n featuredImage = nil\n mainImage = nil\n\n var.images.each do |image|\n if image.alt == \"featured\"\n featuredImage = image\n else\n mainImage = image\n end\n end\n\n if mainImage.nil?\n mainImage = var.images[0]\n end\n\n if featuredImage.nil?\n featuredImage = isFeatured ? var.images[1] : nil\n end\n\n variant = {\n \"name\" => var.name,\n \"color\" => opt.presentation,\n \"price\" => var.price,\n \"image\" => mainImage,\n \"isFeatured\" => isFeatured,\n \"featuredImage\" => featuredImage,\n \"object\" => var\n }\n\n # move the featured variant to the beginning of the array\n if (isFeatured > 0)\n variants.unshift(variant)\n else\n variants.push(variant)\n end\n \n colors.push(opt.presentation)\n end\n end\n end\n end\n else\n isFeatured = product.property(\"featured\") ? 1 : 0\n variant = {\n \"name\" => product.name,\n \"color\" => nil,\n \"price\" => product.price,\n \"image\" => product.variants[0].images[0],\n \"isFeatured\" => isFeatured,\n \"featuredImage\" => isFeatured ? 
product.variants[0].images[1] || nil : nil,\n \"object\" => product\n }\n variants.push(variant)\n end\n end\n\n variants\n end", "def products\n collection = []\n\n all_products = FarMar::Product.all\n all_products.each do |product|\n if product.vendor_id == self.id\n collection.push(product)\n end\n end\n\n return collection\n end", "def index\n @product_groups = ProductGroup.all\n end", "def products\n FarMar::Product.by_vendor(id)\n end", "def products\n FarMar::Product.by_vendor(id)\n end", "def generate_inventory_groups\n groups_of_groups = {}\n defined_groups = []\n group_vars = {}\n inventory_groups = \"\"\n\n # Verify if host range patterns exist and warn\n if config.groups.any? { |gm| gm.to_s[RANGE_PATTERN] }\n @machine.ui.warn(I18n.t(\"vagrant.provisioners.ansible.ansible_host_pattern_detected\"))\n end\n\n config.groups.each_pair do |gname, gmembers|\n if gname.is_a?(Symbol)\n gname = gname.to_s\n end\n\n if gmembers.is_a?(String)\n gmembers = gmembers.split(/\\s+/)\n elsif gmembers.is_a?(Hash)\n gmembers = gmembers.each.collect{ |k, v| \"#{k}=#{v}\" }\n elsif !gmembers.is_a?(Array)\n gmembers = []\n end\n\n if gname.end_with?(\":children\")\n groups_of_groups[gname] = gmembers\n defined_groups << gname.sub(/:children$/, '')\n elsif gname.end_with?(\":vars\")\n group_vars[gname] = gmembers\n else\n defined_groups << gname\n inventory_groups += \"\\n[#{gname}]\\n\"\n gmembers.each do |gm|\n # TODO : Expand and validate host range patterns\n # against @inventory_machines list before adding them\n # otherwise abort with an error message\n if gm[RANGE_PATTERN]\n inventory_groups += \"#{gm}\\n\"\n end\n inventory_groups += \"#{gm}\\n\" if @inventory_machines.include?(gm.to_sym)\n end\n end\n end\n\n defined_groups.uniq!\n groups_of_groups.each_pair do |gname, gmembers|\n inventory_groups += \"\\n[#{gname}]\\n\"\n gmembers.each do |gm|\n inventory_groups += \"#{gm}\\n\" if defined_groups.include?(gm)\n end\n end\n\n group_vars.each_pair do |gname, gmembers|\n if defined_groups.include?(gname.sub(/:vars$/, \"\")) || gname == \"all:vars\"\n inventory_groups += \"\\n[#{gname}]\\n\" + gmembers.join(\"\\n\") + \"\\n\"\n end\n end\n\n return inventory_groups\n end", "def define_group\n case new_resource.im_install_mode\n when 'admin'\n group = if new_resource.group.nil?\n 'root'\n else\n new_resource.group\n end\n group\n when 'nonAdmin', 'group'\n group = if new_resource.group.nil?\n Chef::Log.fatal \"Group not provided! Please provide the group that should be used to install your product\"\n raise \"Group not provided! Please provide the group that should be used to install your product\"\n else\n new_resource.group\n end\n group\n end\nend", "def define_group\n case new_resource.im_install_mode\n when 'admin'\n group = if new_resource.group.nil?\n 'root'\n else\n new_resource.group\n end\n group\n when 'nonAdmin', 'group'\n group = if new_resource.group.nil?\n Chef::Log.fatal \"Group not provided! Please provide the group that should be used to install your product\"\n raise \"Group not provided! 
Please provide the group that should be used to install your product\"\n else\n new_resource.group\n end\n group\n end\nend", "def add_default_products_to_user\r\n Product.default_include.each do |product|\r\n #ProductSupport.change_support product.id, self.id, -1\r\n ps = ProductSupport.new\r\n ps.change_support product.id, self.id, -1\r\n end\r\n end", "def set_products\n @products_ordered = Product.where(\"id = ?\", params[:product_id]) \n end", "def products\n @product = Product.includes(:images).find(params[:id])\n @products = []\n\n # Cross-Selling: derzeit noch zufällig.\n 6.times do\n offset = rand(Product.count)\n @products << Product.includes(:images).offset(offset).first\n end\n end", "def fill_in_products\n products = []\n @data.products.all_products.each_with_index do |p, i|\n prod = {}\n prod['index'] = i\n prod['qty'] = @data.product_qty[p.prd_id]\n prod['desc'] = p.prd_long_desc\n prod['price'] = \"$\" + p.fmt_total_price\n prod['prd_id'] = p.prd_id\n products << prod\n end\n \n products\n end", "def filtered_search_product_ids(filter_params)\n\n # Select ids of attributes linked to search product.\n search_product_attribute = SitescanCommon::AttributeClass\n .where(depend_link: true)\n\n # Set condition to select product attributes related to search product.\n sql = where( attributable_type: SitescanCommon::SearchProduct )\n\n ids = nil\n\n # If the filter contains one or more options.\n if filter_params[:o]\n\n # Select classs attribute ids related to the filter options.\n sr_opt_attr_ids = search_product_attribute\n .joins(:attribute_class_options)\n .where(attribute_class_options: { id: filter_params[:o] }).ids\n\n # For each class attribute select product attribute ids.\n sr_opt_attr_ids.each do |attr_id|\n\n # Select options ids related to the class attribute.\n search_product_option_ids = SitescanCommon::AttributeClassOption\n .where(attribute_class_id: attr_id, id: filter_params[:o]).ids\n\n # Select Search products ids filtered by option or list of options.\n # Options which belong to same list type attribute conjuct with\n # OR logical condition.\n sr_opt_ids = sql.joins(%{ JOIN attribute_options ao\n ON ao.id=product_attributes.value_id\n AND value_type='#{ SitescanCommon::AttributeOption.to_s }' AND\n attribute_class_option_id IN (#{search_product_option_ids.join ','})})\n .pluck :attributable_id\n\n # Attributes conjuct with AND logical condition.\n ids = if ids then ids & sr_opt_ids else sr_opt_ids end\n end\n end\n\n # If filter has nubmer attributes.\n if filter_params[:n]\n filter_numbers = search_product_attribute.ids & filter_params[:n].keys\n filter_numbers.each do |key, value|\n unless key == 0\n num_condition = []\n num_condition << 'value>=:min' if value[:min]\n num_condition << 'value<=:max' if value[:max]\n num_condition << 'attribute_class_id=:attr_cls_id'\n sr_num_ids = sql.join( %{ JOIN attribute_numbers an\n ON an.id=product_attributes.value_id\n AND value_type='#{SitescanCommon::AttributeNumber.to_s}' } )\n .where(num_condition.join ' AND ', value.merge(attr_cls_id: key))\n .pluck :attributable_id\n ids = if ids\n ids & sr_num_ids\n else\n sr_num_ids\n end\n end\n end\n end\n ids\n end", "def products_by_variant_id\n products\n .group_by { |product| product.variant_id.to_i }\n .reduce({}) do |hash, (variant_id, product_array)|\n product = product_array.first\n hash[variant_id] = ProductPresenter.new(product)\n hash\n end\n end", "def products\n Spree::Product\n .joins(variants: { line_items: { order: :order_cycle } })\n 
.for_order_cycle(order_cycle)\n .where(\"spree_orders.state = 'complete'\")\n .uniq\n .select(\n 'spree_products.name, ' \\\n 'spree_variants.id AS variant_id, ' \\\n 'spree_products.variant_unit, ' \\\n 'spree_products.variant_unit_name, ' \\\n 'spree_products.variant_unit_scale'\n )\n end", "def products\n @products ||= (@doc/\"Product\").collect { |it| Element.new(it) }\n end", "def get_variant_properties_from_product\n vpv = []\n self.variant_variant_property_values.each do |vvpv|\n if !vvpv.variant_property_value.nil?\n vpv << vvpv.variant_property_value.variant_property_id\n end\n end\n self.product.variant_properties.each do |vp|\n self.variant_variant_property_values.build(:variant_property_value => vp.variant_property_values.first) unless vpv.include?(vp.id)#self.variant_variant_property_values.map(&:variant_property_value_id).include?(vp.variant_property_values.first.id)\n end\n end", "def get_vendor_inventory_products\n\t\tis_unassigned_vendor = self.vendor.nil?\n\t\tif is_unassigned_vendor\n\t\t\treturn []\n\t\telse\n\t\t\t_inventory = self.vendor.inventory\n\t\t\treturn _inventory.products\n\t\tend\n\tend", "def products\n collection = []\n vendors_array = self.vendors\n\n vendors_array.each do |vendor|\n collection += FarMar::Product.by_vendor(vendor.id)\n end\n\n return collection\n end", "def admin_group_ids\n @attributes[:admin_group_ids]\n end", "def get_products(ids=[])\n ids = [ids] unless ids.is_a? Array\n return call('Product.get_products', {:ids => ids})\n end", "def all\n data = []\n if @ec2_main.settings.openstack \n conn = @ec2_main.environment.connection\n if conn != nil\n begin \n x = conn.flavors.all\n x.each do |y|\n vcpu = nil\n begin \n vcpu = y.vcpus\n rescue\n vcpu = nil \n end\n if vcpu != nil \n data.push(\"#{y.id} (#{y.name} Mem: #{y.ram}MB Disk: #{y.disk}GB VCPU: #{y.vcpus}VCPUs)\")\n else\n data.push(\"#{y.id} (#{y.name} Mem: #{y.ram}MB Disk: #{y.disk}GB)\") \n end\n end\n rescue\n puts \"ERROR: getting all flavors #{$!}\"\n end\n else \n raise \"Connection Error\" \n end \n elsif @ec2_main.settings.google \n conn = @ec2_main.environment.connection\n if conn != nil\n begin \n response = conn.list_machine_types($google_zone)\n\t\t\t if response.status == 200\n\t x = response.body['items']\n\t x.each do |r|\n\t\t\t\t data.push(\"#{r['name']} ( Mem: #{r['memoryMb']}MB Disks: #{r['maximumPersistentDisks']} Disk Size: #{r['maximumPersistentDisksSizeGb']}GB CPUs: #{r['guestCpus']})\")\n \t end\n\t else\n\t \t data = []\n end\n rescue\n puts \"ERROR: getting all flavors #{$!}\"\n end\n else \n raise \"Connection Error\" \n end \t\t\n\t else \n data.push('t1.micro (EBS only Micro 32 or 64-bit, 613 MB, up to 2 compute unit)') \n data.push('m1.small (Small 32 or 64-bit, 1.7 GB, 1 compute unit)')\n data.push('m1.medium (Medium 32 or 64-bit, 3.75 GB, 2 compute unit)')\n data.push('m1.large (Large 64-bit, 7.5 GB, 4 compute unit)')\n data.push('m1.xlarge (Extra Large 64-bit, 15 GB, 8 compute unit)')\n data.push('m3.xlarge (EBS Only Extra Large 64-bit, 15 GB, 13 compute unit)')\n data.push('m3.2xlarge (EBS Only Extra Double Large 64-bit, 30 GB, 26 compute unit)')\n data.push('m2.xlarge (High Memory Extra Large 64-bit, 17.1 GB, 6.5 compute unit)')\n data.push('m2.2xlarge (High Memory Double Extra Large 64-bit, 34.2 GB, 13 compute unit)')\n data.push('m2.4xlarge (High Memory Quadruple Large 64-bit, 68.4 GB, 26 compute unit)')\n data.push('c1.medium (Compute optimized CPU Medium 32 or 64-bit, 1.7 GB, 5 compute unit)')\n data.push('c1.xlarge (Compute optimized CPU Extra 
Large 64-bit, 7 GB, 20 compute unit)')\n data.push('c3.xlarge (Compute optimized Extra Large 64-bit, 3.75 GB, 7 compute unit)')\n data.push('c3.2xlarge (Compute optimized Double Extra Large 64-bit, 7 GB, 14 compute unit)')\n data.push('c3.4xlarge (Compute optimized Quadruple Large 64-bit, 15 GB, 28 compute unit)')\t\n data.push('c3.8xlarge (Compute optimized Eight Large 64-bit, 30 GB, 55 compute unit)')\n data.push('i2.xlarge\t\t (High I/O 1x800 GB SSD, 30.5 GB, 14 compute unit)')\n data.push('i2.2xlarge\t\t (High I/O 2x800 GB SSD, 61 GB, 27 compute unit)')\n data.push('i2.4xlarge\t\t (High I/O 4x800 GB SSD, 122 GB, 53 compute unit)')\n data.push('i2.8xlarge\t \t (High I/O 8x800 GB SSD, 244 GB, 104 compute unit)')\t\t \n data.push('cc1.4xlarge (Cluster Compute Quadruple Extra Large 64-bit, 23 GB, 33.5 compute unit. 10GBit network)')\n data.push('cc2.8xlarge (Cluster Compute Eight Extra Large 64-bit, 60.5 GB, 88 compute unit. 10GBit network)')\n\t\t data.push('g2.2xlarge (Cluster GPU Quadruple Extra Large 64-bit, 15 GB, 26compute unit.)') \n data.push('cg1.4xlarge (Cluster GPU Quadruple Extra Large 64-bit, 22 GB, 33.5 compute unit. 10GBit network)') \n data.push('hi1.4xlarge (High I/O Quadruple Extra Large 64-bit, 60.5 GB, 2x1024GB SSD, 35 compute unit. 10GBit network)')\n\t\t data.push('hs1.8xlarge (High I/O Quadruple Extra Large 64-bit, 117 GB, 24x2048GB SSD, 35 compute unit. 10GBit network)')\n \t\t\n end \n return data\n end", "def get_all_product_info\r\n arr_product_info = []\r\n Timeout.timeout 30 do\r\n # handle exception: execution expired. The network is sometimes slow, default_wait_time is not enough\r\n begin\r\n active = evaluate_script 'jQuery.active'\r\n active = evaluate_script 'jQuery.active' until active == 0\r\n rescue\r\n return 'The network is slow. Should optimize the network or increase the time wait'\r\n end\r\n end\r\n\r\n find('div.row.product-row')\r\n\r\n all('div.row.product-row div.catalog-product').each do |product|\r\n within product do\r\n all_divs = all('div')\r\n id = all_divs[0][:id]\r\n div_index = 0\r\n\r\n if id.include? 
'monetate'\r\n id = all_divs[2][:id]\r\n div_index = 2\r\n end\r\n\r\n current_div = all_divs[div_index]\r\n next_div = all_divs[div_index + 1]\r\n age_start = current_div['data-ga-prod-agestart']\r\n age_end = current_div['data-ga-prod-ageend']\r\n platforms = current_div['data-ga-prod-platforms']\r\n\r\n within next_div do\r\n title = find('p.heading a').text\r\n prices = all('div.product-availability p.prices span.single.price')\r\n price = prices.last.text\r\n arr_product_info.push(id: id, title: title, price: price, agestart: age_start, ageend: age_end, platforms: platforms)\r\n end\r\n end\r\n end\r\n\r\n arr_product_info\r\n end", "def instance_ids\n @instance_ids ||= groups.map { |gr| gr.instances.map { |i| i.instance_id } }.flatten\n end", "def set_product\n if !@products\n @products = Product.all\n end\n price_ranges = [ {to: 10 }, {from: 10.01, to: 20 }, {from: 20.01, to: 30 }, {from: 30.01}]\n if !@producss\n @producss = Product.search \"*\", aggs: {price: {ranges: price_ranges}, category_id: {}, condition_id: {}, date: {}}\n end\n if !@producs\n @producs = Product.search(params.fetch(:name, \"*\")).to_a\n end\n end", "def index\n @group_products = GroupProduct.all\n end", "def production_id\n production = []\n adverts.each do |content_break|\n production << content_break.map do |advert|\n advert.match(/prodid=(.*?)\\//)[1]\n end\n end\n production\n end", "def get_hostids_by_group(name)\n hostids = []\n hosts = get_hosts_by_group(name)\n if hosts == nil\n return nil\n else\n hosts.each { |host| hostids.push(host[\"hostid\"].to_i) }\n return hostids\n end\n end", "def build_product(old_prod, all_overrides, override_classes)\n prod = Api::Product.new\n old_prod.instance_variables\n .reject { |o| o == :@objects }.each do |var_name|\n if (all_overrides['product'] || {})[var_name]\n prod.instance_variable_set(var_name, all_overrides['product'][var_name])\n else\n prod.instance_variable_set(var_name, old_prod.instance_variable_get(var_name))\n end\n end\n prod.instance_variable_set('@objects',\n old_prod.objects\n .map do |o|\n build_resource(o, all_overrides[o.name],\n override_classes)\n end)\n prod\n end", "def group_ids\n @attributes[:group_ids]\n end", "def group_ids\n @attributes[:group_ids]\n end", "def group_ids\n @attributes[:group_ids]\n end", "def products\n FarMar::Product.by_vendor(self.id)\n end", "def provision_groups\n @attributes[:provision_groups]\n end", "def current_user_group_ids\n container.current_user_group_ids\n end", "def product_product_group_params\n params.require(:product_product_group).permit(:product_id, :product_group_id)\n end", "def sg_get_random_product_id(duplicate_item = nil)\n arr_id = []\n\n # get all id of product on catalog page\n product_list_div.each do |product|\n arr_id.push(product['id'])\n end\n\n # Remove duplicate items\n arr_id.delete(duplicate_item) unless duplicate_item.nil?\n\n # return random product id\n arr_id[rand(arr_id.count - 1)]\n end", "def sg_get_random_product_id(duplicate_item = nil)\n arr_id = []\n\n # get all id of product on catalog page\n product_list_div.each do |product|\n arr_id.push(product['id'])\n end\n\n # Remove duplicate items\n arr_id.delete(duplicate_item) unless duplicate_item.nil?\n\n # return random product id\n arr_id[rand(arr_id.count - 1)]\n end", "def shopping_products items\n\titems.select do |item|\n\t\titem['kind']=='shopping#product'\n\tend\nend", "def create_default_products(company, user, income_account_id, expense_account_id)\n # Creating sales product\n sales_product = create!(:company_id 
=> company, :created_by => user, :name => \"Consultation\",\n :description => \"This is system generated default non-inventory product for sales\",\n :type => \"SalesItem\", :sales_price => 150, :income_account_id => income_account_id, :inventory => 0 )\n # Creating purchase product\n purchase_product = create!(:company_id => company, :created_by => user, :name => \"Purchase product\",\n :description => \"This is system generated default product for purchase\",\n :type => \"PurchaseItem\", :purchase_price => 90, :expense_account_id => expense_account_id, :inventory => 1)\n # Creating reseller product\n reseller_product = create!(:company_id => company, :created_by => user, :name => \"iPhone5 (demo)\",\n :description => \"This is system generated default product for trading\",\n :type => \"ResellerItem\", :sales_price => 46000, :income_account_id => income_account_id,\n :purchase_price => 45000, :expense_account_id => expense_account_id, :inventory => 1)\n warehouse = Warehouse.default_warehouse(company)\n unless warehouse.blank?\n Stock.create!(:company_id => company, :product_id => purchase_product.id,\n :warehouse_id => warehouse.id, :quantity => 10)\n Stock.create!(:company_id => company, :product_id => reseller_product.id,\n :warehouse_id => warehouse.id, :quantity => 10)\n end\n end", "def group_ebs_volumes\n @group_ebs_volumes ||= Hash[ebs_groups.map do |group_name|\n vols = ebs_volumes.select { |vol| vol.group == group_name}\n [group_name, EbsGroupConfig.new(group_name).populate!(vols)]\n end]\n end", "def ensure_variant_and_color_code\n flush_variants\n if product_code\n variant_ids = ProductProduct.search([['default_code', 'ilike', \"FN-#{product_code.split(\"-\")[0]}\"]])\n variant_codes = ProductProduct.find(variant_ids).collect(&:default_code)\n\n variant_codes.each_with_index do |code, index|\n product_variants.create(openerpid: variant_ids[index], \n default_code: code,\n color_code_id: ensure_color_code(code)) \n end\n end\n end", "def pending_test_prod_tr_ids\r\n prods = Product.find(1, 2)\r\n assert_equal 2, prods.length\r\n assert_equal \"first-product\", prods[0].code\r\n assert_equal \"second-product\", prods[1].code\r\n assert_equal \"these are the specs for the first product\",\r\n prods[0].specs \r\n assert_equal \"This is a description of the first product\",\r\n prods[0].description \r\n assert_equal \"these are the specs for the second product\",\r\n prods[1].specs\r\n end", "def group_ids\n @group_ids ||= current_user.group_ids\n end", "def current_user_group_ids\n @current_user_group_ids ||= Manage::Group.group_ids_for current_user\n end", "def set_group_product\n @group_product = GroupProduct.find(params[:id])\n end" ]
[ "0.61072475", "0.6084616", "0.60518456", "0.5812594", "0.57197446", "0.57166237", "0.5677012", "0.5660227", "0.563267", "0.5613932", "0.5595846", "0.55925477", "0.55900997", "0.5577285", "0.5557805", "0.555591", "0.554748", "0.55396104", "0.55387276", "0.5515468", "0.54772526", "0.5474971", "0.5473904", "0.5462161", "0.54493994", "0.5442", "0.54356647", "0.54104215", "0.54081583", "0.53964937", "0.5392482", "0.53882265", "0.53687507", "0.5366174", "0.53603923", "0.5336252", "0.53357315", "0.5320833", "0.5318366", "0.5315171", "0.5313021", "0.5312696", "0.53015816", "0.5285989", "0.5285989", "0.52648187", "0.5254535", "0.52541685", "0.5251039", "0.5243175", "0.52335215", "0.5224973", "0.5214395", "0.5194139", "0.517446", "0.5152496", "0.5134684", "0.51237136", "0.51237136", "0.51235545", "0.5121582", "0.5121582", "0.51137507", "0.5113684", "0.51125234", "0.5107429", "0.51073164", "0.51014817", "0.5098209", "0.50973105", "0.50972444", "0.509478", "0.5080294", "0.5078985", "0.5075985", "0.5073473", "0.5070583", "0.50640285", "0.505588", "0.5055087", "0.5048766", "0.50393623", "0.5025502", "0.5024313", "0.5024313", "0.5024313", "0.50211376", "0.5015541", "0.5013509", "0.5012777", "0.5007581", "0.5007581", "0.50036895", "0.49951634", "0.49941683", "0.49925578", "0.4990193", "0.49872774", "0.4982582", "0.49702945" ]
0.7419128
0
convert a repository to a format that puppet create_resource with yumrepo can consume
def format_repo repo
  {
    'baseurl' => repo.full_path,
    # yum repos have descr field but no name, if descr is empty use the repo name
    'descr' => repo.description.blank? ? repo.name : repo.description,
    'enabled' => repo.enabled ? '1' : '0',
    'gpgcheck' => !!repo.gpg_key ? '1' : '0'
  }
end
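A minimal consumption sketch for a hash shaped like the format_repo output above, assuming the per-repository hashes are collected into a $repos hash keyed by repository name before being handed to Puppet; the repository name 'example-repo', the baseurl value, and the $repos variable are illustrative assumptions, not taken from the snippet:

# Illustrative Puppet manifest: $repos is assumed to be built elsewhere by
# calling format_repo for each repository and keying each result by its name.
$repos = {
  'example-repo' => {
    'baseurl'  => 'http://pulp.example.com/pulp/repos/example-repo',
    'descr'    => 'Example repository',   # format_repo falls back to the repo name when description is blank
    'enabled'  => '1',
    'gpgcheck' => '1',
  },
}

# Declares one yumrepo resource per key; the nested hash supplies its attributes.
create_resources('yumrepo', $repos)

create_resources expects exactly this title => attributes shape, which fits the way format_repo returns only the attribute hash and leaves the repository name to be supplied as the key.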
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_repository_template(name)\n %Q(\n @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>.\n @prefix rep: <http://www.openrdf.org/config/repository#>.\n @prefix sr: <http://www.openrdf.org/config/repository/sail#>.\n @prefix sail: <http://www.openrdf.org/config/sail#>.\n @prefix sys: <http://www.openrdf.org/config/repository#> .\n @prefix ns: <http://www.openrdf.org/config/sail/native#>.\n\n _:#{name}{\n [] a rep:Repository ;\n rep:repositoryID \"#{name}\" ;\n rdfs:label \"Metadata and Annotations for #{name} collection\" ;\n rep:repositoryImpl [\n rep:repositoryType \"openrdf:SailRepository\" ;\n sr:sailImpl [\n sail:sailType \"openrdf:NativeStore\" ;\n ns:tripleIndexes \"spoc,posc\"\n ]\n ].\n }\n {\n _:#{name} a sys:RepositoryContext .\n }\n )\n end", "def primary_repo_transforms\n primary_repo_base_dir = File.expand_path(\n File.join(\n config.base_dir(:rtfile_dir),\n config.setting(:relative_path_to_primary_repo)\n )\n )\n\n {\n :base_dir => {\n :from => config.base_dir(:rtfile_dir),\n :to => primary_repo_base_dir + '/',\n },\n :language_code => {\n :from => config.setting(:language_code_3_chars),\n :to => config.setting(:primary_repo_lang_code),\n }\n }\n end", "def build_repo(uri, distribution, components, trusted, arch, add_deb_src)\n components = components.join(' ') if components.respond_to?(:join)\n repo_options = []\n repo_options << \"arch=#{arch}\" if arch\n repo_options << 'trusted=yes' if trusted\n repo_options = '[' + repo_options.join(' ') + ']' unless repo_options.empty?\n repo_info = \"#{uri} #{distribution} #{components}\\n\"\n repo_info = \"#{repo_options} #{repo_info}\" unless repo_options.empty?\n repo = \"deb #{repo_info}\"\n repo << \"deb-src #{repo_info}\" if add_deb_src\n repo\nend", "def format_repo(data)\n Cinch::Formatting.format(:green, \"#{data[\"repository\"][\"name\"]}\")\n end", "def format(payload)\n if(payload.get(:data, :repository_generator, :generated))\n payload.set(:data, :repository_publisher, :repositories,\n payload.get(:data, :repository_generator, :generated)\n )\n end\n if(payload.get(:data, :repository_generator, :package_assets))\n payload.set(:data, :repository_publisher, :package_assets,\n payload.get(:data, :repository_generator, :package_assets)\n )\n end\n end", "def get_repo_content(type, username, repo_name) # :yields: String\n case type\n\n when Api_options::REPO::LANGUAGES\n \"#{BASE_URL}\" + \"#{REPOS}\" + \"#{username}/\" + \"#{repo_name}/languages\"\n when Api_options::REPO::CONTRIBUTORS\n BASE_URL + REPOS + \"#{username}\" + \"/\" + \"#{repo_name}\" + \"/\" + \"contributors\"\n when Api_options::REPO::README\n BASE_URL + REPOS + \"#{username}\" + \"/\" + \"#{repo_name}\" + \"/\" + \"readme\"\n end\n end", "def read_repository_info\n repository_url = File.open(@repository_conf).gets.chomp\n name = repository_url.split(\"/\").last.split(\".\").first\n return {\n :url => repository_url,\n :name => name,\n :container_prefix => container_prefix(name),\n :path => \"#{@work_dir}/#{name}\",\n }.freeze\n end", "def repository\n if ladnn?\n ['University of California, Los Angeles. Library. 
Department of Special Collections']\n else\n # Replace marc codes with double dashes and no surrounding spaces\n map_field(:repository)&.map { |a| a.gsub(/ \\$[a-z] /, ' ') }\n end\n end", "def get_repo(repo_id)\n response=client.extensions.repository.retrieve_with_details(repo_id)\n code=response.code\n body=response.body\n case code\n when 200\n repo=JSON.parse(body.to_json)\n type = repo[\"notes\"][\"_repo-type\"]\n #puts repos\n repo_data=nil\n case type\n when REPO_TYPE_RPM\n yum_distributor = repo[\"distributors\"].select{ |d| d[\"distributor_type_id\"] == 'yum_distributor'}[0]\n yum_importer = repo[\"distributors\"].select{ |d| d[\"distributor_type_id\"] == 'yum_importer'}[0]\n distributor = nil\n if yum_distributor\n distributor = {\n :auto_publish => yum_distributor[\"auto_publish\"],\n :last_publish => yum_distributor[\"last_publish\"],\n :config => yum_distributor[\"config\"]\n }\n end\n importer = nil\n if yum_importer\n importer = {\n :last_sync => yum_importer[\"last_sync\"],\n :config => yum_importer[\"config\"]\n }\n end\n\n repo_data={\n :id => repo[\"id\"],\n :name => repo[\"display_name\"],\n :description => repo[\"description\"],\n :content_unit_counts => repo[\"content_unit_counts\"],\n :type => REPO_TYPE_RPM,\n :last_unit_removed => repo[\"last_unit_removed\"],\n :last_unit_added => repo[\"last_unit_added\"],\n :distributor => distributor,\n :importer => importer,\n }\n #puts repos\n when REPO_TYPE_PUPPET\n puppet_distributor = repo[\"distributors\"].select{ |d| d[\"distributor_type_id\"] == 'puppet_distributor'}[0]\n distributor =nil\n if puppet_distributor\n distributor = {\n :auto_publish => puppet_distributor[\"auto_publish\"],\n :last_publish => puppet_distributor[\"last_publish\"],\n :config => puppet_distributor[\"config\"]\n }\n end\n repo_data={\n :id => repo[\"id\"],\n :name => repo[\"display_name\"],\n :description => repo[\"description\"],\n :content_unit_counts => repo[\"content_unit_counts\"],\n :type => REPO_TYPE_PUPPET,\n :last_unit_removed => repo[\"last_unit_removed\"],\n :last_unit_added => repo[\"last_unit_added\"],\n :distributor => distributor\n }\n else\n end\n repo_data\n else\n raise \"Exception: cannot get repository detail: response code :#{code}\"\n end\n end", "def generate_repository_data_from_Puppetfile\n repositories = []\n\n # Open the Puppetfile\n File.open(@options[:puppetfile], 'r') do |fh|\n while (line = fh.gets) != nil\n # Skip blank lines, comments, anything that looks like a forge module\n next if line =~ Skipall_Regex\n next if Module_Regex.match(line)\n # When we see /mod 'modulename',/ it is possibly a properly formatted fixture\n if Repository_Regex.match(line)\n complete = false\n name = Regexp.last_match(1)\n while (line = fh.gets) != nil\n next if line =~ Skipall_Regex\n if Location_Only_Regex.match(line)\n # The Puppetfile may specify just a location /:git => 'https://github.com/author/puppet-modulename'/\n # We do not validate the URI protocol, just that it is a valid URI\n location = Regexp.last_match(1)\n puts \"Found module #{name} with location #{location}\" if @options[:debug]\n unless location.match(URI.regexp)\n puts \"#{location} is not a valid URI, skipping this repo\" if @options[:debug]\n break\n end\n repositories << {name: name, location: location}\n complete = true\n elsif Location_Plus_Regex.match(line)\n # Or it may provide more, with a trailing comma\n # :git => 'https://github.com/author/puppet-modulename',\n # :ref => '1.0.0'\n location = Regexp.last_match(1)\n while (line = fh.gets) != nil\n next if 
line =~ Skipall_Regex\n if Type_ID_Regex.match(line)\n type = Regexp.last_match(1)\n id = Regexp.last_match(2)\n puts \"Found module #{name} with location #{location}, #{type} of #{id}\" if @options[:debug]\n unless location.match(URI.regexp)\n puts \"#{location} is not a valid URI, skipping this repo\" if @options[:debug]\n break\n end\n repositories << {name: name, location: location, type: type, id: id}\n complete = true\n else\n # If the :git line ends with a comma but no type/ID is found, ignore it, we cannot properly determine the fixture\n puts \"Found module #{name} at location #{location}. Expected type/ID information but did not find any, skipping.\" if @options[:debug]\n complete = true\n end\n break if complete\n end\n else\n # If the /mod 'modulename',/ line is not followed with a :git string, ignore it, we cannot properly determine the fixture\n puts \"Found a reference to module #{name} but no location (:git) was provided, skipping.\" if @options[:debug]\n complete = true\n end\n break if complete\n end\n end\n end\n end\n\n repositories\n end", "def repository_to_repo_url(repository)\n return if !repository.is_a?(Array) and !repository.is_a?(Hash)\n\n # NPM allows both a single hash (with a `url` and a optional `type`) or\n # an array of hashes. Uniform to always be an array\n repository = [repository] unless repository.is_a? Array\n\n # Reject all that don't have a URL property or that point to a non-github repository\n repository = repository.map(&:with_indifferent_access).find do |repo|\n repo[:url].present? and\n repo[:url].include?(\"github\")\n end\n\n repository.try(:[], :url)\n end", "def parse_repo\n if raw_event.has_key?('repository')\n @repo = parse_field('repository', %w{id url name})\n else\n @repo = parse_field('repo', %w{id url name})\n end\n end", "def metadata(repository)\n end_date = DateTime.strptime(@end_date || @start_date, \"%Y-%m-%d\")\n friendly_end_date = end_date.strftime(\"%B %d, %y\")\n\n {\n sanitized_repository: repository.tr(\"/\", \"-\"),\n repository: repository,\n date: end_date,\n friendly_date: friendly_end_date,\n }\n end", "def to_resolver_module\n require 'puppetfile-resolver'\n\n PuppetfileResolver::Puppetfile::GitModule.new(name).tap do |mod|\n mod.remote = @git\n mod.ref = sha\n end\n end", "def get_raw_repository_images\n # Docker image object inspection:\n # Docker::Image { :id => ae0d36c75a1deac924ef426162f4356118a780140c709e16bbb6d4aa435c8d34, :info => {\"ParentId\"=>\"ae9aedc2812918e2f5bc80d17de557de0f9ed18e1f10cc3609b10c0e1c1a24d2\", \"RepoTags\"=>[\"docker.cucloud.net/rais/pidash-uxwork-zach:e40c5a1\"], \"RepoDigests\"=>[], \"Created\"=>1438632097, \"Size\"=>6202783, \"VirtualSize\"=>486874949, \"Labels\"=>{}, \"id\"=>\"ae0d36c75a1deac924ef426162f4356118a780140c709e16bbb6d4aa435c8d34\"}, :connection => Docker::Connection { :url => unix:///, :options => {:socket=>\"/var/run/docker.sock\"} } }\n\n #https://github.com/bkeepers/dotenv\n images = Docker::Image.all\n\n repo_images = []\n\n images.each do |image|\n # check to see if this image is tagged with the local repository (can have multiple tags)\n # note - this could probably be improved with some enumerable magic - revisit it\n include_image = false\n image.info[\"RepoTags\"].each do |tag|\n if tag[0..@repository_name.length] == @repository_name + \":\"\n include_image = true\n end\n end\n\n if include_image\n repo_images << image\n end\n end\n\n return repo_images\n\n end", "def repo_to_hash(repo)\n { :path => repo.path }\n end", "def repo_to_hash(repo)\n { :path => 
repo.path }\n end", "def format(payload)\n if(payload[:status].to_s != 'error' && payload.get(:data, :package_builder, :name))\n payload.set(:data, :github_kit, :release,\n Smash.new(\n :repository => [\n payload.get(:data, :code_fetcher, :info, :owner),\n payload.get(:data, :code_fetcher, :info, :name)\n ].join('/'),\n :reference => payload.get(:data, :code_fetcher, :info, :commit_sha),\n :tag_name => payload.get(:data, :package_builder, :version),\n :name => [\n payload.get(:data, :package_builder, :name),\n payload.get(:data, :package_builder, :version)\n ].join('-'),\n :prerelease => prerelease?(payload.get(:data, :package_builder, :version)),\n :body => \"Release - #{payload.get(:data, :package_builder, :name)} <#{payload.get(:data, :package_builder, :version)}>\",\n :assets => payload.get(:data, :package_builder, :keys)\n )\n )\n end\n end", "def monitoring_repository(action)\n # call with :add or :remote, defaults to :add\n action ||= 'add'\n\n case node['platform_family']\n when 'debian'\n apt_repository 'rackspace_monitoring' do\n uri \"https://stable.packages.cloudmonitoring.rackspace.com/#{node['platform']}-#{node['lsb']['release']}-x86_64\"\n distribution 'cloudmonitoring'\n components ['main']\n key \"https://monitoring.api.rackspacecloud.com/pki/agent/#{node['platform']}-#{node['platform_version'][0]}.asc\"\n action action\n end\n when 'rhel'\n yum_repository 'rackspace_monitoring' do\n description 'Rackspace Monitoring Agent package repository'\n baseurl \"https://stable.packages.cloudmonitoring.rackspace.com/#{node['platform']}-#{node['platform_version'][0]}-x86_64\"\n gpgkey \"https://monitoring.api.rackspacecloud.com/pki/agent/#{node['platform']}-#{node['platform_version'][0]}.asc\"\n enabled true\n gpgcheck true\n action action\n end\n end\nend", "def convert_to_gitolite_format\n @all_rewind_identifiers = @rewind + @rewind_deploy\n @all_read_identifiers = @read + @read_deploy\n permissions = {}\n permissions['RW+'] = { '' => @all_rewind_identifiers.uniq.sort } unless @all_rewind_identifiers.empty?\n permissions['RW'] = { '' => @write.uniq.sort } unless @write.empty?\n permissions['R'] = { '' => @all_read_identifiers.uniq.sort } unless @all_read_identifiers.empty?\n\n [permissions]\n end", "def to_s\n \"#<DataMapper::Repository:#{@name}>\"\n end", "def repo; end", "def repo; end", "def repo; end", "def repo; end", "def get_repo_details\n user = User.find_by(uuid: params[:uuid])\n client = Octokit::Client.new(:access_token => user.password)\n repo_map = {}\n client.repository(:user => user.gh_username, :repo => params[:repo_name]).each { |detail|\n repo_map[detail[0]] = detail[1]\n }\n repo_map['languages'] = client.languages(:user => user.gh_username, :repo => params[:repo_name]).map{ |langArray|\n langArray[0]\n }\n render :json => repo_map\n end", "def to_s\n \"#{@timestamp}/#{@repo_name}/#{@sha}\"\n end", "def repository_name\n config['image']\n end", "def parse_repo\n matches = @source_url.match @github_regexp\n return unless matches\n owner = matches[:owner]\n name = matches[:name]\n \"#{owner}/#{name}\"\n end", "def yum_repo_url(base_url)\n \"#{base_url}/#{new_resource.version}/#{yum_repo_platform_family_string}/#{yum_repo_platform_string}\"\n end", "def description_for_repo(repo)\n repo.description\n end", "def repo\n @attributes[:repo]\n end", "def repo\n @attributes[:repo]\n end", "def parse_git_repo\n git_output = `git status --porcelain 2>&1`\n unless $?.exitstatus == 0\n puts \"Git error: make sure the current directory is a valid repo and that git is 
working.\"\n exit!\n end\n list = git_output.split(\"\\n\")\n # Get just what was added, modified, copied or moved.\n # Skip deleted files.\n files = list.reduce([]) do |a, f|\n file = /[\\?MARC ]{1,2} (.*\\.pp)/.match f\n a << file[1] if file\n a\n end\n files.uniq! # remove dupes, just in case.\n files.each { |f| PuppetFile.new(File.expand_path(f)) }\nend", "def repository\n digital_object.repository\n end", "def create_ips_repo\n shellout!(\"pkgrepo create #{repo_dir}\")\n log.info(log_key) { \"Created IPS repo: #{repo_dir}\" }\n end", "def repo\n @repository\n end", "def lfs_http_url_to_repo(_operation = nil)\n http_url_to_repo\n end", "def yum_repo_url(base_url)\n \"#{base_url}/#{new_resource.version}/#{yum_repo_platform_family_string}/#{yum_repo_platform_string}\"\n end", "def converter_factory(cmdline)\n if cmdline.args[:gitRepoRoot]\n Giblog.logger.info { \"User asked to parse a git repo\" }\n GitRepoConverter.new(cmdline.args)\n else\n FileTreeConverter.new(cmdline.args)\n end\n end", "def create_repository_from_template(repo, name, options = {})\n options.merge! :name => name\n options = ensure_api_media_type(:template_repositories, options)\n post \"#{Repository.path repo}/generate\", options\n end", "def repository_base_url(result)\n if result['slug'] && AppConfig[:use_human_readable_urls]\n url = \"repositories/\" + result['slug']\n else\n url = result['uri']\n end\n\n return url\n end", "def get_repository_name(fedora_obj)\n return (get_coll_field_val(fedora_obj, :repository))\n end", "def manifest\n repo_name = url_path[0] == \"/\" ? url_path[1..-1] : url_path\n\n GithubRepositoryResource.new(repo_name, @client).to_h\n end", "def canonicalize_repo(repo)\n # if they fully-qualified it, we're good\n return repo if repo.start_with?('http', 'git@')\n\n # otherwise, ti's a shortname\n cr = \"git@#{@ghhost || 'github.com'}:#{repo}.git\"\n SugarJar::Log.debug(\"canonicalized #{repo} to #{cr}\")\n cr\n end", "def github_repo(node)\n node['delivery']['config']['build_attributes']['publish']['github']\n end", "def to_s\n @index.inject(\"<CartridgeRepository:\\n\") do |memo, (name, sw_hash)|\n sw_hash.inject(memo) do |memo, (sw_ver, cart_hash)|\n cart_hash.inject(memo) do |memo, (cart_ver, cartridge)|\n memo << \"(#{name}, #{sw_ver}, #{cart_ver}): \" << cartridge.to_s << \"\\n\"\n end\n end\n end << '>'\n end", "def convert25(resource)\n if resource.class == Puppet::Resource::Catalog\n resource.edges.each do |b|\n convert25(b)\n end\n elsif resource.class == Puppet::Relationship and resource.target.class == Puppet::Resource and resource.target.title != nil and resource.target.file != nil\n target = resource.target\n manifestfile = target.file.gsub(\"/etc/puppet/manifests/\", \"\")\n\n resource = {:type => target.type,\n :title => target.title,\n :parameters => {}}\n\n target.each do |param, value|\n resource[:parameters][param] = value\n end\n\n if resource[:parameters].include?(:content)\n resource[:parameters][:content] = Digest::MD5.hexdigest(resource[:parameters][:content])\n end\n\n resource[:resource_id] = \"#{target.type.downcase}[#{target.title}]\"\n @resources << resource\n end\nend", "def repo_to_uri(repo)\n\tif @bigboy.key?(repo.to_sym)\n\t\tif @bigboy[repo.to_sym].is_a?(Array)\n\t\t\tputs \"\\nThe URIs for \" + \"#{ARGV[0]}\".red + \" are: \"\n\t\t\t@bigboy[repo.to_sym].each { |n| puts n.green }\n\t\telse\n\t\t\tputs \"\\nThe URI for \" + \"#{ARGV[0]} \".red + \"is \" + \" #{@bigboy[repo.to_sym]}\".green\n\t\tend\n\telse\n\t\tputs \"\\nSorry, I can't find that 
repo.\".magenta\n\tend\nend", "def repo\n @repo\n end", "def repository_fields\n client.repo('git/git').fields\n end", "def repo\n @repo ||= OctocatHerder::Repository.new(@raw['repo'], connection)\n end", "def get_repo_list\n Chef::Log.debug(\n \"Fetching all versions of #{new_resource.module_name} \" +\n \"from #{new_resource.repository}.\",\n )\n latest = powershell_out!(\n <<-EOH,\n $splat = @{\n Name = \"#{new_resource.module_name}\"\n Repository = \"#{new_resource.repository}\"\n AllVersions = $True\n }\n (Find-Module @splat).Version.ForEach({$_.ToString()})\n EOH\n ).stdout.to_s.chomp.split(\"\\r\\n\")\n Chef::Log.debug(\"Available versions: #{latest.join(', ')}\")\n\n return latest.map { |v| Gem::Version.new(v) }\n end", "def yum_repo_platform_string\n platform = platform?('fedora') ? 'fedora' : 'rhel'\n release = platform?('amazon') ? '7' : '$releasever'\n \"#{platform}-#{release}-$basearch\"\n end", "def uri_to_repo(uri)\n\tparsed = URI.parse(uri)\n\tnew_uri = parsed.to_s\n\tnew_uri.gsub!(\"https\", \"http\") if parsed.scheme == \"https\"\n\tnew_uri.chop! if new_uri[-1] == \"/\"\n\tif @bigboy.value?(new_uri)\n\t\tputs \"\\nThe repo for \" + \"#{ARGV[0]} \".green + \"is \" + \"#{@bigboy.key(new_uri)}\".red\n\telsif @bigboy.values.flatten.include?(new_uri)\n\t\tarrays = @bigboy.values.select { |n| n.is_a?(Array) }\n\t\tmatches = arrays.select { |n| n.include?(new_uri) }\n\t\tputs \"\\nThe repo for \" + \"#{ARGV[0]} \".green + \"is \" + \"#{@bigboy.key(matches.flatten)}\".red\n\telse \n\t\tputs \"\\nSorry, I can't find that URI.\".magenta\n\tend\nend", "def repo_root; end", "def repository(uri, client)\n repo = client.repo uri\n repo\n rescue Octokit::InvalidRepository\n raise gem.to_s + ' didnt have github repo'\n end", "def yum_repo_platform_string\n platform = platform?('fedora') ? 'fedora' : 'rhel'\n release = platform?('amazon') ? '6' : '$releasever'\n \"#{platform}-#{release}-$basearch\"\n end", "def source_repo\n \"#{@user}/#{@repo}\"\n end", "def id_for_repo(repo)\n \"#{repo.url}##{repo.version}\"\n end", "def normalized_repos\n v = config[:repos]\n if not v.has_key?(:CRAN)\n v[:CRAN] = config[:cran]\n end\n # If the version is less than 3.2 we need to use http repositories\n if r_version_less_than('3.2')\n v.each {|_, url| url.sub!(/^https:/, \"http:\")}\n config[:bioc].sub!(/^https:/, \"http:\")\n end\n v\n end", "def repo!\n read_attribute :repo\n end", "def do_add_repo(repository,mirror_enabled,mirror_url,mirror_ca_cert)\n name = repository['name']\n url = repository['url']\n if !mirror_enabled\n cmd = \"helm repo add #{name} #{url} \"\n else\n if mirror_ca_cert == \"\"\n cmd = \"helm repo add #{name} #{mirror_url} \"\n else\n cmd = \"helm repo add --ca-file=/var/vcap/store/action/config/mirror_ca_cert.pem #{name} #{mirror_url} \"\n end\n end\n \"#{cmd};helm repo update\"\nend", "def image(image = nil)\n if image\n if image.include?('/')\n # pathological case, a ':' may be present which starts the 'port'\n # part of the image name and not a tag. 
example: 'host:1337/blah'\n # fortunately, tags are only found in the 'basename' part of image\n # so we can split on '/' and rebuild once the tag has been parsed.\n dirname, _, basename = image.rpartition('/')\n r, t = basename.split(':', 2)\n r = [dirname, r].join('/')\n else\n # normal case, the ':' starts the tag part\n r, t = image.split(':', 2)\n end\n repo r\n tag t if t\n end\n \"#{repo}:#{tag}\"\n end", "def get_repo (repoName='ComputerScience_test')\n puts \"get_repo\"\n set_repository repoName\n puts \"set_repository #{@repository}\"\n result = @repository.query.perform \"select ?s ?p ?o {?s ?p ?o} order by ?s\"\n #puts \"@repository.query.perform #{result}\"\n \n graph = RDF::Graph.new(\"http://ninjalearning.info:8080/repositories/ComputerScience_test\") # TODO: repo name, host, port, ...\n \n result['values'].each do |triple|\n graph << RDF::Statement.new(\n RDF::URI.new(triple[0]),\n RDF::URI.new(triple[1]),\n RDF::URI.new(triple[2]),\n )\n end\n graph\n end", "def github_repo\n DeliveryGolang::Helpers::Publish.github_repo(node)\n end", "def repo_name\n if @vcs == 'gitlab'\n @data['project']['name']\n elsif @vcs == 'tfs'\n @data['resource']['repository']['name']\n else\n @data['repository']['name']\n end\n end", "def canonical_repository_url\n \"https://#{@settings.host}/scm/#{attributes[:repository_namespace]}/#{attributes[:repository_name]}.git\"\n end", "def configuration_for_repository\n {\n \"name\" => self.configuration_name,\n \"namespace\" => self.configuration_namespace,\n \"methodConfigVersion\" => self.configuration_snapshot,\n \"methodRepoMethod\" => {\n \"methodName\" => self.name,\n \"methodNamespace\" => self.namespace,\n \"methodVersion\" => self.snapshot\n },\n \"inputs\" => self.repository_parameter_list(:inputs),\n \"outputs\" => self.repository_parameter_list(:outputs),\n \"prerequisites\" => {},\n \"rootEntityType\" => self.entity_type,\n \"deleted\" => false\n }\n end", "def repository repoid\n @repos[repoid] or raise \"unknown repository #{repoid.inspect}\"\n end", "def repo=(_arg0); end", "def repo=(_arg0); end", "def repo=(_arg0); end", "def repo=(_arg0); end", "def component_repo(cmp)\n component(cmp).fetch('repo', \"#{CONF_DEFAULT_BASE_REPO}/#{cmp}.git\")\nend", "def component_repo(cmp)\n component(cmp).fetch('repo', \"#{CONF_DEFAULT_BASE_REPO}/#{cmp}.git\")\nend", "def repository_elements(marc)\n field852 = field852_for(marc)\n repository = {\n name: 'repository',\n encodinganalog: '852',\n children: []\n }\n repository[:children] << {\n name: 'corpname',\n value: \"#{field852['a']}\"\n }\n repository[:children] << {\n name: 'address',\n children: [{ name: \"addressline\", value: field852['e']}]\n }\n [repository]\n end", "def gitrepo(repo, name, ref = 'master')\n mod name.split('/').join('-'),\n :git => \"https://#{repo}/#{name}.git\",\n :ref => ref\nend", "def repo_name(repo)\n name = nil\n case repo\n when 'stable'\n name = 'omnibus-stable-local'\n when 'current'\n name = 'omnibus-current-local'\n when nil # pass through\n else\n raise \"#{repo} not supported. 
must be current or stable\"\n end\n name\nend", "def repository!\n Repository.find_or_create_by!(url: repository_url) do |repo|\n repo.organization_id = org_id\n repo.name = repository_name\n end\n end", "def title_for_repo(repo)\n \"#{repo.name} #{repo.version}\"\n end", "def get_remote_repository_name\n return @registry_server + \"/\" + @repository_name\n end", "def process_repo_info(repo)\n info = {}\n info['top'] = {}\n unless repo.nil?\n %w(name uri url parent_institution_name image_url repo_code description).each do |item|\n info['top'][item] = repo[item] unless repo[item].blank?\n end\n unless repo['agent_representation'].blank? || repo['agent_representation']['_resolved'].blank? || repo['agent_representation']['_resolved']['agent_contacts'].blank? || repo['agent_representation']['_resolved']['jsonmodel_type'] != 'agent_corporate_entity'\n in_h = repo['agent_representation']['_resolved']['agent_contacts'][0]\n %w{city region post_code country email }.each do |k|\n info[k] = in_h[k] if in_h[k].present?\n end\n if in_h['address_1'].present?\n info['address'] = []\n [1, 2, 3].each do |i|\n info['address'].push(in_h[\"address_#{i}\"]) if in_h[\"address_#{i}\"].present?\n end\n end\n info['telephones'] = in_h['telephones'] if !in_h['telephones'].blank?\n end\n end\n info\n end", "def load_os_package_repo_hash(\n instance_profile,\n provider = 'virtualbox',\n package_manager_default = 'none',\n prepend_base_directory = true,\n repository_id_default = 'none',\n rsync__options = [\n '-a',\n '--delete',\n '--verbose'\n ],\n sync_type = 'rsync'\n)\n\n repository_hash = nil\n\n provider_info = lookup_values_yaml(instance_profile, ['providers', provider])\n\n case provider\n when 'virtualbox'\n package_manager = [\n lookup_values_yaml(provider_info, ['instance', 'site_settings', 'package_managers', 'package_manager']),\n lookup_values_yaml(provider_info, ['instance', 'site_settings', 'package_managers', 'defaults', 'package_manager']),\n package_manager_default\n ].find { |i| !i.nil? }\n\n repository_id = [\n lookup_values_yaml(provider_info, ['instance', 'site_settings', 'package_managers', package_manager, 'repository_id']),\n lookup_values_yaml(provider_info, ['instance', 'site_settings', 'package_managers', 'defaults', package_manager, 'repository_id']),\n repository_id_default\n ].find { |i| !i.nil? 
} if package_manager\n\n case package_manager\n when 'none'\n # No package manager specified ignore repository sync setup.\n when 'dnf'\n case repository_id\n when 'none'\n # No repository specified ignore repository sync setup.\n else\n repository_hash = {\n '/etc/yum.repos.d' => {\n 'host_path' => \"files/os/package_managers/#{package_manager}/yum.repos.d/#{repository_id}\",\n 'instance_path' => '/etc/yum.repos.d',\n 'rsync__options' => rsync__options,\n 'rsync__verbose' => true,\n 'prepend_base_directory' => true,\n 'type' => sync_type\n }\n }\n repository_hash\n end\n when 'yum'\n case repository_id\n when 'none'\n # No repository specified ignore repository sync setup.\n else\n repository_hash = {\n '/etc/yum.repos.d' => {\n 'host_path' => \"files/os/package_managers/#{package_manager}/yum.repos.d/#{repository_id}\",\n 'instance_path' => '/etc/yum.repos.d',\n 'rsync__options' => rsync__options,\n 'rsync__verbose' => true,\n 'prepend_base_directory' => true,\n 'type' => sync_type\n }\n }\n repository_hash\n end\n else\n exit_with_message(\"package_manager [#{package_manager}] not supported.\")\n end\n end\n repository_hash\nend", "def repository; end", "def repository_name(item)\n repo = repository_value(item) and EmmaRepository.pairs[repo]\n end", "def repo_dir; end", "def to_s\n out = []\n\n groups = spec_groups\n\n add_PATH out, groups.delete(Gem::Resolver::VendorSpecification) { [] }\n\n add_GIT out, groups.delete(Gem::Resolver::GitSpecification) { [] }\n\n add_GEM out, groups\n\n add_PLATFORMS out\n\n add_DEPENDENCIES out\n\n out.join \"\\n\"\n end", "def get_repo(repo)\n\t\treturn self.fetch(\"repos/#{repo}\")\n\tend", "def create_repository(name, dbmod)\n c = Class.new\n mod1 = if Repository.constants.include?(name)\n Repository.const_get(name)\n else\n mod = Module.new\n Repository.const_set(name, mod)\n mod\n end\n dbmod.const_set name, c\n end", "def get_archive(repo)\n @repository ||= YAML.load_file(File.join(Rails.root, \"config\", ARCHIVES_FILE))['type']\n @repository[repo]\n end", "def write_repository(product_dir, releases)\n platforms = releases.map { |release| release[:platform] }.uniq\n platforms.each do |platform|\n configuration_directory = File.join(product_dir, platform)\n FileUtils.mkdir_p(configuration_directory)\n releases_by_version = Hash.new { |hash, key| hash[key] = [] }\n releases.each do |release|\n next if release[:platform] != platform\n\n releases_by_version[release[:version]] << extract_release_fields(release)\n end\n releases_by_version.each_pair do |version, version_releases|\n File.write(File.join(configuration_directory, \"#{version.gsub('/', '-')}.json\"),\n JSON.pretty_generate(version_releases))\n end\n end\n end", "def repository\n @repository ||= repo_type.repository_for(container)\n end", "def repository=(filename, opts={}, &block)\n contents(filename).repository = opts.delete(:name) || ArrayRespository.new()\n end", "def get_repos(provisioner_server_node, platform, version)\n admin_ip = Chef::Recipe::Barclamp::Inventory.get_network_by_type(provisioner_server_node, \"admin\").address\n web_port = provisioner_server_node[:provisioner][:web_port]\n provisioner_web = \"http://#{admin_ip}:#{web_port}\"\n default_repos_url = \"#{provisioner_web}/suse-#{version}/repos\"\n\n repos = Mash.new\n\n case platform\n when \"suse\"\n repos = Mash.new\n repos_from_attrs = suse_get_repos_from_attributes(provisioner_server_node,platform,version)\n\n case version\n when \"11.3\"\n repo_names = %w(\n SLE-Cloud\n SLE-Cloud-PTF\n SUSE-Cloud-5-Pool\n 
SUSE-Cloud-5-Updates\n SLES11-SP3-Pool\n SLES11-SP3-Updates\n )\n when \"12.0\"\n repo_names = %w(\n SLE12-Cloud-Compute\n SLE12-Cloud-Compute-PTF\n SLE-12-Cloud-Compute5-Pool\n SLE-12-Cloud-Compute5-Updates\n SLES12-Pool\n SLES12-Updates\n )\n else\n raise \"Unsupported version of SLE/openSUSE!\"\n end\n\n # Add the new (not predefined) repositories from attributes\n repos_from_attrs.each do |name,repo|\n repo_names << name unless repo_names.include? name\n end\n\n # This needs to be done here rather than via deep-merge with static\n # JSON due to the dynamic nature of the default value.\n repo_names.each do |name|\n repos[name] = repos_from_attrs.fetch(name, Mash.new)\n suffix = name.sub(/^SLE-Cloud/, 'Cloud')\n repos[name][:url] ||= default_repos_url + '/' + suffix\n end\n\n # optional repos\n unless provisioner_server_node[:provisioner][:suse].nil?\n [[:hae, :missing_hae], [:storage, :missing_storage]].each do |optionalrepo|\n unless provisioner_server_node[:provisioner][:suse][optionalrepo[1]]\n suse_optional_repos(version, optionalrepo[0]).each do |name|\n repos[name] = repos_from_attrs.fetch(name, Mash.new)\n repos[name][:url] ||= default_repos_url + '/' + name\n end\n end\n end\n end\n end\n\n repos\n end", "def to_puppet\n %w(ioa_interface force10_portchannel).each do |r_source|\n next unless port_resources[r_source]\n port_resources[r_source].keys.each do |name|\n resource = port_resources[r_source][name]\n\n [\"vlan_tagged\", \"vlan_untagged\", \"tagged_vlan\", \"untagged_vlan\"].each do |prop|\n next unless resource[prop].is_a?(Array)\n resource[prop] = resource[prop].sort.uniq.join(\",\")\n end\n end\n end\n\n port_resources\n end", "def transform_out(rally_artifact)\n RallyLogger.debug(self,\"Transforming out #{rally_artifact}/#{rally_artifact.class}/#{@field_name}\")\n\n # have to read because the helpful addition they made to make projects, iterations and releases render as strings\n rally_artifact.read\n reference_value = rally_artifact[@field_name]\n return nil if reference_value.nil?\n \n if reference_value.class == String #wsapi prior to 1.19 gives us a string of username\n return reference_value\n else\n return make_xml_for_objects(reference_value)\n end\n end", "def repo\n read_attribute(:repo).presence || self.original_repo\n end", "def to_h\n results = {}\n defaults = @config[\"default_reports\"] || {}\n\n @config[\"repositories\"].map { |repo_config|\n repo = repo_config[\"repository\"]\n config = config_for(repo)\n\n config[\"reports\"].map { |format, report_config|\n # Sometimes report_data has unused keys, which generates a warning, but\n # we're okay with it, so we wrap it with silence_warnings {}.\n filename = silence_warnings {\n tmp_filename = report_config[\"filename\"] || defaults[format][\"filename\"]\n tmp_filename % metadata(repo)\n }\n\n directory = report_config[\"directory\"] || defaults[format][\"directory\"]\n file = File.join(directory, filename)\n\n # Export +report+ to the specified +format+ with the specified\n # +frontmatter+.\n frontmatter = report_config[\"frontmatter\"] || {}\n if defaults.has_key?(format) && defaults[format].has_key?(\"frontmatter\")\n frontmatter = defaults[format][\"frontmatter\"].merge(frontmatter)\n end\n frontmatter = nil if frontmatter == {}\n\n export = @reports[repo].send(\"to_#{format}\", frontmatter)\n\n results[file] = export\n }\n }\n results\n end", "def repository_name\n @repository_name ||= \"#{project_name}-boshrelease\"\n end" ]
[ "0.62212133", "0.60758203", "0.59144396", "0.5874723", "0.5872641", "0.577458", "0.5752192", "0.5725992", "0.57204676", "0.56908524", "0.56577855", "0.56495017", "0.563324", "0.5542828", "0.55235654", "0.54997605", "0.54997605", "0.54989964", "0.5482917", "0.5474779", "0.54679036", "0.5454118", "0.5454118", "0.5454118", "0.5454118", "0.5434783", "0.5408123", "0.5387866", "0.53742385", "0.5346433", "0.5345049", "0.53445506", "0.53445506", "0.53371936", "0.533618", "0.5332484", "0.53308004", "0.53281915", "0.5324585", "0.5287873", "0.5283368", "0.52725595", "0.5262524", "0.5260849", "0.5214916", "0.5214056", "0.5210333", "0.51945347", "0.5180859", "0.5180635", "0.51738334", "0.5171009", "0.51680017", "0.515706", "0.5154239", "0.51521593", "0.5129824", "0.51220196", "0.5115775", "0.5101841", "0.508821", "0.5086873", "0.508235", "0.50815815", "0.5071202", "0.50704384", "0.5068818", "0.5067393", "0.50591594", "0.5058899", "0.5056281", "0.5056281", "0.5056281", "0.5056281", "0.5055795", "0.5055795", "0.5055078", "0.5052321", "0.505023", "0.5048256", "0.50426155", "0.5041903", "0.5030864", "0.50212777", "0.5018616", "0.50174695", "0.50025654", "0.49931377", "0.49823123", "0.49750006", "0.49739292", "0.49731895", "0.49721378", "0.4972005", "0.49650034", "0.49649352", "0.49570674", "0.49534634", "0.49528313", "0.49519235" ]
0.73310894
0
see the sum of all up and down votes in our Answer database
def points self.answer_votes.sum(:value).to_i end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def total_votes\n up_votes + down_votes\n end", "def upvotes\n votes.sum(:upvote)\n end", "def votes_count\n upvotes + downvotes\n end", "def votes\n up_votes - down_votes\n end", "def total_votes\n self.up - self.down\n end", "def total_votes\n self.get_upvotes.size - self.get_downvotes.size\n end", "def tally\n up_votes - down_votes\n end", "def vote_sum\n sum = 0.0\n votes.each do |v|\n sum += v.value\n end\n sum\n end", "def score\n \tself.get_upvotes.size - self.get_downvotes.size\n end", "def score\n \tself.get_upvotes.size - self.get_downvotes.size\n\tend", "def score\n self.up_votes - self.down_votes\n end", "def rate\n votes = answers.inject(0) do |sum, a| \n sum + a.votes.count \n end\n \n # Return a count of votes and answers\n answers.count + votes\n end", "def votes\n total_votes = 0\n self.options.each do |vote_val|\n #todo: this is probably wrong\n total_votes += vote_val.to_i\n end\n total_votes += (REDIS_VOTES.get('YES:' + self.id).to_i + REDIS_VOTES.get('NO:' + self.id).to_i)\n end", "def total_upvotes\n\t\tself.get_upvotes.sum(:vote_weight)\n\tend", "def total\n votes.sum(:value)\n end", "def points\n votes.sum(:value)\n end", "def points\n votes.sum(:value)\n end", "def upvotes\n votes.where(Vote.arel_table[:weight].gt(0)).sum(:weight)\n end", "def total_votes\n self.votes.sum(:value)\n end", "def upvotes_count\n topic_votes.where(value: 1).sum(:value)\n end", "def total_upvotes\n \tself.shout_votes.where(upvote: true).length - self.shout_votes.where(upvote: false).length\n end", "def vote_score\n votes.sum(:value)\n end", "def total_down_votes\n self.reload\n self.votes.are(:down).count\n end", "def total_down_votes\n self.reload\n self.votes.are(:down).count\n end", "def get_vote_tally\n return self.get_upvotes.size - self.get_downvotes.size\n end", "def total_votes\n answers.sum(&:votes_count)\n end", "def calculate_tallied_votes\n self.tallied_votes = upvotes - downvotes\n self.save\n end", "def downvotes_count\n topic_votes.where(value: -1).sum(:value) * -1\n end", "def score\n votes.sum(:vote)\n end", "def upvotes_received\n reports.inject(0){|sum, report| sum + report.get_vote_difference }\n end", "def score\n\t\tupvotes.count\n\tend", "def rating\n (get_upvotes.size + 2)/(get_downvotes.size + 2)\n end", "def total_votes\n votes.sum(:weight)\n end", "def up_votes\n # we find the up votes for a post by passing value: 1 to where. This fetches a collection of votes with a value of 1. 
\n # We then call count on the collection to get a total of all up votes.\n votes.where(value: 1).count\n end", "def total_up_votes\n self.reload\n self.votes.are(:up).count\n end", "def total_up_votes\n self.reload\n self.votes.are(:up).count\n end", "def vote_up\n 15\n end", "def vote_count\n votes.sum('direction')\n end", "def total_votes\n\t\tself.votes.where(vote: true).size - self.votes.where(vote: false).size\n\tend", "def score\n # add score caching\n total = 0\n self.votes.each do |vote|\n total += vote.value\n end\n total\n end", "def up_votes\n votes.where(value: 1).count\n end", "def up_votes\n votes.where(value: 1).count\n end", "def up_votes\n votes.where(value: 1).count\n end", "def up_votes\n votes.where(value: 1).count\n end", "def vote_total\n if self.votes.length > 0\n self.votes.reduce(0){|sum, vote| sum + vote.value}\n else\n self.votes.length\n end\n end", "def upvote_count\n self.up_retweet_count\n end", "def downvotes\n notes.select(&:downvote?).size\n end", "def up_votes # We'll want a way to see only \"up\" or \"down\" votes\n self.votes.where(value: 1).count\n end", "def points\n self.votes.inject(0) { |points, vote| points + vote.value }\n end", "def points\n self.votes.inject(0) { |points, vote| points + vote.value }\n end", "def upvote_count\n self.get_upvotes.size\n end", "def votes\n scenario_answers.count\n end", "def upvotes\n @ups\n end", "def upvotes\n @ups\n end", "def vote_count\n options.values.reduce(:+)\n end", "def set_vote_tally!\n self.vote_tally = self.get_upvotes.size - self.get_downvotes.size\n end", "def answers_count\n\n\n self.answers_count_yeses + self.answers_count_noes\n\n\n end", "def user_reputation\n tally = []\n answers.each do |answer|\n tally.push(answer.answer_votes_total)\n end\n questions.each do |question|\n tally.push(question.question_votes_total)\n end\n tally = tally.inject(:+)\n return tally\n end", "def up_votes\n\t#\tself.votes.where(value: 1).count\n\t\tvotes.where(value: 1).count\n\tend", "def downvotes\n votes.where(Vote.arel_table[:weight].lt(0)).sum(:weight).abs\n end", "def total_votes\n \tself.shout_votes.count\n end", "def count_votes\n votes = 0\n QuestionVote.where(question_id: self.id).each do |question|\n votes += question.vote\n end\n votes\n end", "def total_up_votes\r\n self.song_up_votes.size\r\n end", "def upvote(answer)\n upvoted_answers << answer\n end", "def show\n @answers = @question.answers.order_by(:overall_votes.desc)\n @question_upvotes = @question.votes.where(status: 1).count\n @question_downvotes = @question.votes.where(status: -1).count\n end", "def show\n @answers = @question.answers.order_by(:overall_votes.desc)\n @question_upvotes = @question.votes.where(status: 1).count\n @question_downvotes = @question.votes.where(status: -1).count\n end", "def votes_cast_count\n total_votes\n end", "def total_points\n if self.has_paid?\n sum = 0\n self.bets.each { |b| sum+=b.points }\n self.answers.each { |a| sum+=a.points }\n sum\n else\n -1\n end\n end", "def likes\n self.cached_votes_total\n end", "def total_votes\n num_of_votes = self.votes.count\n return num_of_votes\n end", "def vote_down\n 100\n end", "def down_votes\n votes.where(value: -1).count\n end", "def down_votes\n votes.where(value: -1).count\n end", "def upvotes_index\n self.get_upvotes.count\n end", "def value\n return self.upvote ? 
1 : -1\n end", "def votes_left\n vote_total = 10\n # @submissions = Submission.find_all_by_story_id(params[:id])\n @submissions.each do |submission|\n vote_total -= submission.vote\n end\n vote_total\nend", "def votes_count\n Vote.select('SUM(votes.value) AS total').\n where(\"votes.votable_id = ? AND votes.votable_type = ? \", self.id, 'Event').\n first.total.to_i\n end", "def up_down_votes\n Vote.seperate_votes(self.votes)\n end", "def votes_count\n votes.size\n end", "def plusminus\n votes_for - votes_against\n end", "def rating\n return 0 if total_votes == 0\n (100 * self.yeses) / total_votes\n end", "def get_vote_count\n\t\treturn calculate_vote_count\n\tend", "def report_votes(total_votes)\n total_votes.each do |finalist, vote_count|\n puts \"#{finalist.to_s.capitalize.light_blue} received #{vote_count.to_s.yellow} votes.\"\n end\n end", "def vote_up\n update_votes(1)\n end", "def total_down_votes_count(voting_field = \"votes\")\n eval(voting_field).try(:[], 'total_down_count') || 0\n end", "def post_votes\n []\n end", "def hitting_total\n self.rating_19 +\n self.rating_20 +\n self.rating_21 +\n self.rating_22 +\n self.rating_23 +\n self.rating_24 +\n self.rating_25 +\n self.rating_26 +\n self.rating_27\n end", "def total_votes\n self.reload\n self.votes.count\n end", "def total_votes\n self.reload\n self.votes.count\n end", "def baserunning_total\n self.rating_15 + \n self.rating_16 +\n self.rating_17 +\n self.rating_18\n end", "def up_votes\n RedisVoteable.redis.scard prefixed(\"#{class_key(self)}:#{UP_VOTES}\")\n end", "def vote_count\n self.votes.count\n end", "def update_rating!\n # not using count because lates some votes might be something other than +/- 1\n self.positive_vote_count = votes.positive.sum(:value).abs\n self.negative_vote_count = votes.negative.sum(:value).abs\n self.rating = votes.sum(:value)\n save!\n end", "def review_points\n self.reviews.map { |review| review.vote_count }.inject(:+)\n end", "def upvote\n\t\tvotes = self.nov\n\t\tupdate(nov: votes +1)\n\tend", "def set_votes\n \n @upvote = []\n @downvote = []\n\n if logged_in?\n votes = Vote.where(:user_id => session[:user_id]).to_a\n votes.each do |vote|\n @upvote << vote[:comment_id].to_i if vote[:upvote].to_i == 1\n @downvote << vote[:comment_id].to_i if vote[:downvote].to_i == 1\n end\n end\n end", "def upvote\n self.votes += 1\n save\n end", "def answers_with_data\n return answers.map(&:data_result), total_votes\n end", "def check_score\n score = @post.upvotes - @post.downvotes\n # raise 'hell'\n @post.update_attribute(:score, score)\n end", "def recalculate_score!\n self.score = Vote.find(:all, :conditions => ['proposal_id = ? AND value IN (-1, 1)',self.id], :select => 'value').map{|v|v.value}.sum\n self.save!\n end" ]
[ "0.80387414", "0.7942898", "0.7847634", "0.7722391", "0.76697755", "0.7648796", "0.7617088", "0.75817317", "0.75098", "0.7483972", "0.74685025", "0.7431745", "0.740563", "0.73689187", "0.72940624", "0.72753847", "0.72753847", "0.72522324", "0.72514665", "0.72423214", "0.7237864", "0.72318125", "0.7228848", "0.7228848", "0.72103804", "0.7209933", "0.7181352", "0.7175297", "0.7168461", "0.71412885", "0.7115141", "0.7065091", "0.70577586", "0.70549953", "0.70308733", "0.70308733", "0.7025288", "0.69857585", "0.69452935", "0.6933852", "0.6863013", "0.6863013", "0.6863013", "0.6863013", "0.68549925", "0.68515116", "0.684839", "0.68457717", "0.6829956", "0.6829956", "0.6770148", "0.67239", "0.6704784", "0.6704784", "0.6685134", "0.66790605", "0.66769046", "0.66749716", "0.66689485", "0.66640383", "0.66285354", "0.66228336", "0.6619609", "0.6558301", "0.6533421", "0.65329576", "0.6525287", "0.6502801", "0.6494559", "0.64575225", "0.64460933", "0.64446133", "0.64446133", "0.6400145", "0.6371118", "0.63390523", "0.6323333", "0.6297502", "0.62969714", "0.62868106", "0.62844586", "0.6281105", "0.6280721", "0.62714016", "0.626659", "0.6256295", "0.624223", "0.6231798", "0.6231798", "0.6223036", "0.62143475", "0.6210437", "0.6207897", "0.6202345", "0.61835676", "0.6162319", "0.6155003", "0.6153762", "0.6142774", "0.61189216" ]
0.76181686
6
whoever created a post, should automatically be set to "voting" it up
def create_vote user.answer_votes.create(value: 1, answer: self) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_vote\n self.user.votes.create(value: 1, post: self)\n end", "def create_vote\n self.user.votes.create(value: 1, post: self)\n end", "def create_vote\n\t\tuser.votes.create(value: 1, post: self)\n\tend", "def create_vote\n user.votes.create(value: 1, post: self)\n end", "def create_vote\r\n\t\tuser.votes.create(value: 1, post: self)\r\n\tend", "def create_vote\n user.votes.create(value: 1, post: self)\n end", "def create\n @post = Post.new(params[:post])\n @post.user ||= current_user\n\n if @post.save\n Vote.vote_thusly_on_post_or_comment_for_user_because(1, @post.id, nil, current_user.id, nil)\n\n flash[:success] = \"Your post has been submitted successfully.\"\n\n redirect_to @post.comments_url\n else\n return render action: \"new\"\n end\n end", "def create_vote\n \t\t#self.votes.create(value: 1, user: self.user )\n \t#\tvotes.create(value: 1, user: user )\n\t \tuser.votes.create(value: 1, post: self)\n \tend", "def add_post\n if current_user.id.to_s == params[:user_id].to_s\n post = Post.new(:user_id => params[:user_id], :content => params[:content],\\\n :title => params[:title], :upvotes => 0, :downvotes => 0, :rank => 0)\n post.save!\n end\n end", "def new_post(post)\n @user = post.wall_user\n @post = post\n\n mail to: @user.email, subject: \"Someone just posted on your wall.\", reply_to: post.poster.email\n end", "def user_has_voted(post)\n if (current_user)\n if (post.voted_by?(current_user))\n 1\n else\n 0\n end\n else\n 2\n end\nend", "def uppost\n post = Post.find_by_id(params[:id])\n if !post.nil?\n post.vote(current_user, true)\n end\n end", "def create\n @post = Post.new(params[:post])\n @post.user = current_user\n @post.score = 0\n @post.upvoters = \"[-1]\"\n @post.downvoters = \"[-2]\"\n \n respond_to do |format|\n if @post.save\n format.html { redirect_to @post, notice: 'Post was successfully created.' }\n else\n format.html { render action: \"new\" }\n end\n end\n\n if params[:promote]\n curr_user = User.find(current_user.id)\n tel_numbers = curr_user.destring_favs(curr_user)\n tel_numbers.each do |id|\n user = User.find(id)\n TextMessage.new(\"#{curr_user.name} says #{params[:post][:content]}\", user.phone_no.to_s).send\n end\n end\n\n end", "def create_initial_vote_for_author\n votes.create(:user => user, :value => 1)\n end", "def create\n @postvote = Postvote.new(params[:id])\n @postvote.user_id = session[:user_id]\n @postvote.post_id = @@temp\n @postvote.time = Time.now\n @pst = Post.find(@postvote.post_id)\n @pst.attributes = {:time => Time.now} #Updates the time for the vote in the POST table as well. 
To record recent activity.\n respond_to do |format|\n if @postvote.save\n @pst.save\n format.html { redirect_to post_path(@pst), notice: 'You have voted for the current post' }\n format.json { render json: @postvote, status: :created, location: @postvote }\n else\n format.html { render action: \"new\", notice: 'Vote not recorded' }\n format.json { render json: @postvote.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @post = Post.find(params[:postID])\n if (@post.checkIfVoted(@post.id.to_s, session[:user_id].to_s)[0]!=nil )\n redirect_to @post, notice: 'You have already voted for this post.'\n elsif @post.postUserID.to_s == session[:user_id].to_s\n redirect_to @post, notice: 'You can not vote for yourself'\n else\n @vote = Vote.new(params[:vote])\n @vote.postID = params[:postID]\n @vote.userID = session[:user_id]\n\n @post.increment(:votes)\n @post.save\n # @post.voted((@post.votes+1).to_s,@post.id.to_s)\n #@post.increment(votes)\n\n respond_to do |format|\n if @vote.save\n format.html { redirect_to @post, notice: 'Vote successfully.' }\n format.json { render json: @vote, status: :created, location: @vote }\n else\n format.html { render action: \"new\" }\n format.json { render json: @vote.errors, status: :unprocessable_entity }\n end\n end\n end\n end", "def newPost(_, _, _, metaweblog_struct, _)\n post = Post.create(@db.content_path, metaweblog_struct)\n @db.refresh_post_paths\n\n run_user_cmd\n post['postid']\n end", "def create\n #initialize a new post object with the parameters submitted, validated by post_params\n @post = Post.new(post_params)\n \n isComment = false\n #check whether this is actually a comment, meaning it should have kind=2 and will need an originating post id\n if params[:kind].present?\n @post.kind = params[:kind].to_i\n @post.originatingPost_id = params[:originatingPost_id].to_i\n isComment = true\n \n #otherwise, it is a post, which optionally has tags\n else\n @post.kind = 0\n @tagsToAdd = params[:tagsToAdd].split(\" \")\n @tagsToAdd.each do |t|\n @post.tags << createTag(t)\n end\n end\n \n #either way, the currently logged in user should be logged as the creator of this post/comment\n @post.user = User.find(session[:user_id])\n \n if @post.save!\n if isComment\n redirect_to action: \"show\", :id => params[:originatingPost_id] #stay on the post's show page\n else\n redirect_to action: \"show\", :id => @post.id #go to this new post's show page\n end\n else\n redirect_to action: 'new' #upon failure, try again\n end\n end", "def create\n @post = current_user.posts.new(params[:post])\n @post.edited_at = Time.current\n @post.published_at = Time.current if params[:publish]\n if @post.save\n flash[:success] = \"Post criado com sucesso.\"\n redirect_to admin_posts_path\n else\n @page_title = \"Novo Post\"\n render :action => 'new'\n end\n end", "def upvoted?(post)\n voted_up_on? post\n end", "def create\n @post = Post.new(post_params)\n @post.user = current_user\n @post.save\n redirect_to @post\n\n end", "def create\n @post.character = current_user.character\n authorize! :create, @post\n create! 
{ posts_url }\n end", "def create_post_owner_notification_of_reply(reply)\n return if reply.user.id == self.user.id # don't notify user of his own replies..\n if self.user.role == \"Poster\"\n url = \"wall_expert/#{self.id}\"\n else\n url = \"wall/#{self.id}\"\n end\n notify(self.user, \"Your post was commented on!\", \"#{reply.user.profile.full_name} commented on your post!\", :from => reply.user, :key => post_reply_notification_key(reply), :link => '#{url}?reply=#{reply.id}')\n end", "def create\n if(Post.find(params[:post_vote][:post_id]).user_id != params[:post_vote][:user_id]) # only vote if it's not the same user who wrote the post!\n @api_v1_post_vote=PostVote.find_by_post_id_and_user_id(params[:post_vote][:post_id], params[:post_vote][:user_id])\n if @api_v1_post_vote.nil?\n @api_v1_post_vote = PostVote.new(params[:post_vote])\n else\n @api_v1_post_vote.vote = params[:post_vote][:vote]\n end\n\n respond_to do |format|\n if @api_v1_post_vote.save\n #format.html { redirect_to @api_v1_post_vote, notice: 'Post vote was successfully created.' }\n format.json { render json: @api_v1_post_vote, status: :created }\n else\n # format.html { render action: \"new\" }\n format.json { render json: @api_v1_post_vote.errors.messages.values, status: 400 }\n end\n end\n else\n render json: \"Can't vote for yourself.\", status: 400\n end\n end", "def create\n if params[:post][:repost_id]\n #Adding if clicking repost, deleting if unclicking repost\n repost_id = (params[:post][:repost_id]).to_i\n repost_exist = current_user.posts.all.select do |post|\n if post.repost_id == repost_id\n post\n end\n end\n if repost_exist.length > 0\n Post.destroy(repost_exist.first.id)\n else\n @post = Post.new(params[:post])\n @post.user_id = current_user.id\n @post.save\n end\n else\n @post = Post.new(params[:post])\n sentiment_data_json = Sentimentalizer.analyze(params[:post][:content])\n sentiment_data = JSON.parse(sentiment_data_json)\n sentiment_smiley = sentiment_data[\"sentiment\"]\n if sentiment_smiley == \":)\"\n @post.sentiment = true\n else\n @post.sentiment = false\n end\n @post.sentiment_prob = sentiment_data[\"probability\"]*100\n @post.user_id = current_user.id\n @post.save\n end\n respond_to do |format|\n format.html { redirect_to posts_url, notice: 'Post was successfully created.' 
}\n format.json { render json: @posts, status: :created, location: @post }\n end\n end", "def create\n puts \"IS THIS WORKING NOWWWW?\"\n\n @previous_votes = Vote.where(:post_id => params[:post_id],:user_id => @current_user.id).all\n if @previous_votes.blank?\n @vote = Vote.new(params[:vote])\n @vote.post_id = params[:post_id]\n @vote.user_id = @current_user.id\n else\n @vote = nil\n end\n\n respond_to do |format|\n if @vote\n @vote.save\n format.html { redirect_back_or(posts_url, \"Thank you for voting!\", \"valid\")}\n format.json { render json: @vote, status: :created, location: @vote }\n else\n format.html {redirect_back_or(posts_url, \"You can't vote for a post more than once!\", \"invalid\")}\n format.json { render json: @vote.errors, status: :unprocessable_entity }\n end\n end\n end", "def up_vote\n @post = Post.not_published.find(params[:id])\n vote = @post.moderator_votes.new(:up_vote => true)\n vote.user = current_user if logged_in?\n vote.session_id = session[:session_id]\n vote.save\n flash[:notice] = \"Thanks for your help moderating upcoming posts.\"\n redirect_to moderators_path\n end", "def new\n @post = Post.new\n @post.user ||= current_user\n end", "def create\n # @post = Post.new(params[:post])\n @post = Post.new\n @post.body = params[:body]\n @post.user_id = params[:user_id]\n @post.parent_id = params[:parent_id]\n\n respond_to do |format|\n if session[:user].nil?\n flash[:error] = 'Only users can post.'\n format.html { redirect_to \"/\" }\n elsif @post.user_id == session[:user].id && @post.body == \"\"\n flash[:error] = 'Users can\\'t upvote their own posts.'\n format.html { redirect_to \"/\" }\n elsif @post.save\n format.html { redirect_to \"/\", notice: 'Post was successfully created.' }\n format.json { render json: @post, status: :created, location: @post }\n else\n format.html { render action: \"new\" }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end", "def setup #Because setup is called by a before_filter, the authorization with be checked every time a vote is submitted.\n @topic = Topic.find(params[:topic_id])\n @post = @topic.posts.find(params[:post_id])\n authorize! :create, Vote, message: \"You need to be a user to do that.\" #the setup method has authorize so that only Bloccit members can vote\n\n @vote = @post.votes.where(user_id: current_user.id).first\n end", "def create\n @post = Post.new(post_params)\n @post.user_id = current_user.id\n \n respond_to do |format|\n if @post.save\n current_user.update(point: current_user.point + 1)\n format.html { redirect_to @post, notice: 'Post was successfully created.' }\n format.json { render action: 'show', status: :created, location: @post }\n \n else\n format.html { render action: 'new' }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @post = Post.new(post_params)\n @post.user_id = current_user.id\n\n flash[:notice] = 'Post was successfully created.' if @post.save\n redirect_to posts_url\n end", "def create\n @resource = Post.find_by(user_id: current_user.id, url: params[\"post\"][\"url\"])\n if @resource.present?\n # refer article recentry or not\n @refer_recently = @resource.created_at > DateTime.now - 7.days\n else\n @resource = Post.create!(user_id: current_user.id, url: params[\"post\"][\"url\"], title: params[\"post\"][\"title\"])\n UsersPost.create! 
user_id: current_user.id, post_id: @resource.id\n @refer_recently = false\n end\n end", "def add_post(post)\n @posts << post #pushes post into post array so that we can compare the author with that author\n post.author = self\nend", "def create_post_owner_notification_of_like(like)\n return if like.user.id == self.user.id # don't notify user of his own likes..\n case like.likeable.wallable.class.name\n when 'Recipe'\n url = \"recipes/#{self.wallable_id}\"\n when 'Tip'\n url = \"tips/#{self.wallable_id}\"\n else\n base_url = self.user.poster? ? \"wall_expert\" : \"wall\"\n url = !self.parent_post_id.nil? ? \"#{base_url}/#{self.parent_post_id}?reply=#{self.id}\" : \"#{base_url}/#{self.id}\"\n end\n notify(self.user, \"Your post was liked!\", \"#{like.user.profile.full_name} liked your post!\", :from => like.user, :key => post_like_notification_key(like), :link => url)\n end", "def create\n post = Post.find(params[:post_id])\n Share.create(user_id: current_user.id, post_id: post.id)\n Post.find(post.id).update_attribute(:updated_at, Time.now)\n redirect_to posts_path\n end", "def add_post(post)\n post.author = self \n end", "def create\n @vote = Vote.new\n if params[:problem_id] && params[:vote_type] && params[:post_type]\n vote_exists = false\n author_vote = false\n @post_type = params[:post_type].to_s.downcase\n @post_id = params[:id]\n \n votetype = VoteType.find_by_name(params[:vote_type])\n @vote.vote_type = votetype\n @vote.user_id = session[:user_id]\n if params[:post_type] == \"Post\"\n @vote.post = Post.find_by_id(params[:id])\n @vote_object = @vote.post\n vote_exists = true unless Vote.where(\"post_id=#{params[:id]} AND user_id=#{session[:user_id]}\").blank?\n author_vote = true if @vote.user_id == @vote.post.user_id\n elsif params[:post_type] == \"Solution\"\n @vote.solution = Solution.find_by_id(params[:id])\n @vote_object = @vote.solution\n vote_exists = true unless Vote.where(\"solution_id=#{params[:id]} AND user_id=#{session[:user_id]}\").blank?\n author_vote = true if @vote.user_id == @vote.solution.user_id\n elsif params[:post_type] == \"Comment\"\n @vote.comment = Comment.find_by_id(params[:id])\n @vote_object = @vote.comment\n vote_exists = true unless Vote.where(\"comment_id=#{params[:id]} AND user_id=#{session[:user_id]}\").blank?\n author_vote = true if @vote.user_id == @vote.comment.user_id\n else\n @vote = Vote.new\n end\n if !vote_exists && !author_vote && @vote.save\n respond_to do |format|\n format.html { \n flash[:notice] = \"#{params[:post_type].to_s} Vote Successful\"\n redirect_to(:controller => 'posts', :action => 'show', :id => params[:problem_id])\n }\n format.js {\n @vote_failed = false\n @notice = \"Vote Successful\" \n }\n end\n else \n respond_to do |format|\n format.html { \n flash[:notice] = \"Vote Failed\"\n flash[:notice] += \". You have already voted on this #{@post_type}\" if vote_exists\n flash[:notice] += \". You cannot vote on a #{@post_type} that you created.\" if author_vote\n redirect_to(:controller => 'posts', :action => 'show', :id => params[:problem_id])\n }\n format.js {\n @vote_failed = true\n @notice = \"Vote Failed\"\n @notice += \". You have already voted on this #{@post_type}\" if vote_exists\n @notice += \". 
You cannot vote on a #{@post_type} that you created.\" if author_vote\n }\n end\n end\n else\n flash[:notice] = \"An error occured\"\n redirect_to(:controller => 'public', :action => 'index')\n end\n end", "def upvote\n\t\tif vote_once\n\t\t\t@vote = @post.votes.create(user_id: current_user.id)\n\t\telse\n\t\t\t@vote = false\n\t\tend\n\t\tbyebug\n\t\trespond_to do |format|\n\t\t\tformat.js\n\t\tend\n\tend", "def create\n #Only the loged user can create posts for himself, not any other user.\n @post = @user.posts.new(post_params)\n\n respond_to do |format|\n if @post.save\n format.html { redirect_to [@user,@post], notice: @user.first_name + ', the post was successfully created.' }\n format.json { render :show, status: :created, location: @post }\n else\n format.html { render :new }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n\t\t@post = Post.find(params[:post_id])\n @replyUserID = current_user.id\n\t\t@reply = @post.replies.create(reply_params)\n # puts line of code is here for checking if the user_id was being taken in \n # This was used in debugging when linking replies to a user_id. \n puts '////'\n puts @reply.inspect\n puts '////'\n if @reply.save\n redirect_to @post, notice: 'Reply was successfully created!' \n else\n render :new\n end\n\tend", "def create\r\n @post = Post.new(post_params)\r\n\r\n #-------add current_user to user_id-----------\r\n @post.user_id = current_user.id\r\n #---------------------------------------------\r\n \r\n respond_to do |format|\r\n if @post.save\r\n format.html { redirect_to @post, notice: 'Post was successfully created.' }\r\n format.json { render :show, status: :created, location: @post }\r\n else\r\n format.html { render :new }\r\n format.json { render json: @post.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end", "def add_post(post)\n post.author = self\n end", "def add_post(post)\n post.author = self\n end", "def add_post(post)\n post.author = self\n end", "def create\n @post = Post.new(post_params)\n @post.user_id = current_user.id\n\n if @post.save\n redirect_to @post\n else\n render :new\n end\n end", "def create\n\t\tif already_liked?\n\t\t\tflash[:notice] = \"you can't like more than once\"\n\t\telse\n\t\t\t\n\t\t\t@like = @post.likes.create(:user_id => current_user.id)\n\t\tend\n\t\tredirect_to post_path(@post)\n\t\t\n\tend", "def owner\n if @post.author_id == @current_user.id && @current_user.baned == false\n else\n respond_to do |format|\n format.html { redirect_to posts_url, alert: 'У вас нет прав, зарегестрируйтесь.' 
}\n end\n end\n end", "def create\n @post = Post.new(post_params)\n @post.update_attribute(:user_id, current_user.id)\n \n if @post.save\n flash[:success] = \"Post Created\"\n redirect_to home_path\n \n else\n render action: 'new' \n \n end\n \n end", "def createPost(post)\n isSavePost = post.save\n end", "def create\n @post = current_user.posts.new(params[:post])\n if @post.save\n redirect_to @post, notice: \"Post has been created\"\n else\n render :new\n end\n end", "def getPostedBy\r\n\t\t\t\t\treturn @postedBy\r\n\t\t\t\tend", "def getPostedBy\r\n\t\t\t\t\treturn @postedBy\r\n\t\t\t\tend", "def write!(other_user)\n wallposts.create!(posted_id: other_user.id)\n end", "def create\n \n unless current_user.can_post\n redirect_to welcome_page_path\n end\n \n @post = Post.new\n @post.user = current_user\n @post.privly_application = params[:post][:privly_application]\n\n # Posts default to Private\n if params[:post][:public]\n @post.public = params[:post][:public]\n else\n @post.public = false\n end\n\n set_burn_date\n \n # The random token will be required for users other than the owner\n # to access the content. The model will generate a token before saving\n # if it is not assigned here.\n @post.random_token = params[:post][:random_token]\n \n @post.update_attributes(params[:post])\n \n respond_to do |format|\n if @post.save\n response.headers[\"X-Privly-Url\"] = @post.privly_URL\n format.any { render :json => get_json, \n :status => :created, :location => @post }\n else\n format.any { render :json => @post.errors, \n :status => :unprocessable_entity }\n end\n end\n end", "def create\n @post = Post.new(post_params)\n @post.user = current_user\n if @post.save\n success_post_create\n else\n error_post_save\n end\n end", "def create\n @post = Post.new(post_params)\n if ( @post.user_id != current_user.id )\n redirect_to posts_path, notice: 'Undifined user, post was not successfully created.'\n else\n respond_to do |format|\n if @post.save\n format.html { redirect_to @post, notice: 'Post was successfully created.' }\n format.json { render :show, status: :created, location: @post }\n else\n format.html { render :new }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end\n end", "def post_ad\n # the post_ad method is defined in lib/post_o_matic.rb. the method creates a new listing on kingsnake.com\n # and returns true or false, depending on whether or not it was posted.\n is_posted = super\n if is_posted\n update_posted_ad!\n end\n is_posted\n end", "def set_poster post, sentFlg\n sentFlg ? post.recipient : post.user\n end", "def add_post(post)\n @posts << post\n post.author = self\n @@post_count += 1\n end", "def add_post(post)\n @posts << post\n post.author = self\n @@post_count += 1\n end", "def set_post\n @post = Post.find(params[:id])\n @talk = @post.talk\n @originator_person = @post.person\n end", "def require_creator\n unless logged_in? 
&& current_user = @post.user\n flash[:error] = \"Can't be done\"\n redirect_to root_path\n end\n end", "def voted_for?(post)\n evaluations.where(target_type: post.class, target_id: post.id).present?\n end", "def voted_for?(post)\n evaluations.where(target_type: post.class, target_id: post.id).present?\n end", "def create_vote\n user.answer_votes.create(value: 1, answer: self)\n end", "def create\n @post = Post.new(params[:post])\n if current_user\n @post.user_id = current_user.id\n end\n \n respond_to do |format|\n if @post.save\n format.html { redirect_to(root_path, :notice => 'Post was successfully created.') }\n format.xml { render :xml => @post, :status => :created, :location => @post }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @post = Post.new(params[:post])\n @post.user_id = current_user\n\n respond_to do |format|\n if @post.save\n \n # mixpanel track post created\n if Rails.env.production?\n #mixpanel.track 'Post Created', { :distinct_id => current_user.id }\n end\n \n format.html { redirect_to @post, notice: 'Post was successfully created.' }\n format.json { render json: @post, status: :created, location: @post }\n else\n format.html { render action: \"new\" }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_vote\n @vote = Vote.create(vote_params)\n if !following_story\n Follow.create(user_id: session[:user_id], story_id: get_story_id)\n end\n cannonize\n redirect_to request.referrer\n end", "def create\r\n new_post = current_user.posts.new(post_params)\r\n new_post.save\r\n redirect_to post_path new_post\r\n end", "def create\n @post = Post.new(post_params)\n @post.postable_id = current_user.id\n if Lender.where(email: session[:user_email]).first\n @post.postable_type = \"Lender\"\n elsif Borrower.where(email: session[:user_email]).first\n @post.postable_type = \"Borrower\"\n end\n if @post.save\n redirect_to \"/chat/#{current_user.id}\"\n else\n flash[:errors] = @post.errors.full_messages\n end\n end", "def create\n @post.author = current_user\n\n if @post.save\n flash[:notice] = 'Post was successfully created.'\n end\n \n respond_with @post\n end", "def can_create_comment?(post); true; end", "def fetch_create_post\n end", "def upvote\n @post.likes.create(user_id: current_user.id)\n\n respond_to do |format|\n format.html { redirect_to posts_path }\n format.js\n end\n end", "def create\n @post = Post.new(name: params[:name],\n email: params[:email],\n year: params[:year],\n major: params[:major],\n Q1: params[:Q1],\n Q2: params[:Q2],\n Q3: params[:Q3],\n Q4: params[:Q4],\n Q5: params[:Q5],\n Q6: params[:Q6],\n user_id: @current_user.id)\n if @post.save\n flash[:notice] = \"Your answer has been recorded.\"\n redirect_to(\"/user/#{@current_user.id}\")\n else\n render(\"post/new\")\n end\n end", "def create\n @post.author = current_user\n @post.current_revision.user = current_user\n handle_images\n respond_to do |format|\n if @post.save\n handle_pingbacks\n flash[:notice] = 'Post was successfully created.'\n format.html { redirect_to(@post) }\n format.xml { render :xml => @post, :status => :created, :location => @post }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @post.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create_other_members_posts\n\t\tposts = []\n\t\tother_user = create_other_member_user\n\t\tposts << FactoryGirl.create(:post, :user_id => 
other_user.id, :title => \"Post 1\", :body => \"In Post 1 we talk about...\", :public => true) \n\t posts << FactoryGirl.create(:post, :user_id => other_user.id, :title => \"Post 2\", :body => \"In Post 2 we talk about...\", :public => false) \n\t posts << FactoryGirl.create(:post, :user_id => other_user.id, :title => \"Post 3\", :body => \"In Post 3 we talk about...\", :public => true)\n\tend", "def create\n @vote = Vote.new(params[:vote])\n @vote.user_id = session[:user_id]\n if params[:type].eql? \"post\"\n @vote.post_id = params[:post_id]\n else\n @vote.comment_id = params[:comment_id]\n end\n @vote.save\n redirect_to post_url(Post.find(params[:post_id]))\n=begin\n respond_to do |format|\n if @vote.save\n format.html { redirect_to @vote, notice: 'Vote was successfully created.' }\n format.json { render json: @vote, status: :created, location: @vote }\n else\n format.html { render action: \"new\" }\n format.json { render json: @vote.errors, status: :unprocessable_entity }\n end\n end\n=end\n end", "def check_post_owner\n json_response({ error: 'Not authorized' }, :unauthorized) unless @post.user_id == current_user.id\n end", "def create\n @vote = Vote.new\n end", "def upvote\n @post = Post.find(params[:id])\n @post_count = Post.count\n @vote = Vote.new(user_id: session[:id], post_id: @post.id, score: 1)\n if @vote.save\n @post.update_attribute(:respect, @post.respect + 1)\n flash[:notice] = 'Post upvoted successfully'\n end\n redirect_to(action: 'index', topic_id: @topic.id)\n end", "def owner?(post_owner) # <= User object\n current_user == post_owner\n end", "def create\n @post = Post.new(post_params)\n @post.author = @current_user\n\n respond_to do |format|\n if @post.save\n format.html { redirect_to @post, notice: 'Post was successfully created.' 
}\n format.json { render action: 'show', status: :created, location: @post }\n else\n format.html { render action: 'new' }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end", "def vote_object(the_user)\n post_vote_array(the_user).first.id\n end", "def create\n @post = @current_user.posts.build(params[:post])\n if @post.save then\n Feed.create_post(@post)\n end\n end", "def initialize #initializes post belonging to author \r\n @post = author\r\n end", "def create_guest_vote\n user = current_or_guest_user\n if user.first_name == \"guest\"\n if session[:votes].nil?\n session[:votes] = [params[:post_id]]\n else\n session[:votes].push(params[:post_id])\n end\n end\n redirect_to '/posts'\n end", "def create\n @post = Post.new(params[:post].merge!(:user => current_user))\n if @post.save\n flash[:notice] = 'Your post has been created.'\n redirect_to root_path\n else\n render action: \"new\"\n end\n end", "def create\n @user = current_user #User.find(params[:user_id])\n @post = Post.new(post_params)\n @post.user = @user\n @post.tag_list = params[:tag_list] || ''\n\n\n if ( params[:post][:game_id].to_i > 0 )\n @game = Game.find(params[:post][:game_id])\n if @game.owner == current_user\n @post.game = @game\n end\n end\n\n if ( params[:post][:character_id].to_i > 0 )\n @character = Character.find(params[:post][:character_id])\n if @character.owner == current_user\n @post.character = @character\n end\n end\n\n logger.info \"create params: \" + params.inspect\n\n if !current_user.admin?\n params[:is_system_announcement] = false\n params[:is_game_announcement] = false\n end\n \n respond_to do |format|\n if @post.save\n @post.create_poll(params[:poll], params[:choices]) if params[:poll]\n \n flash[:notice] = @post.category ? 
:post_created_for_category.l_with_args(:category => @post.category.name.singularize) : :your_post_was_successfully_created.l\n format.html { \n if @post.is_live?\n if [email protected]?\n redirect_to seo_game_path(@post.game)\n elsif [email protected]?\n redirect_to seo_character_path(@post.character)\n else\n redirect_to user_path(@user)\n end\n else\n redirect_to manage_user_posts_path(@user)\n end\n }\n format.js\n else\n format.html { render :action => \"new\" }\n format.js \n end\n end\n end", "def create\n @post = Post.new(post_params)\n @post.user_id=current_user.id\n create_review(@post)\n end", "def is_the_author_of_the_post(post)\n post.author_id == current_author.id\n end", "def published_post\n if self.published && self.published_at.nil?\n self.published_at = Time.now\n end\n end", "def create\n @post = Post.new(post_params)\n @post.user = current_user\n if @post.save\n redirect_to post_path(@post)\n else\n render :new\n end\n end", "def create\n @post = current_user.posts.create(params[:post])\n\n if @post.save\n flash[:success] = 'Post was successfully created.'\n redirect_to @post\n else\n render :new\n end\n\n end", "def create\n @post = Post.new(post_params)\n @post.user = current_user\n if @post.save\n flash[:success] = \"Post was successfully saved\"\n redirect_to post_path(@post)\n else \n render 'new'\n end\n end", "def post_owner(params)\n if validate_post_owner(params)\n db = connect_to_db()\n db.results_as_hash = true\n \n result = db.execute('SELECT userId FROM posts where id=?', params[\"id\"].to_i)\n return result[0][0]\n else\n return false\n end\n end", "def create\n @post = Post.new(params[:post])\n @reply = @post.replies.build\n @post.user_id = current_user.id\n\n respond_to do |format|\n if @post.save\n format.html { redirect_to edit_reply_path(@reply), notice: @post.title }\n format.json { render json: @post, status: :created, location: @post }\n else\n format.html { render action: \"new\" }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n if current_user.admin\n @post = Post.new(post_params)\n @post.user = current_user\n\n respond_to do |format|\n if @post.save\n format.html { redirect_to @post, notice: \"Postitus edukalt loodud!\" }\n format.json { render :show, status: :created, location: @post }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n\n else\n flash[:notice] = 'You dont have permission to do that!'\n redirect_to posts_path\n end\n\n end", "def like_post!(post)\n likes.create!(epost_id: post.id, like: 1)\n end" ]
[ "0.71825874", "0.71825874", "0.715724", "0.71569854", "0.7144041", "0.70861995", "0.7047792", "0.69802547", "0.6722854", "0.65306336", "0.65235966", "0.6502019", "0.64960873", "0.6470705", "0.64136326", "0.63874906", "0.63657665", "0.63622963", "0.63470256", "0.63135093", "0.6285881", "0.6270901", "0.6252667", "0.62520134", "0.6215291", "0.6192344", "0.61870784", "0.6185979", "0.6172253", "0.6161855", "0.61607224", "0.61579543", "0.6148843", "0.6141954", "0.6140544", "0.6138634", "0.6115678", "0.61154526", "0.6114886", "0.61136967", "0.6111475", "0.6101695", "0.6098028", "0.6098028", "0.6098028", "0.60887176", "0.60809064", "0.6070987", "0.60690695", "0.60672593", "0.6062821", "0.6055391", "0.6055391", "0.60548836", "0.605459", "0.6037053", "0.6028665", "0.60193396", "0.6018619", "0.6014902", "0.6014902", "0.5992129", "0.59841526", "0.5980233", "0.5980233", "0.59768826", "0.59708214", "0.5956956", "0.59513205", "0.5949129", "0.59438765", "0.59369195", "0.5933043", "0.5930628", "0.5923716", "0.5919291", "0.5911772", "0.5910794", "0.59090286", "0.5902764", "0.58972317", "0.5896578", "0.58923084", "0.5889255", "0.58886206", "0.58876234", "0.58857656", "0.58755475", "0.5872084", "0.58717495", "0.5868808", "0.58681667", "0.5866104", "0.5856226", "0.58497626", "0.5842306", "0.58404624", "0.58403903", "0.5837127", "0.5836606" ]
0.5990325
62
def current_user return unless session[:user_id]
def arrange end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def current_user\n !session[:uid].nil?\n end", "def current_user?\n return session[:user_id] != nil\n end", "def current_user\n if session[:user] != nil\n # return the user id\n return session[:user]\n else\n return false\n end\nend", "def current_user # Check the current user\n @current_user ||= User.find(session[:user_id]) if session[:user_id]\n end", "def current_user\n User.find_by_id(session[:user_id]) if session[:user_id] \n end", "def current_user\n\tsession[:user_id] ? User.find(session[:user_id]) : nil\nend", "def current_user\n\tsession[:user_id] ? User.find(session[:user_id]) : nil\nend", "def current_user\n if session[:user_id]\n @current_user = User.find(session[:user_id])\n else\n false\n end\n end", "def current_user\n User.find_by_id(session[:user_id]) if session[:user_id].present?\n end", "def current_user\n logged_in? ? User.find_by_id(session[:user_id]) : nil\n end", "def current_user\t\n\treturn unless session[:user_id]\n\t@current_user ||= User.find_by_id(session[:user_id])\n end", "def current_user\n return unless session[:user_id]\n @current_user ||= User.find_by_id(session[:user_id])\n end", "def current_user\n return unless session[:user_id]\n @current_user ||= User.find_by_id(session[:user_id])\nend", "def current_user\n if session[:user_id]\n @current_user = User.find(session[:user_id])\n @current_user\n else\n false\n end\n end", "def current_user\n if session[:user_id]\n @current_user = User.find(session[:user_id])\n @current_user\n else\n false\n end\n end", "def current_user\n @current_user ||= User.find(session[:user_id]) if session[:user_id]\n #if current_user exist or equil to this user then keep going\n end", "def current_user\n if session[:user]\n session[:user][0]\n else\n false\n end\n end", "def current_user\n if session[:user_id]\n @current_user=session[:user_id]\n end\nend", "def current_user\n current_user ||= User.find_by_id(session[:user_id]) if session[:user_id]\n end", "def current_user\n if !session[:user_id] then nil else User.find(session[:user_id]) end\n end", "def current_user\n# if there is a current user rtn that if not then rtn nil\n@current_user ||= User.find(session[:user_id]) if session[:user_id]\n end", "def current_user\n @_current_user ||= session[:user_id] && User.find(:first, :conditions => ['id = ?', session[:user_id]]).id\n end", "def current_user\n User.find session[:id] if session[:id]\n end", "def current_user\n \t\treturn unless session[:user_id]\n \t\t@current_user ||= User.find_by_id(session[:user_id])\n \tend", "def current_user\n User.find(session[:user_id]) if session[:user_id]\n end", "def current_user\n User.find(session[:user_id]) if session[:user_id]\n end", "def current_user \n @current_user ||= User.find(session[:id]) if session[:id] \nend", "def current_user \n @current_user ||= User.find(session[:user_id]) if\n session[:user_id] \n end", "def current_user\n\t@current_user = User.find(session[:user_id]) if session[:user_id]\nend", "def current_user \n @current_user ||= User.find(session[:user_id]) if session[:user_id] \n end", "def current_user\n\tif session[:user_id].present?\n\t\tUser.find(session[:user_id])\t\n\tend\nend", "def current_user\n return current_session && current_session.user\n end", "def current_user\n if session[:user_id]\n @current_user ||= User.find_by(id: session[:user_id])\n end\n end", "def logged_in?\n not session[:user_id].nil?\n end", "def logged_in?\n not session[:user_id].nil?\n end", "def logged_in?\n not session[:user_id].nil?\n end", "def current_user\n @current_user ||= 
User.find(session[:user_id]) if session[:user_id]\n end", "def current_user\n return nil unless session[:user_id]\n User.get(session[:user_id])\nend", "def current_user\n \t@current_user ||= User.find_by(:id, session[:user_id])\n end", "def is_current_user?\n current_user && current_user.id == params[:id]\n end", "def current_user\n @current_user || User.find(session[:user_id]) if session[:user_id]\n \tend", "def current_user\n if session[:user_id]\n User.find(session[:user_id])\n end\nend", "def current_user\n \t@current_user || User.find(session[:user_id])\n end", "def current_user\n\n @current_user ||= User.find(session[:user_id]) if session[:user_id]\n\n end", "def current_user\n@current_user ||= User.find_by(id: session[:user_id])\nend", "def current_user\n @current_user ||= User.find(session[:user_id]) if session[:user_id] # ||= means if not, here means if there is no current user logged in, do the following\n end", "def current_user\n current_user ||= ((session[:user_id] && User.find_by_id(session[:user_id])) || 0)\n end", "def current_user\n\t return unless session[:user_id]\n\t @current_user ||= User.find_by_id(session[:user_id]) \n\tend", "def current_user\n\n#return this @current user, only if he exists, if not, find him based on the current id stored in session\n@current_user ||= User.find(session[:user_id]) if session[:user_id] #return this user if there is a user id stored in our session hash\n\nend", "def current_user\n \t@current_user || User.find(session[:user_id]) if session[:user_id]\n end", "def current_user\n # if current_user is et use it otherwise ask the data store\n @current_user ||= User.find_by(id: session[:user_id])\n end", "def loggedin?\n not session[:userid].nil?\nend", "def current_user \n\tif session[:user_id] \n\t\t@current_user = User.find(session[:user_id]) \n\tend \nend", "def current_user\n @current_user ||= User.find_by(id: session[:user_id])\n end", "def current_user\n @current_user ||= User.find_by(id: session[:user_id])\nend", "def current_user\n @current_user ||= User.find_by(id: session[:user_id])\nend", "def current_user\n @current_user ||= User.find_by_id(session[:userid]) if session[:userid]\n end", "def current_user\n @current_user ||= User.find(session[:user_id]) if session[:user_id] #find user in database based on id, if not already logged in. 
\n \n end", "def current_user\n return nil unless session[:user_id] \n @current_user ||= User.find(session[:user_id])\n end", "def current_user\n\t\t@current_user ||= (login_from_session) unless @current_user == false\n end", "def current_user\n if session[:user_id]\n @current_user ||= User.find(session[:user_id]) \n else \n @current_user = nil\n end \n return @current_user\n end", "def current_user\n\t\t\t@current_user ||= User.find_by(id: session[:user_id])\t\n\tend", "def current_user\n @current_user ||=User.find(session[:user_id]) if session[:user_id] # memoization\n end", "def current_user\n\tif session[:user_id]\n\t@current_user = User.find(session[:user_id])\n\tend\nend", "def user_logged?\n !session[:user_id].nil?\n end", "def current_user\n if session[:user_id]\n @current_user = Account.find(session[:user_id])\n @current_user\n else\n false\n end\n end", "def current_user\n \t@current_user ||= User.find(session[:userid]) if session[:userid]\n end", "def logged_in?\n unless session[:user_id].nil?\n return current_user\n end\n end", "def current_user\n @current_user ||= User.find_by(id:session[:user_id])\nend", "def current_user\n current_user ||= User.find(session[:user_id]) if session[:user_id] # First time will set it, other times will re-use the set variable\n end", "def current_user_id\n session[:user_id]\n end", "def current_user\n @_current_user ||= session[:user_id] && User.find(session[:user_id])\n end", "def current_user\n if session[:user_id]\n User.find(session[:user_id])\n else\n end\n end", "def current_user\n @current_user ||= User.find(session[:user_id]) if session[:user_id] \n #this code will return nil rather than throwing an exception. \n #'||=' memoization technique is a minor performance optimization.\n #if @current_user exist, do not run the code after '||=' else do so, ensuring we will hit database once. 
\n end", "def current_user \n\t\t@current_user ||= User.find(session[:user_id]) if session[:user_id]\n\tend", "def current_user\n User.find_by(id: session[:user_id])\n end", "def current_user\n User.find_by(id: session[:user_id])\n end", "def current_user\n session[:usr_id]\n end", "def current_user\n @current_user ||= User.find_by(id:session[:user_id])\n end", "def user_authenticated?\n !session[:user_id].nil?\n end", "def current_user\n if session[:user_id] && !User.where(id: session[:user_id]).empty?\n @current_user = User.find(session[:user_id])\n else\n log_out\n end\n end", "def current_user\n @current_user ||= User.find(session[:user_id]) if session[:user_id] \n end", "def current_user\n if session[:user_id]\n return User.find(session[:user_id])\n end\nend", "def current_user\n User.where(id: session[:user_id]).first\n end", "def current_user\n user = User.find_by(id: session[:user_id])\n end", "def current_user\r\n @_current_user ||= session[:user_id] && User.find(session[:user_id])\r\n end", "def is_this_user\n\t\[email protected] == current_user.id\n\tend", "def current_user\n\tUser.find_by(id: session[:user_id])\n end", "def current_user\n return unless session[:user_id]\n @current_user = User.find_by_id(session[:user_id])\n end", "def current_user\n\n \tif session[:user_id].present?\n \t\tUser.find(session[:user_id])\n\n \tend\n\n end", "def current_user\n\t\t @current_user ||= (login_from_session) unless @current_user == false\n end", "def current_user\n @current_user ||= User.find_by(id: session[:user_id]) if session[:user_id]\n end", "def current_user\n @current_user ||= User.find_by(id: session[:user_id]) if session[:user_id]\n end", "def current_user\n @current_user ||= User.find_by(id: session[:user_id]) if session[:user_id]\n end", "def current_user\n @current_user ||= User.find_by(id: session[:user_id]) if session[:user_id]\n end", "def current_user\n @current_user ||= User.find_by(id: session[:user_id]) if session[:user_id]\n end", "def current_user\n User.find_by(uid: session[:user]) if logged_in?\n end", "def current_user\n if session[:user_id] # if there is a session id, the current user is the one whose session id is the same as the user id \n @current_user ||= User.find_by(id: session[:user_id])\n end\n end", "def current_user\n User.find session[:user_id] if user_signed_in?\n end", "def current_user\n @user ||= User.find_by_id(session[:user_id])if logged_in?\n end", "def current_user\n \t@current_user ||= User.find(session[:user_id]) if session[:user_id]\n end" ]
[ "0.9053675", "0.8909354", "0.8758037", "0.862908", "0.85814047", "0.85674876", "0.85674876", "0.85239", "0.85052484", "0.8493934", "0.84783936", "0.84589034", "0.8444358", "0.84422547", "0.8428561", "0.8427593", "0.8391325", "0.83606094", "0.8360404", "0.8357033", "0.8333456", "0.8318023", "0.8309605", "0.82979965", "0.8296105", "0.8296105", "0.8293421", "0.82827437", "0.8279572", "0.8277501", "0.8270304", "0.8267095", "0.82630247", "0.8250884", "0.8250884", "0.8250884", "0.8242912", "0.8241346", "0.8235727", "0.82332075", "0.822686", "0.8223194", "0.8218773", "0.8206097", "0.82025397", "0.81994754", "0.81957364", "0.8195222", "0.8194493", "0.81935", "0.8190787", "0.8189373", "0.8187968", "0.81847626", "0.8180377", "0.8180377", "0.8169574", "0.8165222", "0.81641275", "0.81620425", "0.8161933", "0.81587964", "0.81587464", "0.8156177", "0.81542873", "0.8152365", "0.8145719", "0.8140512", "0.81345546", "0.813434", "0.81328356", "0.81327516", "0.8132416", "0.81306505", "0.8130617", "0.81280416", "0.81280416", "0.8126176", "0.81248194", "0.8123676", "0.81209666", "0.8117026", "0.81167924", "0.8116205", "0.81161845", "0.81159985", "0.8114556", "0.8112323", "0.8111292", "0.81097853", "0.81093293", "0.8107743", "0.8107743", "0.8107743", "0.8107743", "0.8107743", "0.8102413", "0.8101704", "0.8097869", "0.8096668", "0.8095119" ]
0.0
-1
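The truncated row above ends with negatives that are all variations of the same Rails idiom: memoize the logged-in user looked up from the session id. As an illustrative aside rather than part of the dataset, a minimal self-contained sketch of that helper (assuming a standard Rails controller context that provides `session` and an ActiveRecord `User` model) is:

# Minimal sketch of the memoized current-user helper seen in the negatives above.
# Assumes a Rails controller context (`session`) and an ActiveRecord `User` model.
module CurrentUserHelper
  # Returns the logged-in user, hitting the database at most once per request.
  def current_user
    @current_user ||= User.find_by(id: session[:user_id]) if session[:user_id]
  end

  # True when the session id resolves to an existing user.
  def logged_in?
    !current_user.nil?
  end
end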
Reads the current instance id from EC2 metadata. We assume the instance id cannot change while the current process is running and thus memoize it.
def current_vm_id @metadata_lock.synchronize do return @current_vm_id if @current_vm_id http_client = HTTPClient.new http_client.connect_timeout = METADATA_TIMEOUT # Using 169.254.169.254 is an EC2 convention for getting # instance metadata uri = "http://169.254.169.254/latest/meta-data/instance-id/" response = http_client.get(uri) unless response.status == 200 cloud_error("Instance metadata endpoint returned " \ "HTTP #{response.status}") end @current_vm_id = response.body end rescue HTTPClient::TimeoutError cloud_error("Timed out reading instance metadata, " \ "please make sure CPI is running on EC2 instance") end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def lookup_instance_id\n metadata_endpoint = 'http://169.254.169.254/latest/meta-data/'\n instance_id = Net::HTTP.get( URI.parse( metadata_endpoint + 'instance-id' ) )\n end", "def instance_id\n return ec2_meta_data('instance-id')\n end", "def current_vm_id\n # xxxx = coreCloud.current_vm_id()\n # process xxxx based on version\n # return based on version\n\n return @current_vm_id if @current_vm_id\n\n http_client = HTTPClient.new\n http_client.connect_timeout = METADATA_TIMEOUT\n headers = {}\n\n # Using 169.254.169.254 is an EC2 convention for getting\n # instance metadata\n response = http_client.put('http://169.254.169.254/latest/api/token', nil, { 'X-aws-ec2-metadata-token-ttl-seconds' => '300' })\n if response.status == 200\n headers['X-aws-ec2-metadata-token'] = response.body #body consists of the token\n end\n\n response = http_client.get('http://169.254.169.254/latest/meta-data/instance-id/', nil, headers)\n unless response.status == 200\n cloud_error('Instance metadata endpoint returned ' \\\n \"HTTP #{response.status}\")\n end\n\n @current_vm_id = response.body\n rescue HTTPClient::TimeoutError\n cloud_error('Timed out reading instance metadata, ' \\\n 'please make sure CPI is running on EC2 instance')\n end", "def get_instance_id(instance_info)\n # puts \"look up instanceId in #{instance_info.inspect}\"\n instance_info['instancesSet']['item'][0]['instanceId']\n end", "def instance_id\n `curl --connect-timeout 1 #{aws_url} 2>&1`.chomp\n end", "def my_instance_id\n Net::HTTP.get(URI('http://169.254.169.254/1.0/meta-data/instance-id'))\n end", "def me\n require 'open-uri'\n begin\n instance_id = open('http://169.254.169.254/latest/meta-data/instance-id').read\n instance instance_id\n rescue OpenURI::HTTPError => e\n nil\n end\n end", "def current_vm_id\n @metadata_lock.synchronize do\n instance_manager.instance_id\n end\n end", "def getAWSInfo\n begin\n Timeout::timeout(10) do\n aws_metadata = open('http://169.254.169.254/2014-11-05/dynamic/instance-identity/document'){ |io| data = io.read }\n aws_JSON_Information = JSON.parse(aws_metadata)\n return \"#{aws_JSON_Information['instanceId']}_#{aws_JSON_Information['region']}_#{aws_JSON_Information['accountId']}\"\n end\n rescue\n Chef::Log.warn('Unable to get AWS instance ID, Timeout while reading') \n return ''\n end\nend", "def instance_id\n data[:instance_id]\n end", "def get_instance_id\n MUTEX.synchronize do\n @instance_index = 0 if @instance_index == 10000\n @instance_index += 1\n end\n end", "def ostack_get_instance_id ()\n # The instance id is kept in @passthrough[:instance] or\n # can be obtained from @instance_data which has all instance\n # details.\n if ! @instance_data.nil? and ! @instance_data.id.nil?\n return @instance_data.id # we already know the id\n elsif @passthrough.has_key?(:instance)\n return @passthrough[:instance] # we know the id we want\n else\n @logger.debug(sprintf('unable to determine id from instance_data[%s] or passthrough specification[%s]', @instance_data, @passthrough))\n return nil # we don't have an id yet, likely a up() call\n end\n end", "def aws_instance_get(opts)\n AWS::EC2.new.instances[opts[:instance_id]]\n end", "def get_aws_instance_id_by_node_name(node_name)\n return nil unless configured?\n\n found_instance = instances_list.find { |instance| instance[:node_name] == node_name }\n found_instance.nil? ? 
nil : found_instance[:instance_id]\n end", "def get_ec2_details(instance_id)\n ec2 = Aws::EC2::Client.new()\n begin\n res = ec2.describe_instances({instance_ids:[instance_id]})\n rescue\n puts \"Can't found an EC2 instance with given ID: #{instance_id}\"\n exit\n end\n i = res.reservations[0].instances[0]\n instance = make_instance( i )\n\n begin\n res = ec2.describe_image_attribute({image_id: i.image_id, attribute: \"description\"})\n instance[:ami_desc] = res.description.value\n rescue\n instance[:ami_desc] = '-- unavailable as of now --'\n end\n\n if i.vpc_id\n instance[:vpc] = getName( ec2.describe_vpcs({vpc_ids:[i.vpc_id]}).vpcs[0].tags )\n instance[:subnet] = getName( ec2.describe_subnets({subnet_ids:[i.subnet_id]}).subnets[0].tags )\n end\n\n instance\nend", "def get_instance instance_id\n execute do\n instances.get_instance(\n instance_path(instance_id)\n )\n end\n end", "def get_instance instance_id\n instances.get_instance name: instance_path(instance_id)\n end", "def get_pid\n File.open(@lock_file, 'r').read.to_i\n end", "def get_instance_data\n JSON.parse(Net::HTTP.get(URI.parse('http://169.254.169.254/latest/dynamic/instance-identity/document')))\n end", "def find_instance_id(instance_name, list_command_output)\n file = list_command_output.tap(&:run_command).stdout\n puts \"searching instance id for instance with name : #{instance_name}...\"\n # puts file\n file.lines.each do |line|\n if line.include?(\"#{instance_name}\")\n puts \"#{line}\"\n return \"#{line}\".split(' ').first\n end\n end\nend", "def instance_data\n @instance_data ||= JSON.parse(Net::HTTP.get(URI.parse('http://169.254.169.254/latest/dynamic/instance-identity/document')))\n end", "def id( )\n return @instances.map{ |i| i[:aws_instance_id] }\n end", "def get_pid_and_profile\n ck_valid\n \n # The PID file is required - If it is not found, it means we\n # cannot get any information for this instance. Bail out.\n raise InvalidInstance, _(\"Instance %s for %s is not running or did not \" +\n \"register its PID \") % \n [@num, @laboratory.name] unless File.exists? pid_file\n @pid = File.read(pid_file).chomp.to_i\n\n # However, the profile file is merely informational. We can safely\n # ignore it if it does not exist or is not readable\n begin\n @profile = Profile.find_by_id(File.read(prof_file).to_i)\n rescue Errno::ENOENT, Errno::EACCES\n @profile = nil\n end\n end", "def get_aws_instance_id_by_config_id(configuration_id, node_name)\n return nil unless configured?\n\n found_instance = instances_list.find do |instance|\n instance[:node_name] == node_name &&\n instance[:configuration_id] == configuration_id\n end\n found_instance.nil? ? 
nil : found_instance[:instance_id]\n end", "def ec2_instance(instance_id)\n EC2Instance.new(instance_id)\n end", "def ec2_instance(instance_id)\n EC2Instance.new(instance_id)\n end", "def instance_id\n @grpc.name.split(\"/\")[3]\n end", "def instance_id\n @grpc.name.split(\"/\")[3]\n end", "def instance_id\n @grpc.name.split(\"/\")[3]\n end", "def instance_id\n @grpc.name.split(\"/\")[3]\n end", "def instance_id\n @grpc.name.split(\"/\")[3]\n end", "def instance_id\n @grpc.name.split(\"/\")[3]\n end", "def instance_id\n @grpc.name.split(\"/\")[3]\n end", "def get_next_id\n id = java.lang.System.nanoTime.to_s\n $log.info(\"*** get_next_id: \" + id)\n return id\n end", "def right_aws\n cloud_provider.ec2.describe_instances instance_id\n end", "def instance_profile_id\n data.instance_profile_id\n end", "def get_next_id\r\n id = java.lang.System.nanoTime.to_s\r\n $log.info(\"*** get_next_id: \" + id)\r\n return id\r\n end", "def aws_find_instance( iprops )\n if iprops[:id]\n @aws_instances.find do |r|\n r[:id] == iprops[:id] && r[:region] == iprops[:region]\n end\n else\n [ :internet_name, :internet_ip, :name ].inject( nil ) do |found, key|\n found || @aws_instances.find { |r| r[key] == iprops[key] }\n end\n end\n end", "def spot_instance_request_id\n data[:spot_instance_request_id]\n end", "def get_instance_by_id(id)\n get_instances_description.select {|a| a.instance_id == id}[0] rescue nil\n end", "def read_container_id\n @id = ContainerStateFiles.read_container_id(store_address)\n cid = @id\n # SystemDebug.debug(SystemDebug.containers, 'read container from file ', @container_id)\n if @id == -1 || @id.nil? # && set_state != :nocontainer\n info = container_api.inspect_container_by_name(@container_name) # docker_info\n info = info[0] if info.is_a?(Array)\n if info.key?(:RepoTags)\n #No container by that name and it will return images by that name WTF\n @id = -1\n else\n @id = info[:Id] if info.key?(:Id)\n end\n end\n save_state unless cid == @id\n @id\n rescue EnginesException\n clear_cid unless cid == -1\n @id = -1\n end", "def read_pid\n File.read(pid_path).to_i\n end", "def read_pid\n File.read(pid_path).to_i\n end", "def db_instance_arn\n data[:db_instance_arn]\n end", "def get_pid\n File.exists?(@pid_file) ? 
File.read(@pid_file).strip : 0\n end", "def get_instance(id)\n begin\n instance = @ec2.instance(id)\n if instance.exists?\n return instance\n else\n raise RuntimeError.new(\"Instance #{id} does not exist\")\n end\n rescue => e\n raise e\n end\n end", "def id\n vm_info['VM']['ID'].to_i\n end", "def set_ec2_instance\n @ec2_instance = Ec2Instance.find(params[:id])\n end", "def pid\n File.read(@pid_file).to_i\n end", "def get_id\n id = Thread.current[:id] \n id = 1 if @java_flag\n id\n end", "def pid\n File.read(@pid_file).strip.to_i\n end", "def current_instance_number\n sql_select_one 'SELECT Instance_Number FROM v$Instance'\n end", "def instance(instance_id)\n instances([instance_id])[0]\n end", "def ec2_instance_data # rubocop:disable Metrics/MethodLength, Metrics/AbcSize\n i = {\n :placement => {\n :availability_zone => config[:availability_zone]\n },\n :instance_type => config[:instance_type],\n :ebs_optimized => config[:ebs_optimized],\n :image_id => config[:image_id],\n :key_name => config[:aws_ssh_key_id],\n :subnet_id => config[:subnet_id],\n :private_ip_address => config[:private_ip_address]\n }\n i[:block_device_mappings] = block_device_mappings unless block_device_mappings.empty?\n i[:security_group_ids] = config[:security_group_ids] if config[:security_group_ids]\n i[:user_data] = prepared_user_data if prepared_user_data\n if config[:iam_profile_name]\n i[:iam_instance_profile] = { :name => config[:iam_profile_name] }\n end\n if !config.fetch(:associate_public_ip, nil).nil?\n i[:network_interfaces] =\n [{\n :device_index => 0,\n :associate_public_ip_address => config[:associate_public_ip],\n :delete_on_termination => true\n }]\n # If specifying `:network_interfaces` in the request, you must specify\n # network specific configs in the network_interfaces block and not at\n # the top level\n if config[:subnet_id]\n i[:network_interfaces][0][:subnet_id] = i.delete(:subnet_id)\n end\n if config[:private_ip_address]\n i[:network_interfaces][0][:private_ip_address] = i.delete(:private_ip_address)\n end\n if config[:security_group_ids]\n i[:network_interfaces][0][:groups] = i.delete(:security_group_ids)\n end\n end\n i\n end", "def current_container_id\n cgroup_content = File.read(\"/proc/1/cgroup\")\n @running_in_container = cgroup_content.include? \"docker/\"\n if @running_in_container\n cgroup_content.split.each { |line|\n if line.include? \"docker/\" or line.include? 
\"kubepods/\"\n parts = line.split(\":\")\n res_parts = parts[2].split(\"/\")\n return res_parts[-1][0..12]\n end\n }\n end\n nil\nend", "def instance instance_id\n ensure_service!\n grpc = service.get_instance instance_id\n Instance.from_grpc grpc, service\n rescue Google::Cloud::NotFoundError\n nil\n end", "def get_ingest_id( filename )\n\n begin\n File.open( \"#{filename}.id\", 'r') do |file|\n id = file.read( )\n return id\n end\n rescue => e\n end\n return ''\n end", "def current_id(refresh = false)\n @current_id = nil if refresh\n @current_id ||= @redis.get(key).to_i\n end", "def instance_ec2\n @@cache[:fedora_image] = ImageService::create_image(nil,\n :broker_image_id => 'fedora',\n :provider_image_id => 'ami-bafcf3ce',\n :provider_id => @@cache[:ec2_provider].id,\n )\n @@cache[:instance_ec2] = LaunchService.launch(Seeding[:pool_ec2].id, 'test instance '+Time.now.to_i.to_s, Seeding[:hwp_1].name, Seeding[:fedora_image].broker_image_id,\n #:flavor => 'm1-small',\n :keyname => 'mpovolny'\n )\n end", "def get_pvm_instance(instance_id)\n get(\"cloud-instances/#{guid}/pvm-instances/#{instance_id}\")\n end", "def pid\n File.open( pid_path ) { |f| return f.gets.to_i } if File.exist?(pid_path)\n end", "def id\n @instance.id\n end", "def aws_instance_elastic_ip_get(opts)\n opts[:instance].elastic_ip\n end", "def get_agent_image_id(image_name = agent_ami_name)\n image = ec2.client.describe_images(\n :filters => [{\n :name => \"name\",\n :values => [image_name]\n }]\n ).flat_map(&:images).first\n raise(\"Unable to find AMI Image #{image_name} to launch Smartstate agent\") if image.nil?\n\n _log.info(\"AMI Image: #{image_name} [#{image.image_id}] is used to launch smartstate agent.\")\n\n image.image_id\n end", "def ec2\n return @ec2\n end", "def pid\n `cat #{pid_file_path}`.gsub(\"\\n\", \"\")\n end", "def random_instance_id # TODO: don't count on this for security; migrate to a proper instance id, in a cookie, at least twice as long, and with verified randomness\n \"%08x\" % rand( 1 << 32 ) + \"%08x\" % rand( 1 << 32 ) # rand has 52 bits of randomness; call twice to get 64 bits\n end", "def get_pcloud_instance\n get(\"cloud-instances/#{guid}\")\n end", "def current\n if global_id = @cursor_instance.get_value\n global_id = global_id.to_i\n end\n # return ID\n return global_id\n end", "def server_id(server_name)\n Puppet.warning \"[DEPRICATED]: Use find_match in common.rb\"\n @compute.servers.each do |server|\n return server.id if server.name == server_name\n end\n return nil\n end", "def pid\n @pid ||= metadata.fetch(@args.command, nil)\n end", "def read_replica_source_db_instance_identifier\n data[:read_replica_source_db_instance_identifier]\n end", "def pid()\n #This is a stub, used for indexing\n end", "def get_existing_docker_image()\n @image = Docker::Image.get(ENV['IMAGE_ID'])\n set :docker_image, @image.id\n\n docker_image = @image.id\n puts \"Using supplied image id: #{docker_image}\"\nend", "def pid\n return @pid if @pid_set\n @pid = File.readlines(pid_file).first.strip.to_i if File.exist?(pid_file)\n @pid ||= 0\n @pid_set = true\n @pid\n end", "def image_id\n data[:image_id]\n end", "def next_uid()\n\t\t\treturn @metadata.attributes[:next_uid].to_i\n\t\tend", "def _execution_id\n attributes['id']\n end", "def get_aws_region\n\n\n begin\n\n url = 'http://169.254.169.254/latest/dynamic/instance-identity/document'\n uri = URI(url)\n response = Net::HTTP.get(uri)\n\n hashOfLookupValues = JSON.parse(response)\n lookupRegion = hashOfLookupValues[\"region\"]\n\n rescue\n logonFailed('Unable 
to perform region lookup. Is this an EC2 instance? and does it have access to http://169.254.169.254')\n end\n\n return lookupRegion\n\n end", "def run_id()\n if @handle.ptr == nil\n raise \"this is disposed\"\n end\n result = Native.RunMetadata_run_id(@handle.ptr)\n result\n end", "def id\n @__metadata__.key || @id\n end", "def get_machine_id()\n machine_id_filepath = \".vagrant/machines/default/virtualbox/id\"\n\n if not File.exists? machine_id_filepath\n # VM hasn't been created yet.\n return false\n end\n\n # This is probably not a great way to do this: shell out to the cat command.\n # It seems likely that ruby has a get-file-contents function somewhere,\n # but I'm definitely not a ruby dev right now.\n machine_id = `cat #{machine_id_filepath}`\nend", "def identity()\n\t\t\treturn @metadata.attributes[:identity].to_i\n\t\tend", "def load_ami\n ami_id = nil\n\n error \"No file '#{AMI_ID_FILENAME}' found.\" if !File.file?(AMI_ID_FILENAME)\n\n credentials = load_credentials\n region = credentials[:aws_region]\n\n File.open(AMI_ID_FILENAME, \"r\") do |f|\n f.each_line do |line|\n if(!line.start_with?(\"#\") && line.length > 0)\n # We may add other options \n if(line.start_with?(region))\n ami_id = line.strip.gsub(\"#{region}: \", '')\n end\n end\n end\n end\n\n error \"Error: No AMI name set in the '#{AMI_ID_FILENAME}' file.\" if ami_id.nil?\n error \"AMI not available in #{region} (or there was a typo), contact @cguess to make a copy\" if(ami_id.start_with?('ami-') == false)\n\n return ami_id\n end", "def instance_config instance_config_id\n ensure_service!\n grpc = service.get_instance_config instance_config_id\n Instance::Config.from_grpc grpc\n rescue Google::Cloud::NotFoundError\n nil\n end", "def key_id\n metadata[\"keyid\"]\n end", "def process_id\n\n\t\t::Pantheios::Core.process_id\n\tend", "def pid\n process_pid = ::Process.pid\n if @ppid != process_pid\n @pid = nil\n @ppid = process_pid\n end\n @pid ||= SecureRandom.urlsafe_base64.tap { |str| @prefix_len = str.length }\n end", "def myname(compute)\n\t\t# lookup the name of the running instance\n\t\tinstanceid = Facter.value('ec2_instance_id')\n\t\tif ( instanceid =~ /i-/ )\n\t\t\treturn lookupname(compute,instanceid)\n\t\telse\n\t\t\traise \"ebsvol[aws]->myname: Sorry, I can't find my instanceId - please check Facter fact ec2_instance_id is available\"\n\t\tend\n\t\tnil\n\tend", "def pid\n return @pid unless @pid.nil?\n\n @pid = (open(pidpath, 'r').read.to_i if pidfile_exists?)\n end", "def next_id\n id = nil\n MinterState.transaction do\n state = read\n minter = ::Noid::Minter.new(state)\n id = minter.mint\n write!(minter)\n end # transaction\n id\n end", "def get_combustion_pid\n begin\n if File.exists? @@options[:pid_file]\n file = File.open(@@options[:pid_file], \"r\")\n contents = file.read\n combustion_pid = Integer(contents.split(\"\\n\")[0])\n file.close\n combustion_pid\n else\n nil\n end\n rescue ArgumentError => e\n file.close\n nil\n end\n end", "def process_id\n attributes.fetch(:processId)\n end", "def pid\n @pid ||= down? ? 
nil : @raw[2]\n end", "def associate_address(instance_id)\n new_ip = next_unused_elastic_ip\n vputs(\"Assigning #{new_ip} to the ec2 instance #{instance_id}\")\n ec2.associate_address(instance_id, new_ip)\n loop do\n if describe_instance(:instance_id => instance_id).public_ip == new_ip\n return new_ip\n end\n sleep 1\n end\n end", "def id_instance_attributes(instance_id)\n @id_instance_attributes ||= {}\n @id_instance_attributes[instance_id] ||= InstanceAttributes.new(instance_id)\n end", "def retrieve_image_id\n name = config['image_name']\n\n img_data = connection.image_list.body[\"DATA\"].find { |i| i[\"LABEL\"] == name }\n if img_data.nil?\n names = connection.image_list.body[\"DATA\"].map { |i| i[\"LABEL\"] }.join(\", \")\n add_error \"There is no image named #{name}. \" \\\n \"Options are: #{names}\"\n else\n id = img_data[\"IMAGEID\"]\n end\n id\n end", "def get_mockcourse_pid\n script = File.expand_path('../../../mockcourse/getpid', __FILE__)\n out = `#{script}`\n pids = out.lines.map(&:chomp)\n pid = 0\n _delta = 0\n for x in pids\n delta = (x.to_i - @@pid).abs\n if pid == 0 or (pid != 0 and delta < _delta)\n _delta = delta\n pid = x\n end\n end\n pid\n end", "def getpid\n sax_document = SaxDocumentGetNextPID.new\n pid_doc = @repository.next_pid(:namespace => @namespace)\n Nokogiri::XML::SAX::Parser.new(sax_document).parse(pid_doc)\n return sax_document.pids.shift\n end" ]
[ "0.7583619", "0.75733006", "0.7213643", "0.7139777", "0.70686495", "0.70601314", "0.6842846", "0.68295383", "0.6734501", "0.66981256", "0.6679955", "0.6452198", "0.63420796", "0.61435443", "0.5894712", "0.5879966", "0.5863609", "0.57861704", "0.5744457", "0.57294726", "0.5706086", "0.5687161", "0.56736153", "0.5652471", "0.5613566", "0.5613566", "0.5610972", "0.5610972", "0.5610972", "0.5610972", "0.5610972", "0.5610972", "0.5610972", "0.56079245", "0.5604465", "0.55994934", "0.5559336", "0.5549053", "0.552489", "0.55124074", "0.549982", "0.5474755", "0.5474755", "0.5470501", "0.54396856", "0.54358894", "0.5415361", "0.5386686", "0.5360099", "0.5347219", "0.5339731", "0.53378177", "0.5316216", "0.52983135", "0.52948636", "0.52927226", "0.52900356", "0.52799714", "0.5263395", "0.5263284", "0.52630055", "0.52529746", "0.5215666", "0.5212497", "0.521139", "0.5194779", "0.5184029", "0.51769465", "0.5153496", "0.51483464", "0.51430094", "0.51394796", "0.5114636", "0.5096365", "0.5093719", "0.5093152", "0.5090191", "0.5088598", "0.5084833", "0.50621706", "0.505884", "0.503422", "0.50304586", "0.5028448", "0.50230885", "0.5020174", "0.501392", "0.50009584", "0.4997664", "0.4996809", "0.49888876", "0.4986966", "0.49860832", "0.49781266", "0.49778843", "0.4971315", "0.49712712", "0.49650225", "0.4963288" ]
0.77501756
1
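For context on the row above: the document and its negatives all query the EC2 instance metadata service at 169.254.169.254. As an illustrative sketch outside the dataset rows (it only returns anything useful when run on an actual EC2 instance and times out quickly elsewhere), the same lookup can be done with Ruby's standard library, including the IMDSv2 token step that appears in one of the negatives:

require 'net/http'

# Illustrative sketch: fetch the current EC2 instance id via IMDSv2.
# Meaningful only on an EC2 instance; raises on timeout elsewhere.
def current_instance_id
  metadata_host = URI('http://169.254.169.254')
  Net::HTTP.start(metadata_host.host, metadata_host.port,
                  open_timeout: 1, read_timeout: 1) do |http|
    # IMDSv2: obtain a short-lived session token first.
    token = http.request(
      Net::HTTP::Put.new('/latest/api/token',
                         'X-aws-ec2-metadata-token-ttl-seconds' => '300')
    ).body
    # Then read the instance id using that token.
    http.request(
      Net::HTTP::Get.new('/latest/meta-data/instance-id',
                         'X-aws-ec2-metadata-token' => token)
    ).body
  end
end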
Create an EC2 instance and wait until it is in the running state
def create_vm(agent_id, stemcell_id, vm_type, network_spec, disk_locality = nil, environment = nil) with_thread_name("create_vm(#{agent_id}, ...)") do # do this early to fail fast stemcell = StemcellFinder.find_by_id(@ec2_client, stemcell_id) begin instance, block_device_agent_info = @instance_manager.create( agent_id, stemcell.image_id, vm_type, network_spec, (disk_locality || []), environment, options, ) logger.info("Creating new instance '#{instance.id}'") NetworkConfigurator.new(network_spec).configure(@ec2_client, instance) registry_settings = initial_agent_settings( agent_id, network_spec, environment, stemcell.root_device_name, block_device_agent_info ) registry.update_settings(instance.id, registry_settings) instance.id rescue => e # is this rescuing too much? logger.error(%Q[Failed to create instance: #{e.message}\n#{e.backtrace.join("\n")}]) instance.terminate(fast_path_delete?) if instance raise e end end end
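Before this row continues with its metadata and negatives below, here is a standalone illustration of the query's technique using the aws-sdk-ec2 gem's built-in waiter. It is a sketch rather than the CPI's own code, and the AMI id, key name and subnet id are placeholders:

require 'aws-sdk-ec2'

# Illustrative sketch (aws-sdk-ec2 v3): launch one instance and block until it is running.
ec2 = Aws::EC2::Resource.new(region: 'us-east-1')

instance = ec2.create_instances(
  image_id: 'ami-xxxxxxxx',     # placeholder AMI id
  instance_type: 't3.micro',
  key_name: 'my-key',           # placeholder key pair name
  subnet_id: 'subnet-xxxxxxxx', # placeholder subnet id
  min_count: 1,
  max_count: 1
).first

instance.wait_until_running     # polls EC2 until the instance state is 'running'
puts "#{instance.id} is running"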
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def launch\n puts \"==> Creating EC2 instance...\"\n\n @instance = @aws_ec2.instances.create( @config.env.merge( { \"key_name\" => key_name, \"security_groups\" => [security_group_name] } ) )\n @instance.tag(\"environment\", {value: @config.environment})\n\n while @instance.status == :pending\n print \".\"\n sleep 2\n end\n\n # Sleep for 30 more seconds\n 15.times do\n print \".\"\n sleep 2\n end\n puts \".\" # new line\n\n puts \"==> Successfully created EC2 instance '#{@instance.id}'\"\n end", "def create_resource\n # response = ec2.run_instances(\n # node_config(max_count: 1, self.to_h)\n # ).instances.first\n\n instance_attr_accessor response\n # id = @response[:instance_id]\n begin\n ec2.wait_until(:instance_running, instance_ids: [id]) do\n logger.info \"waiting for #{ids.count} Neurons to start...\"\n end\n rescue Aws::Waiters::Errors::WaiterFailed => e\n # TODO: retry stuff\n # redo unless (count += 1 <=3 )\n end\n\n yield self if block_given?\n self\n end", "def createEc2Instance\n\t\t name = @server[\"name\"]\n\t\t node = @server['mu_name']\n\t\t\tbegin\n\t\t\t\t@server['iam_role'] = MU::Server.createIAMProfile(\"Server-\"+name, base_profile: @server['iam_role'], extra_policies: @server['iam_policies'])\n\t\t\trescue Aws::EC2::Errors::RequestLimitExceeded => e\n\t\t\t\tsleep 10\n\t\t\t\tretry\n\t\t\tend\n\t\t\t@server['iam_role'] = @server['iam_role']\n\n\t\t\tbegin\n\t\t\t\[email protected]\n\t\t\trescue Aws::EC2::Errors::RequestLimitExceeded => e\n\t\t\t\tsleep 10\n\t\t\t\tretry\n\t\t\tend\n\n\t\t instance_descriptor = {\n\t\t :image_id => @server[\"ami_id\"],\n\t\t :key_name => @deploy.keypairname,\n\t\t :instance_type => @server[\"size\"],\n\t\t :disable_api_termination => true,\n\t\t :min_count => 1,\n\t\t :max_count => 1,\n\t\t\t\t:network_interfaces => [\n\t\t\t\t\t{\n\t\t\t\t\t\t:associate_public_ip_address => name[\"associate_public_ip\"]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t }\n\t\t\t\n\t\t\tif !@server['private_ip'].nil?\n\t\t\t\tinstance_descriptor[:private_ip_address] = @server['private_ip']\n\t\t\tend\n\n\t\t\tvpc_id=subnet_id=nat_host_name=nat_ssh_user = nil\n\t\t\tsubnet_retries = 0\n\t\t\tif !@server[\"vpc\"].nil?\n\t\t\t\tbegin\n\t\t\t\t\tvpc_id, subnet_ids, nat_host_name, nat_ssh_user = MU::VPC.parseVPC(@server['vpc'])\n\t\t\t\trescue Aws::EC2::Errors::ServiceError => e\n\t\t\t\t\tMU.log e.message, MU::ERR, details: @server\n\t\t\t\t\tif subnet_retries < 5\n\t\t\t\t\t subnet_retries = subnet_retries + 1\n\t\t\t\t\t sleep 15\n\t\t\t\t\t retry\n\t\t\t\t\tend\n\t\t\t\t\traise e\n\t\t\t\tend\n\t\t\t\tsubnet_id = subnet_ids.first\n\t\t\t\tif subnet_id.nil? or subnet_id.empty?\n\t\t\t\t\tMU.log \"Got null Subnet id out of #{@server['vpc']}\", MU::ERR\n\t\t\t\t\traise \"deploy failure\"\n\t\t\t\tend\n\n\t\t\t\tMU.log \"Deploying #{node} into VPC #{vpc_id} Subnet #{subnet_id}\"\n\n\t\t\t\tif !@server[\"vpc\"][\"nat_host_name\"].nil? 
or !@server[\"vpc\"][\"nat_host_id\"].nil?\n\t\t\t\t\tadmin_sg = MU::Server.punchAdminNAT(@server, node)\n\t\t\t\telse\n\t\t\t\t\tadmin_sg = MU::FirewallRule.setAdminSG(vpc_id: vpc_id, region: @server['region'])\n\t\t\t\tend\n\n\t\t\t\tinstance_descriptor[:subnet_id] = subnet_id\n\t\t\t\tnode_sg = MU::FirewallRule.createEc2SG(\n\t\t\t\t\t\t@server[\"name\"].upcase,\n\t\t\t\t\t\t@server[\"ingress_rules\"],\n\t\t\t\t\t\tdescription: \"SG holes for #{node}\",\n\t\t\t\t\t\tvpc_id: vpc_id,\n\t\t\t\t\t\tregion: @server['region']\n\t\t\t\t)\n\t\t\telse\n\t\t\t\tadmin_sg = MU::FirewallRule.setAdminSG(region: @server['region'])\n\t\t\t\tnode_sg = MU::FirewallRule.createEc2SG(\n\t\t\t\t\t\t@server[\"name\"].upcase,\n\t\t\t\t\t\t@server[\"ingress_rules\"],\n\t\t\t\t\t\tdescription: \"SG holes for #{node}\",\n\t\t\t\t\t\tregion: @server['region']\n\t\t\t\t)\n\t\t\tend\n\t\t\tsecurity_groups = Array.new\n\t\t\tsecurity_groups << admin_sg\n\t\t\tsecurity_groups << node_sg\n\t\t\tif !@server[\"add_firewall_rules\"].nil?\n\t\t\t\t@server[\"add_firewall_rules\"].each { |acl|\n\t\t\t\t\tsg = MU::FirewallRule.find(sg_id: acl[\"rule_id\"], name: acl[\"rule_name\"], region: @server['region'])\n\t\t\t\t\tif sg.nil?\n\t\t\t\t\t\tMU.log \"Couldn't find dependent security group #{acl} for server #{node}\", MU::ERR\n\t\t\t\t\t\traise \"deploy failure\"\n\t\t\t\t\tend\n\t\t\t\t\tsecurity_groups << sg.group_id\n\t\t\t\t}\n\t\t\tend\n\n\t\t\tinstance_descriptor[:security_group_ids] = security_groups\n\n\t\t if [email protected]? and [email protected]?\n\t\t instance_descriptor[:user_data] = Base64.encode64(@userdata)\n\t\t end\n\n\t\t if !@server[\"iam_role\"].nil?\n\t\t instance_descriptor[:iam_instance_profile] = { name: @server[\"iam_role\"]}\n\t\t end\n\n\t\t\tconfigured_storage = Array.new\n\t\t\tif @server[\"storage\"]\n\t\t\t\t@server[\"storage\"].each { |vol|\n\t\t\t\t\tconfigured_storage << MU::Server.convertBlockDeviceMapping(vol)\n\t\t\t\t}\n\t\t\tend\n\t\t\n\t\t\tMU::Server.waitForAMI(@server[\"ami_id\"], region: @server['region'])\n\n\t\t\tinstance_descriptor[:block_device_mappings] = configured_storage\n\t\t\tinstance_descriptor[:block_device_mappings].concat(@ephemeral_mappings)\n\n\t\t\tinstance_descriptor[:monitoring] = { enabled: @server['monitoring'] }\n\n\t\t\tMU.log \"Creating EC2 instance #{node}\"\n\t\t\tMU.log \"Instance details for #{node}: #{instance_descriptor}\", MU::DEBUG\n#\t\t\t\tif instance_descriptor[:block_device_mappings].empty?\n#\t\t\t\t\tinstance_descriptor.delete(:block_device_mappings)\n#\t\t\t\tend\n#pp instance_descriptor[:block_device_mappings]\n\t\t\tretries = 0\n\t\t\tbegin\n\t\t\t\tresponse = MU.ec2(@server['region']).run_instances(instance_descriptor)\n\t\t\trescue Aws::EC2::Errors::InvalidGroupNotFound, Aws::EC2::Errors::InvalidSubnetIDNotFound, Aws::EC2::Errors::InvalidParameterValue, Aws::EC2::Errors::RequestLimitExceeded => e\n\t\t\t\tif retries < 10\n\t\t\t\t\tif retries > 7\n\t\t\t\t\t\tMU.log \"Seeing #{e.inspect} while trying to launch #{node}, retrying a few more times...\", MU::WARN, details: instance_descriptor\n\t\t\t\t\tend\n\t\t\t\t\tsleep 10\n\t\t\t\t\tretries = retries + 1\n\t\t\t\t\tretry\n\t\t\t\telse\n\t\t\t\t\traise e\n\t\t\t\tend\n\t\t\tend\n\n\t\t\tinstance = response.instances.first\n\t\t\tMU.log \"#{node} (#{instance.instance_id}) coming online\"\n\n\n\t\t\treturn instance\n\n\t\tend", "def createEc2Instance\n\n instance_descriptor = {\n :image_id => @config[\"image_id\"],\n :key_name => @deploy.ssh_key_name,\n :instance_type => 
@config[\"size\"],\n :disable_api_termination => true,\n :min_count => 1,\n :max_count => 1\n }\n\n instance_descriptor[:iam_instance_profile] = getIAMProfile\n\n security_groups = myFirewallRules.map { |fw| fw.cloud_id }\n if security_groups.size > 0\n instance_descriptor[:security_group_ids] = security_groups\n else\n raise MuError, \"Didn't get any security groups assigned to be in #{@mu_name}, that shouldn't happen\"\n end\n\n if @config['private_ip']\n instance_descriptor[:private_ip_address] = @config['private_ip']\n end\n\n if [email protected]? and @config.has_key?(\"vpc\")\n subnet = mySubnets.sample\n if subnet.nil?\n raise MuError, \"Got null subnet id out of #{@config['vpc']}\"\n end\n MU.log \"Deploying #{@mu_name} into VPC #{@vpc.cloud_id} Subnet #{subnet.cloud_id}\"\n allowBastionAccess\n instance_descriptor[:subnet_id] = subnet.cloud_id\n end\n\n if [email protected]? and [email protected]?\n instance_descriptor[:user_data] = Base64.encode64(@userdata)\n end\n\n MU::Cloud::AWS::Server.waitForAMI(@config[\"image_id\"], region: @region, credentials: @credentials)\n\n instance_descriptor[:block_device_mappings] = MU::Cloud::AWS::Server.configureBlockDevices(image_id: @config[\"image_id\"], storage: @config['storage'], region: @region, credentials: @credentials)\n\n instance_descriptor[:monitoring] = {enabled: @config['monitoring']}\n\n if @tags and @tags.size > 0\n instance_descriptor[:tag_specifications] = [{\n :resource_type => \"instance\",\n :tags => @tags.keys.map { |k|\n { :key => k, :value => @tags[k] }\n }\n }]\n end\n\n MU.log \"Creating EC2 instance #{@mu_name}\", details: instance_descriptor\n\n instance = resp = nil\n loop_if = Proc.new {\n instance = resp.instances.first if resp and resp.instances\n resp.nil? or resp.instances.nil? or instance.nil?\n }\n\n bad_subnets = []\n mysubnet_ids = if mySubnets\n mySubnets.map { |s| s.cloud_id }\n end\n begin\n MU.retrier([Aws::EC2::Errors::InvalidGroupNotFound, Aws::EC2::Errors::InvalidSubnetIDNotFound, Aws::EC2::Errors::InvalidParameterValue], loop_if: loop_if, loop_msg: \"Waiting for run_instances to return #{@mu_name}\") {\n resp = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).run_instances(instance_descriptor)\n }\n rescue Aws::EC2::Errors::Unsupported => e\n bad_subnets << instance_descriptor[:subnet_id]\n better_subnet = (mysubnet_ids - bad_subnets).sample\n if e.message !~ /is not supported in your requested Availability Zone/ and\n (mysubnet_ids.nil? or mysubnet_ids.empty? or\n mysubnet_ids.size == bad_subnets.size or\n better_subnet.nil? or better_subnet == \"\")\n raise MuError.new e.message, details: mysubnet_ids\n end\n instance_descriptor[:subnet_id] = (mysubnet_ids - bad_subnets).sample\n if instance_descriptor[:subnet_id].nil?\n raise MuError.new \"Specified subnet#{bad_subnets.size > 1 ? \"s do\" : \" does\"} not support instance type #{instance_descriptor[:instance_type]}\", details: bad_subnets\n end\n MU.log \"One or more subnets does not support instance type #{instance_descriptor[:instance_type]}, attempting with #{instance_descriptor[:subnet_id]} instead\", MU::WARN, details: bad_subnets\n retry\n rescue Aws::EC2::Errors::InvalidRequest => e\n MU.log e.message, MU::ERR, details: instance_descriptor\n raise e\n end\n\n MU.log \"#{@mu_name} (#{instance.instance_id}) coming online\"\n\n instance\n end", "def ec2_instance_ready?(instance)\n instance.exists? 
&& instance.status.eql?(:running) && systems_ok(instance)\n end", "def instance_ec2\n @@cache[:fedora_image] = ImageService::create_image(nil,\n :broker_image_id => 'fedora',\n :provider_image_id => 'ami-bafcf3ce',\n :provider_id => @@cache[:ec2_provider].id,\n )\n @@cache[:instance_ec2] = LaunchService.launch(Seeding[:pool_ec2].id, 'test instance '+Time.now.to_i.to_s, Seeding[:hwp_1].name, Seeding[:fedora_image].broker_image_id,\n #:flavor => 'm1-small',\n :keyname => 'mpovolny'\n )\n end", "def create_ec2_instance(attrs)\n instance = ec2.instances.create(attrs)\n perform_instance_checks(instance)\n instance\n end", "def store_image instance, tags\n begin\n \n puts \"waiting 2 minutes before starting to take the image...\"\n sleep 120\n puts \"creating image...\"\n \n image = @ec2.images.create( \n :instance_id => instance.id,\n :no_reboot => true,\n :description => \"automaticaly created #{tags[ 'image_type' ]} image\",\n :name => \"#{tags[ 'image_type' ]} #{Digest::SHA1.hexdigest tags.inspect}\" )\n \n wait_for_image image\n \n tags.each do | key, value |\n image.add_tag( key, :value => value )\n end \n ensure\n stop_instance instance\n end\n end", "def createEc2Instance\n name = @config[\"name\"]\n node = @config['mu_name']\n\n instance_descriptor = {\n :image_id => @config[\"ami_id\"],\n :key_name => @deploy.ssh_key_name,\n :instance_type => @config[\"size\"],\n :disable_api_termination => true,\n :min_count => 1,\n :max_count => 1\n }\n\n arn = nil\n if @config['generate_iam_role']\n role = @deploy.findLitterMate(name: @config['name'], type: \"roles\")\n s3_objs = [\"#{@deploy.deploy_id}-secret\", \"#{role.mu_name}.pfx\", \"#{role.mu_name}.crt\", \"#{role.mu_name}.key\", \"#{role.mu_name}-winrm.crt\", \"#{role.mu_name}-winrm.key\"].map { |file| \n 'arn:'+(MU::Cloud::AWS.isGovCloud?(@config['region']) ? \"aws-us-gov\" : \"aws\")+':s3:::'+MU.adminBucketName+'/'+file\n }\n role.cloudobj.injectPolicyTargets(\"MuSecrets\", s3_objs)\n\n @config['iam_role'] = role.mu_name\n arn = role.cloudobj.createInstanceProfile\n# @cfm_role_name, @cfm_prof_name\n\n elsif @config['iam_role'].nil?\n raise MuError, \"#{@mu_name} has generate_iam_role set to false, but no iam_role assigned.\"\n end\n if !@config[\"iam_role\"].nil?\n if arn\n instance_descriptor[:iam_instance_profile] = {arn: arn}\n else\n instance_descriptor[:iam_instance_profile] = {name: @config[\"iam_role\"]}\n end\n end\n\n security_groups = []\n if @dependencies.has_key?(\"firewall_rule\")\n @dependencies['firewall_rule'].values.each { |sg|\n security_groups << sg.cloud_id\n }\n end\n\n if security_groups.size > 0\n instance_descriptor[:security_group_ids] = security_groups\n else\n raise MuError, \"Didn't get any security groups assigned to be in #{@mu_name}, that shouldn't happen\"\n end\n\n if !@config['private_ip'].nil?\n instance_descriptor[:private_ip_address] = @config['private_ip']\n end\n\n vpc_id = subnet = nil\n if [email protected]? 
and @config.has_key?(\"vpc\")\n subnet_conf = @config['vpc']\n subnet_conf = @config['vpc']['subnets'].first if @config['vpc'].has_key?(\"subnets\") and !@config['vpc']['subnets'].empty?\n tag_key, tag_value = subnet_conf['tag'].split(/=/, 2) if !subnet_conf['tag'].nil?\n\n subnet = @vpc.getSubnet(\n cloud_id: subnet_conf['subnet_id'],\n name: subnet_conf['subnet_name'],\n tag_key: tag_key,\n tag_value: tag_value\n )\n if subnet.nil?\n raise MuError, \"Got null subnet id out of #{subnet_conf['vpc']}\"\n end\n MU.log \"Deploying #{node} into VPC #{@vpc.cloud_id} Subnet #{subnet.cloud_id}\"\n punchAdminNAT\n instance_descriptor[:subnet_id] = subnet.cloud_id\n end\n\n if [email protected]? and [email protected]?\n instance_descriptor[:user_data] = Base64.encode64(@userdata)\n end\n\n MU::Cloud::AWS::Server.waitForAMI(@config[\"ami_id\"], region: @config['region'], credentials: @config['credentials'])\n\n # Figure out which devices are embedded in the AMI already.\n image = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_images(image_ids: [@config[\"ami_id\"]]).images.first\n ext_disks = {}\n if !image.block_device_mappings.nil?\n image.block_device_mappings.each { |disk|\n if !disk.device_name.nil? and !disk.device_name.empty? and !disk.ebs.nil? and !disk.ebs.empty?\n ext_disks[disk.device_name] = MU.structToHash(disk.ebs)\n end\n }\n end\n\n configured_storage = Array.new\n cfm_volume_map = {}\n if @config[\"storage\"]\n @config[\"storage\"].each { |vol|\n # Drop the \"encrypted\" flag if a snapshot for this device exists\n # in the AMI, even if they both agree about the value of said\n # flag. Apparently that's a thing now.\n if ext_disks.has_key?(vol[\"device\"])\n if ext_disks[vol[\"device\"]].has_key?(:snapshot_id)\n vol.delete(\"encrypted\")\n end\n end\n mapping, cfm_mapping = MU::Cloud::AWS::Server.convertBlockDeviceMapping(vol)\n configured_storage << mapping\n }\n end\n\n instance_descriptor[:block_device_mappings] = configured_storage\n instance_descriptor[:block_device_mappings].concat(@ephemeral_mappings)\n instance_descriptor[:monitoring] = {enabled: @config['monitoring']}\n\n MU.log \"Creating EC2 instance #{node}\"\n MU.log \"Instance details for #{node}: #{instance_descriptor}\", MU::DEBUG\n#\t\t\t\tif instance_descriptor[:block_device_mappings].empty?\n#\t\t\t\t\tinstance_descriptor.delete(:block_device_mappings)\n#\t\t\t\tend\n\n retries = 0\n begin\n response = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).run_instances(instance_descriptor)\n rescue Aws::EC2::Errors::InvalidGroupNotFound, Aws::EC2::Errors::InvalidSubnetIDNotFound, Aws::EC2::Errors::InvalidParameterValue => e\n if retries < 10\n if retries > 7\n MU.log \"Seeing #{e.inspect} while trying to launch #{node}, retrying a few more times...\", MU::WARN, details: instance_descriptor\n end\n sleep 10\n retries = retries + 1\n retry\n else\n raise MuError, e.inspect\n end\n end\n\n instance = response.instances.first\n MU.log \"#{node} (#{instance.instance_id}) coming online\"\n\n return instance\n\n end", "def run\n node = Node.new(:instance_type => Aws.instance_type, :instance_id => Aws.instance_id)\n node.save\n write_pid\n process_loop\n end", "def spawn_and_wait(instance)\n instance.register\n\n 20.times do\n instance.send(:connection_open?) ? 
break : sleep(0.1)\n end\n\n # Extra time to make sure the output can attach\n sleep 1\n end", "def create_instances\n min_count = max_count = @bs.number_of_nodes\n puts \"\\nCreating #{max_count} on-demand instance(s)\"\n options = {\n 'ClientToken' => generate_token,\n 'KeyName' => Chef::Config[:knife][:aws_ssh_key_id],\n 'InstanceType' => @bs.flavor,\n 'SubnetId' => @bs[:novpc] ? nil : @bs.subnet_id,\n 'Placement.AvailabilityZone' => @bs.mixins.az.data,\n 'SecurityGroupId' => @bs.mixins.sg.data\n }\n options['EbsOptimized'] = !! @bs[:ebs_optimized]\n\n ## REVIEW\n if ami.root_device_type == \"ebs\"\n ami_map = ami.block_device_mapping.first\n block_device_mapping = {\n 'DeviceName' => ami_map['deviceName'],\n 'Ebs.VolumeSize' => ami_map['volumeSize'].to_s,\n 'Ebs.DeleteOnTermination' => ami_map['deleteOnTermination']\n }\n options['BlockDeviceMapping'] = [block_device_mapping]\n end\n\n ## Optionally only include mapped devices\n ## This way we get all of the ephemeral drives, some unmapped however\n if @bs.mixins.volume.data[:ephemeral_available]\n ephmap = @bs.mixins.volume.data.ephemeral_available.each_with_index.map do |d,i|\n {\n 'VirtualName' => \"ephemeral#{i}\",\n 'DeviceName' => d\n }\n end\n options['BlockDeviceMapping'].concat( ephmap )\n end\n\n if (max_count == 1) and @bs[:private_ip_address]\n options['PrivateIpAddress'] = @bs.private_ip_address\n puts \"Assigning IP ADDRESS : #{options['PrivateIpAddress']}\"\n end\n\n if Chef::Config[:knife][:aws_user_data]\n begin\n options['UserData']= File.read(Chef::Config[:knife][:aws_user_data])\n rescue\n ui.warn(\"Cannot read #{Chef::Config[:knife][:aws_user_data]}:\"\\\n \" #{$!.inspect}. Ignoring option.\")\n end\n end\n\n # -----------------------------------------------------------------\n tries = 5\n print_table(options, 'Launch Config')\n begin\n puts \"\\nSending request...\"\n response = connection.run_instances(@bs.image, min_count,\n max_count, options)\n ui.msg(response.inspect)\n rescue Exception => e\n ui.warn(\"#{e.message}\\nException creating instances\")\n if (tries -= 1) <= 0\n ui.warn(\"\\n\\nMax tries reached. 
Exiting.\\n\\n\")\n exit 1\n else\n ui.msg(\"Trying again.\\n\")\n retry\n end\n end\n # now we have our servers\n instances = response.body['instancesSet']\n # select only instances that have instanceId key and collect those ids\n # into an array\n @bs[:instance_ids] =\n instances.select {|i| i.has_key?('instanceId')}.collect do |i|\n i['instanceId']\n end\n\n puts \"\\nNumber of instances started: #{@bs.instance_ids.size}\\n\"\n sleep 10\n puts \"Getting servers..\"\n # collect an array of servers retrieved based on the instance ids we\n # obtained above\n @bs[:servers] = @bs.instance_ids.collect do |id|\n begin\n server = connection.servers.get(id)\n rescue Exception => e\n sleep 7\n retry\n end\n raise Ec2Error.new(\"server #{id} was nil\") if server.nil?\n server\n end\n end", "def spin_up_instance ami_name, vpc_id, key_name, security_group_id, subnet_id, instance_type = \"t2.micro\"\n resp = client.run_instances({\n dry_run: false,\n image_id: ami_name, # required\n min_count: 1, # required\n max_count: 1, # required\n key_name: key_name,\n instance_type: instance_type, # accepts t1.micro, t2.nano, t2.micro, t2.small, t2.medium, t2.large, m1.small, m1.medium, m1.large, m1.xlarge, m3.medium, m3.large, m3.xlarge, m3.2xlarge, m4.large, m4.xlarge, m4.2xlarge, m4.4xlarge, m4.10xlarge, m4.16xlarge, m2.xlarge, m2.2xlarge, m2.4xlarge, cr1.8xlarge, r3.large, r3.xlarge, r3.2xlarge, r3.4xlarge, r3.8xlarge, x1.16xlarge, x1.32xlarge, i2.xlarge, i2.2xlarge, i2.4xlarge, i2.8xlarge, hi1.4xlarge, hs1.8xlarge, c1.medium, c1.xlarge, c3.large, c3.xlarge, c3.2xlarge, c3.4xlarge, c3.8xlarge, c4.large, c4.xlarge, c4.2xlarge, c4.4xlarge, c4.8xlarge, cc1.4xlarge, cc2.8xlarge, g2.2xlarge, g2.8xlarge, cg1.4xlarge, p2.xlarge, p2.8xlarge, p2.16xlarge, d2.xlarge, d2.2xlarge, d2.4xlarge, d2.8xlarge\n monitoring: {\n enabled: true, # required\n },\n network_interfaces: [\n {\n subnet_id: subnet_id,\n groups: [security_group_id],\n device_index: 0,\n associate_public_ip_address: true\n }\n ],\n\n instance_initiated_shutdown_behavior: \"stop\", # accepts stop, terminate\n })\n\n error \"Error starting EC2 instance #{resp.inspect}\" if resp.instances.nil? 
|| resp.instances.size == 0\n\n return resp.instances[0]\n end", "def start\n MU.log \"Starting #{@cloud_id}\"\n MU::Cloud::Google.compute(credentials: @config['credentials']).start_instance(\n @project_id,\n @config['availability_zone'],\n @cloud_id\n )\n begin\n sleep 5\n end while cloud_desc.status != \"RUNNING\"\n end", "def ecs_wait_container_instances\n @ecs_wait_container_instances = true\n end", "def aws_instance_create(opts)\n AWS::EC2::InstanceCollection.new.create(\n image_id: Rails.configuration.x.aws[Rails.configuration.x.aws['region']][\"ami_#{self.os}\"], \n private_ip_address: self.ip_address,\n key_name: Rails.configuration.x.aws['ec2_key_pair_name'],\n user_data: self.generate_init,\n instance_type: \"t2.small\",\n subnet: self.subnet.driver_id\n )\n end", "def create_server(options = {})\n begin\n server = connection.servers.create(options[:server_def])\n rescue Excon::Errors::BadRequest => e\n response = Chef::JSONCompat.from_json(e.response.body)\n if response['badRequest']['code'] == 400\n message = \"Bad request (400): #{response['badRequest']['message']}\"\n ui.fatal(message)\n else\n message = \"Unknown server error (#{response['badRequest']['code']}): #{response['badRequest']['message']}\"\n ui.fatal(message)\n end\n raise CloudExceptions::ServerCreateError, message\n end\n\n msg_pair(\"Instance Name\", server.name)\n msg_pair(\"Instance ID\", server.id)\n\n print \"\\n#{ui.color(\"Waiting for server [wait time = #{options[:server_create_timeout]}]\", :magenta)}\"\n\n # wait for it to be ready to do stuff\n server.wait_for(Integer(options[:server_create_timeout])) { print \".\"; ready? }\n\n puts(\"\\n\")\n server\n end", "def create_instance(name, key, type, wait=true)\n if name\n @mgr.normalize(name)\n else\n @mgr.normalize_name_parameters()\n end\n @mgr.setparam(\"key\", key)\n name, fqdn, volname, snapid, datasize, dryrun = @mgr.getparams(\"name\", \"fqdn\", \"volname\", \"snapid\", \"datasize\", \"dryrun\")\n\n # Acquire global lock before lookups - ensure if instance doesn't exist,\n # it doesn't get created by another process.\n # NOTE: unlock() will be called by either abort_instance or update_dns\n @mgr.lock()\n\n stale_dns = false\n # Catch any exceptions so we can unlock\n begin\n i, err = resolve_instance()\n if i\n msg = \"Instance #{name} already exists\"\n yield \"#{@mgr.timestamp()} #{msg}\"\n @mgr.unlock\n return nil, msg\n end\n\n if @mgr[\"PrivateDNSId\"]\n lookup = {\n hosted_zone_id: @mgr[\"PrivateDNSId\"],\n start_record_name: fqdn,\n max_items: 1,\n }\n resp = @mgr.route53.list_record_sets(lookup)\n records = resp.resource_record_sets\n if records.size == 1 and fqdn =~ /#{records[0].name}\\.?/\n record = records[0]\n if record.type == \"CNAME\"\n @mgr.unlock()\n raise \"CNAME exists for #{fqdn}, aborting\"\n elsif record.type != \"A\"\n @mgr.unlock()\n raise \"Unable to handle record type for #{fqdn}: #{record.type}\"\n end\n raise \"Unable to handle multiple value lookup for #{fqdn}: #{record.type}\" if record.resource_records.size > 1\n private_ip = record.resource_records[0].value\n @mgr.log(:debug, \"Checking to see if #{fqdn} -> #{private_ip} is stale\")\n f = [ { name: \"private-ip-address\", values: [ private_ip ] } ]\n instances = @resource.instances(filters: f)\n raise \"Private DNS record for #{fqdn} points to existing instance #{instances.first.id}\" if instances.count() > 1\n stale_dns = true\n @mgr.log(:info, \"Found stale private DNS record #{fqdn} -> #{private_ip}, ignoring\")\n end\n end\n rescue => e\n @mgr.unlock\n raise 
e\n end\n\n begin\n template = @mgr.load_template(\"ec2\", type)\n rescue => e\n msg = \"Caught exception loading ec2 template #{type}: #{e.message}\"\n yield \"#{@mgr.timestamp()} #{msg}\"\n @mgr.unlock\n return nil, msg\n end\n\n if volname and ( snapid or datasize )\n msg = \"Invalid parameters: volume provided with snapshot and/or data size\"\n yield \"#{@mgr.timestamp()} #{msg}\"\n @mgr.unlock\n return nil, msg\n end\n\n if dryrun == \"true\" or dryrun == true\n @mgr.unlock\n dry_run = true\n else\n dry_run = false\n end\n\n begin\n if volname\n yield \"#{@mgr.timestamp()} Looking up volume: #{volname}\"\n volume, err = resolve_volume()\n unless volume\n msg = \"Error looking up given volume: #{err}\"\n yield \"#{@mgr.timestamp()} #{msg}\"\n return nil, msg\n end\n end\n existing, err = resolve_volume(name)\n if existing\n if volume\n msg = \"Launching with volume #{volname} will create a duplicate volume name for existing volume #{name}; delete existing volume or use attach volume instead\"\n yield \"#{@mgr.timestamp()} #{msg}\"\n return nil, msg\n else\n volume = existing\n yield \"#{@mgr.timestamp()} Found existing volume for #{name}: #{volume.id()}\"\n end\n end\n\n @mgr.symbol_keys(template[\"api_template\"])\n\n ispec = template[\"api_template\"]\n if volume\n # When re-attaching a volume, the instance needs to launch in the same\n # availability zone\n vol_az = volume.availability_zone()[-1]\n if ispec[:subnet_id]\n if ispec[:subnet_id].match(/#[FL0-9a-j?]/)\n @mgr.log(:debug, \"Setting instance location to ##{vol_az} to match the existing volume\")\n ispec[:subnet_id] = ispec[:subnet_id].sub(/#[FL0-9a-j?]/,\"##{vol_az}\")\n end\n elsif ispec[:network_interfaces][0][:subnet_id]\n if ispec[:network_interfaces][0][:subnet_id].match(/#[FL0-9a-j?]/)\n @mgr.log(:debug, \"Setting instance location to ##{vol_az} to match the existing volume\")\n ispec[:network_interfaces][0][:subnet_id] = ispec[:network_interfaces][0][:subnet_id].sub(/#[FL0-9a-j?]/,\"##{vol_az}\")\n end\n else\n @mgr.log(:warn, \"Unable to identify a location specifier in the instance subnet_id, launching without updating instance location to match the volume\")\n end\n end\n\n if ispec[:block_device_mappings]\n ispec[:block_device_mappings].delete_if() do |dev|\n if dev[:device_name].end_with?(\"a\")\n false\n elsif dev[:device_name].end_with?(\"a1\")\n false\n elsif volume\n true\n else\n e=dev[:ebs]\n if snapid\n e.delete(:encrypted)\n snapshot, err = resolve_snapshot(snapid)\n unless snapshot\n yield \"#{@mgr.timestamp()} Error resolving snapshot: #{snapid}\"\n return nil\n end\n sname = get_tag(snapshot, \"Name\")\n stime = snapshot.start_time.getlocal.strftime(\"%F|%R\")\n yield \"#{@mgr.timestamp()} Launching with data volume from snapshot #{snapshot.id()} for #{sname} created: #{stime}\"\n e[:snapshot_id] = snapshot.id()\n else\n e.delete(:snapshot_id)\n end\n e.delete(:iops) unless e[:volume_type] == \"io1\"\n false\n end\n end\n end\n if ispec[:block_device_mappings] && ispec[:block_device_mappings].size == 0\n ispec.delete(:block_device_mappings)\n end\n\n @mgr.normalize_name_parameters()\n\n @mgr.resolve_vars(template, \"api_template\")\n @mgr.resolve_vars(template, \"tags\")\n\n # Set up tags for private_sg\n cfgtags = @mgr.tags\n name = @mgr.getparam(\"name\")\n cfgtags[\"Name\"] = name\n cfgtags[\"Domain\"] = @mgr[\"DNSDomain\"]\n sgtags = cfgtags.apitags()\n cfgtags.add(template[\"tags\"]) if template[\"tags\"]\n itags = cfgtags.apitags()\n cfgtags[\"InstanceName\"] = @mgr.getparam(\"name\")\n vtags = 
cfgtags.apitags()\n\n # Create a private security group for this instance?\n if template[\"private_sg\"] == true\n begin\n if ispec[:network_interfaces][0][:subnet_id]\n vpc_id = @resource.subnet(ispec[:network_interfaces][0][:subnet_id]).vpc.id\n elsif ispec[:subnet_id]\n vpc_id = @resource.subnet(ispec[:subnet_id]).vpc.id\n else\n raise \"Unable to identify a subnet/VPC for creating instance private security group\"\n end\n existing = @resource.security_groups({\n filters: [\n { name: \"vpc-id\",\n values: [ vpc_id ] },\n { name: \"group-name\",\n values: [ name ] },\n ]\n })\n if existing.count() == 1\n @mgr.log(:info, \"Deleting orphaned private security group #{name}\")\n existing.first.delete()\n end\n @mgr.log(:debug, \"Creating private security group #{name}\")\n private_sg = @resource.create_security_group({\n description: \"Private security group for #{name}\",\n group_name: name,\n vpc_id: vpc_id\n })\n @resource.create_tags(\n resources: [ private_sg.id() ],\n tags: sgtags\n )\n if ispec[:network_interfaces][0][:subnet_id]\n ispec[:network_interfaces][0][:groups] << private_sg.id()\n elsif ispec[:subnet_id]\n ispec[:security_group_ids] << private_sg.id()\n end\n rescue => e\n @mgr.unlock()\n raise \"Error creating instance private security group #{name}: #{e.message}\"\n end\n end\n\n if ispec[:user_data]\n ispec[:user_data] = Base64::encode64(ispec[:user_data])\n end\n\n yield \"#{@mgr.timestamp()} Dry run, creating: #{ispec}\" if dry_run\n\n interfaces = []\n if template[:additional_interfaces]\n @mgr.resolve_vars(template, :additional_interfaces)\n template[:additional_interfaces].each() do |iface|\n interfaces << @resource.create_network_interface(iface)\n end\n end\n unless @mgr.govcloud\n ispec[:tag_specifications] = [\n {\n resource_type: \"instance\",\n tags: itags,\n },\n {\n resource_type: \"volume\",\n tags: vtags,\n }\n ]\n end\n rescue => e\n @mgr.unlock\n raise e\n end\n # puts \"Creating: #{ispec}\"\n\n begin\n instances = @resource.create_instances(ispec)\n rescue => e\n msg = \"Caught exception creating instance: #{e.message}\"\n yield \"#{@mgr.timestamp()} #{msg}\"\n @mgr.unlock\n abort_instance(nil, interfaces, wait, false)\n return nil, msg\n end\n instance = nil\n unless dry_run\n instance = instances.first()\n yield \"#{@mgr.timestamp()} Created instance #{name} (id: #{instance.id()}), waiting for it to enter state running ...\"\n instance.wait_until_running()\n yield \"#{@mgr.timestamp()} Running\"\n if interfaces.size > 0\n iface_index = 1\n interfaces.each() do |iface|\n yield \"#{@mgr.timestamp()} Attaching additional interface ##{iface_index} to #{instance.id()}\"\n attach = iface.attach({ instance_id: instance.id(), device_index: iface_index })\n iface.modify_attribute({ attachment: { attachment_id: attach.attachment_id, delete_on_termination: true } })\n iface_index += 1\n end\n end\n\n begin\n msg = nil\n if volume\n if volume.state == \"available\"\n msg = \"Used existing volume\"\n yield \"#{@mgr.timestamp()} Attaching data volume: #{volume.id()}\"\n begin\n instance.attach_volume({\n volume_id: volume.id(),\n device: \"/dev/sdf\",\n })\n rescue => e\n msg = \"Unable to attach volume, aborting\"\n yield \"#{@mgr.timestamp()} #{msg}\"\n abort_instance(instance, [], wait, true) { |s| yield s }\n return nil, msg\n end\n @client.wait_until(:volume_in_use, volume_ids: [ volume.id() ])\n else\n msg = \"Data volume not in state 'available', aborting\"\n yield \"#{@mgr.timestamp()} #{msg}\"\n abort_instance(instance, [], wait, true) { |s| yield s }\n 
return nil, msg\n end\n end\n\n # Need to refresh to get attached volumes\n instance = @resource.instance(instance.id())\n if @mgr.govcloud\n tag_instance(instance, itags, vtags) { |s| yield s }\n end\n\n if @mgr[\"PrivateDNSId\"] and not stale_dns\n rr = @mgr.route53.lookup(@mgr[\"PrivateDNSId\"])\n if rr.size != 0\n msg = \"DNS record for #{name} created during launch\"\n yield \"#{@mgr.timestamp()} #{msg}\"\n abort_instance(instance, [], wait, true) { |s| yield s }\n return nil, msg\n end\n end\n\n update_dns(nil, wait, instance, true) { |s| yield s }\n rescue => e\n @mgr.unlock\n raise e\n end\n\n # @mgr.unlock() - called by update_dns as soon as records are added\n return instance, msg\n else\n return nil, nil\n end\n end", "def with_running_instance(stop_on_exit: false, destroy_on_exit: false, port: nil)\n log_debug \"[ #{@node}/#{@environment} ] - Create instance...\"\n create\n begin\n wait_for_state!(%i[running created exited])\n if %i[created exited].include?(state)\n log_debug \"[ #{@node}/#{@environment} ] - Start instance...\"\n start\n end\n begin\n wait_for_state!(:running)\n instance_ip = ip\n if instance_ip.nil?\n log_debug \"[ #{@node}/#{@environment} ] - No host_ip linked to the instance.\"\n elsif instance_ip != @nodes_handler.get_host_ip_of(@node)\n log_debug \"[ #{@node}/#{@environment} ] - Set host_ip to #{instance_ip}.\"\n # The instance is running on an IP that is not the one registered by default in the metadata.\n # Make sure we update it.\n @nodes_handler.override_metadata_of @node, :host_ip, instance_ip\n @nodes_handler.invalidate_metadata_of @node, :host_keys\n # Make sure the SSH transformations don't apply to this node\n @config.ssh_connection_transforms.replace(@config.ssh_connection_transforms.map do |ssh_transform_info|\n {\n nodes_selectors_stack: ssh_transform_info[:nodes_selectors_stack].map do |nodes_selector|\n @nodes_handler.select_nodes(nodes_selector).reject { |selected_node| selected_node == @node }\n end,\n transform: ssh_transform_info[:transform]\n }\n end)\n end\n wait_for_port!(port) if port\n yield\n ensure\n if stop_on_exit\n log_debug \"[ #{@node}/#{@environment} ] - Stop instance...\"\n stop\n wait_for_state!(:exited)\n end\n end\n ensure\n if stop_on_exit && destroy_on_exit\n log_debug \"[ #{@node}/#{@environment} ] - Destroy instance...\"\n destroy\n end\n end\n end", "def start_and_create_instances(num=1, user_data=nil)\n logger.info \"ENTERING DELAYED JOB\"\n begin\n new_instances = run_spot_instances(num, user_data)\n new_instances.each do |i|\n temp = Instance.create_from_aws_hash(i)\n temp.user_data = user_data\n temp.state = 'launched'\n temp.save\n end\n logger.info \"Started and saved #{num} #{ami_id} instances.\"\n EventLog.info \"Started and saved #{num} #{ami_id} instances.\"\n rescue Exception => e\n logger.error \"Caught exception when trying to start #{num} #{ami_id} instances!: #{e.message} #{e.backtrace}\"\n EventLog.error \"Caught exception when trying to start #{num} #{ami_id} instances!: #{e.message} #{e.backtrace}\"\n end\n end", "def create # rubocop:disable Metrics/AbcSize\n inst_details = AttrFinder.new(@instanceparameters)\n inst_details.options = @options\n inst_details.validate = @validate\n inst_details.function = 'server'\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::Models::LaunchInstanceDetails.new\n ssh_public_key = @instanceparameters['server']['ssh-key']\n request.availability_domain = inst_details.ad\n request.compartment_id = inst_details.compartment\n request.display_name = 
@instanceparameters['server']['display_name']\n request.image_id = inst_details.image\n request.shape = @instanceparameters['server']['shape']\n request.subnet_id = inst_details.subnet\n request.metadata = { 'ssh_authorized_keys' => ssh_public_key }\n api = OracleBMC::Core::ComputeClient.new\n response = api.launch_instance(request)\n @instance_id = response.data.id\n compartment(inst_details.compartment)\n running_instance = api.get_instance(@instance_id).wait_until(:lifecycle_state,\n OracleBMC::Core::Models::Instance::LIFECYCLE_STATE_RUNNING,\n max_interval_seconds: 5, max_wait_seconds: 300)\n if @instanceparameters['server']['attachments']\n @instanceparameters['server']['attachments'].each do |vol|\n attach(@instance_id, vol['volume'])\n end\n end\n running_instance\n end", "def spawn_and_wait(instance)\n instance.register\n\n output_queue # materialize in this thread\n\n Thread.new {\n instance.run(output_queue)\n }\n\n 20.times do\n instance.connected? ? break : sleep(0.1)\n end\n\n # Extra time to make sure the consumer can attach\n # Without this there's a chance the shutdown code will execute\n # before consumption begins. This is tricky to do more elegantly\n sleep 1\n end", "def aws_instance_wait_till_status_equals(obj, status, time)\n log \"AWS: waiting for #{obj.class.to_s.split(\"::\").last} '#{obj.id}' status to change to ':#{status}'\"\n begin\n Timeout.timeout(time) do \n sleep 1 while aws_call(\n 'aws_instance_status', \n instance: obj,\n errs: { AWS::EC2::Errors::InvalidInstanceID::NotFound => 60 }\n ) != status\n end\n rescue Timeout::Error => e\n raise \"AWS: timeout while waiting for #{obj.class.to_s.split(\"::\").last} '#{obj.id} status to change to ':#{status}'\"\n end\n end", "def start_terminating! options={}\n return true if away?\n return :wait if terminating? || busy?\n Log.info \"Terminating #{self}\"\n response = Wucluster.ec2.terminate_instances options.merge(:instance_id => [self.id])\n new_state = response.instancesSet.item.first.currentState.name rescue nil\n Log.warn \"Request returned funky status: #{new_state}\" unless (['shutting-down', 'terminated'].include? 
new_state)\n self.status = new_state.gsub(/-/,'_').to_sym\n dirty!\n response\n end", "def create(state)\n info(\"Creating instance #{instance.name}\")\n return if state[:server_id]\n\n domain = create_domain\n state[:server_id] = domain.id\n state[:hostname] = domain.public_ip_address\n\n instance.transport.connection(state).wait_until_ready\n\n info(\"Libvirt instance #{domain.name} created.\")\n end", "def create_instances(count)\n result = client.run_instances(AwsForm.map(params.merge(count: count)))\n instance_ids = result.instances.map(&:instance_id)\n ret = wait_for_create(instance_ids)\n # need to make sure add_tags is done after after wait_until or can have error that instance ids dont exist\n add_tags?(instance_ids, with_dtk_tag: true)\n IamInstanceProfile.set_iam_instance_profiles(self, instance_ids, params.iam_instance_profile) unless params.iam_instance_profile.nil?\n ret\n end", "def _wait_for_instance_health_check(instance)\n health_threshold = health_check.healthy_threshold\n interval = health_check.interval\n\n # wait a little longer than the defined threshold to account for application launch time\n timeout = ((health_threshold + 2) * interval)\n\n begin\n client.wait_until(:instance_in_service, {load_balancer_name: name,\n instances: [{instance_id: instance.ec2_instance_id}]}) do |w|\n w.before_attempt do |attempt|\n puts \"Attempt #{attempt} to check health status for #{instance.hostname}\".light_black\n end\n w.interval = 10\n w.max_attempts = timeout / w.interval\n end\n puts \"Instance #{instance.hostname} is now InService\".green\n true\n rescue Aws::Waiters::Errors::WaiterFailed => e\n puts \"Instance #{instance.hostname} failed to move to InService, #{e.message}\".red\n false\n end\n\n end", "def postBoot(instance_id = nil)\n @cloud_id ||= instance_id\n _node, _config, deploydata = describe(cloud_id: @cloud_id)\n\n raise MuError, \"Couldn't find instance #{@mu_name} (#{@cloud_id})\" if !cloud_desc\n return false if !MU::MommaCat.lock(@cloud_id+\"-orchestrate\", true)\n return false if !MU::MommaCat.lock(@cloud_id+\"-groom\", true)\n\n getIAMProfile\n\n finish = Proc.new { |status|\n MU::MommaCat.unlock(@cloud_id+\"-orchestrate\")\n MU::MommaCat.unlock(@cloud_id+\"-groom\")\n return status\n }\n\n MU::Cloud::AWS.createStandardTags(\n @cloud_id,\n region: @region,\n credentials: @credentials,\n optional: @config['optional_tags'],\n nametag: @mu_name,\n othertags: @config['tags']\n )\n\n # Make double sure we don't lose a cached mu_windows_name value.\n if (windows? 
or !@config['active_directory'].nil?)\n @mu_windows_name ||= deploydata['mu_windows_name']\n end\n\n loop_if = Proc.new {\n !cloud_desc(use_cache: false) or cloud_desc.state.name != \"running\"\n }\n MU.retrier([Aws::EC2::Errors::ServiceError], max: 30, wait: 40, loop_if: loop_if) { |retries, _wait|\n if cloud_desc and cloud_desc.state.name == \"terminated\"\n logs = if !@config['basis'].nil?\n pool = @deploy.findLitterMate(type: \"server_pools\", name: @config[\"name\"])\n if pool\n MU::Cloud::AWS.autoscale(region: @region, credentials: @credentials).describe_scaling_activities(auto_scaling_group_name: pool.cloud_id).activities\n else\n nil\n end\n end\n raise MuError.new, \"#{@cloud_id} appears to have been terminated mid-bootstrap!\", details: logs\n end\n if retries % 3 == 0\n MU.log \"Waiting for EC2 instance #{@mu_name} (#{@cloud_id}) to be ready...\", MU::NOTICE\n end\n }\n\n allowBastionAccess\n\n setAlarms\n\n # Unless we're planning on associating a different IP later, set up a\n # DNS entry for this thing and let it sync in the background. We'll come\n # back to it later.\n if @config['static_ip'].nil? and !@named\n MU::MommaCat.nameKitten(self)\n @named = true\n end\n\n if !@config['src_dst_check'] and !@config[\"vpc\"].nil?\n MU.log \"Disabling source_dest_check #{@mu_name} (making it NAT-worthy)\"\n MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).modify_instance_attribute(\n instance_id: @cloud_id,\n source_dest_check: { value: false }\n )\n end\n\n # Set console termination protection. Autoscale nodes won't set this\n # by default.\n MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).modify_instance_attribute(\n instance_id: @cloud_id,\n disable_api_termination: { value: true}\n )\n\n tagVolumes\n configureNetworking\n saveCredentials\n\n if !@config['image_then_destroy']\n notify\n end\n\n finish.call(false) if !bootstrapGroomer\n\n # Make sure we got our name written everywhere applicable\n if !@named\n MU::MommaCat.nameKitten(self)\n @named = true\n end\n\n finish.call(true)\n end", "def cleanup(vpc_id, client, asg, elbv1, elbv2, region)\n begin\n response = client.describe_vpcs(vpc_ids: [vpc_id])\n rescue Aws::EC2::Errors::InvalidVpcIDNotFound => e\n puts \"Error: vpc_id [#{vpc_id}] does not exist... 
exiting.\"\n puts \"Make sure you passed the correct region on the command-line if it's not in the default us-west-2\"\n exit 0\n end\n\n # 1) Delete Auto Scaling group\n begin\n asg.delete_auto_scaling_group(auto_scaling_group_name: 'asg-nginx_auto', force_delete: true)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n sleep 2\n # TODO: properly wait here until ASG is fully deleted before proceeding...\n begin\n asg.delete_launch_configuration(launch_configuration_name: 'lc-nginx_auto')\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n sleep 2\n\n # 2) instance handling\n term_error = 0\n instances_to_term = []\n terminate_states = %w[pending running shutting-down stopping stopped]\n puts 'Checking for nginx and nat instances, of all states...'\n begin\n response = client.describe_instances(filters: [{name: 'tag:Name', values: ['autoASG nginx server',\n 'nat instance']}])\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n response.reservations.each do |reservation|\n reservation.instances.each do |instance|\n puts \"Check #1: instance-id=[#{instance.instance_id}] AMI=[#{instance.image_id}] state=[#{instance.state.name}]\"\n instances_to_term.push(instance.instance_id) if terminate_states.include? instance.state.name\n end\n end\n\n if instances_to_term.any? # If array has content, proceed.\n term_error = 0\n instance_cnt = instances_to_term.length # TODO: Check before/after termination attempts and ensure = 0\n before_terminate = Time.now\n begin\n client.wait_until(:instance_terminated,instance_ids: instances_to_term) do |wait|\n wait.interval = 8 # Seconds between polling attempts. Same as wait.delay\n wait.max_attempts = 15 # Polling attempts before giving up. Wait time is 15*8=120 seconds.\n puts \"Attempting to terminate [#{instance_cnt}] instance(s), please wait up to 120 seconds...\"\n begin\n client.terminate_instances(instance_ids: instances_to_term)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n end\n rescue Aws::Waiters::Errors::WaiterFailed => error\n term_error = 1 # TODO: Do something more reliable if this ever occurs.\n puts \"Exception: failed waiting for instance running: #{error.message}\"\n end\n puts \"#{Time.now - before_terminate.to_time} seconds elapsed while terminating.\" if term_error.zero?\n end\n\n if term_error.zero?\n # Debug with final instance check... this shouldn't print anything aside from terminated instances.\n begin\n response = client.describe_instances(filters: [{name: 'tag:Name', values: ['autoASG nginx server',\n 'nat instance']}])\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n response.reservations.each do |reservation|\n reservation.instances.each do |instance|\n puts \"Check #2: instance-id=[#{instance.instance_id}] AMI=[#{instance.image_id}] state=[#{instance.state.name}]\"\n end\n end\n end\n\n puts 'Sleeping for 5 seconds...'\n sleep 5\n\n # 3) Delete listeners\n printf 'Deleting ALB listeners... 
'\n begin\n response = elbv2.describe_load_balancers(names: ['AutoALB'])\n alb_arn = response.load_balancers[0].load_balancer_arn\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n sleep 2\n begin\n response = elbv2.describe_listeners(load_balancer_arn: alb_arn)\n listener_arn = response.listeners[0].listener_arn\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n begin\n elbv2.delete_listener(listener_arn: listener_arn)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n sleep 2\n puts 'done.'\n\n # 4) Delete target groups\n printf 'Deleting ALB target groups... '\n begin\n response = elbv2.describe_target_groups(names: ['AutoALBTargetGroup'])\n target_group_arn = response.target_groups[0].target_group_arn\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n begin\n elbv2.delete_target_group(target_group_arn: target_group_arn)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n sleep 5\n puts 'done.'\n\n # 5) Delete load balancers\n printf 'Deleting application and classic load balancers... '\n begin\n elbv1.delete_load_balancer(load_balancer_name: 'AutoCLB')\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n begin\n elbv2.delete_load_balancer(load_balancer_arn: alb_arn)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n puts 'done.'\n\n puts 'Sleeping for 120 seconds, enough time for the ASG to fully disappear before deleting subnets.'\n sleep 120\n\n # 6) subnets\n begin\n response = client.describe_subnets(filters: [{name: 'vpc-id', values: [vpc_id]}])\n response.subnets.each do |sn|\n printf \"Removing subnet: #{sn.subnet_id}, #{sn.vpc_id}, #{sn.cidr_block}, #{sn.availability_zone}; \"\n client.delete_subnet(subnet_id: sn.subnet_id)\n puts 'Done.'\n end\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n # 7) security groups\n begin\n response = client.describe_security_groups(filters: [{name: 'vpc-id', values: [vpc_id]}])\n response.security_groups.each do |sg|\n next if sg.group_name == 'default' # This name is reserved by aws and cannot be removed.\n printf \"Removing security group: #{sg.group_id}, #{sg.vpc_id}, #{sg.group_name}, Desc='#{sg.description}'; \"\n client.delete_security_group(group_id: sg.group_id)\n puts 'Done.'\n end\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n # 8) Internet gateway\n begin\n response = client.describe_internet_gateways(filters: [{name: 'attachment.vpc-id', values: [vpc_id]}])\n response.internet_gateways.each do |igw|\n printf \"Detaching Internet gateway: #{igw.internet_gateway_id} <-> #{igw.attachments[0].vpc_id}; \"\n client.detach_internet_gateway(internet_gateway_id: igw.internet_gateway_id, vpc_id: igw.attachments[0].vpc_id)\n printf \"Removing Internet gateway: #{igw.internet_gateway_id} <-> #{igw.attachments[0].vpc_id}; \"\n client.delete_internet_gateway(internet_gateway_id: igw.internet_gateway_id)\n puts 'Done.'\n end\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n # 9) Route tables\n begin\n response = client.describe_route_tables(filters: [{name: 'vpc-id', values: [vpc_id]}])\n response.route_tables.each do |rtl|\n if rtl.associations[0] != nil\n if rtl.associations[0].route_table_association_id 
!= nil\n puts \"Skipping #{rtl.associations[0].route_table_association_id}, causes exception.\"\n end\n next\n end\n printf \"Removing route table: #{rtl.route_table_id}, #{rtl.vpc_id}; \"\n client.delete_route_table(route_table_id: rtl.route_table_id)\n puts 'Done.'\n end\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n # 10) Delete VPC\n begin\n puts \"Attempting to delete VPC [#{vpc_id}]\"\n client.delete_vpc(vpc_id: vpc_id)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n begin\n response = client.describe_vpcs(vpc_ids: [vpc_id])\n rescue Aws::EC2::Errors::InvalidVpcIDNotFound => e\n puts \"Deletion of vpc_id=[#{vpc_id}] was a success.\"\n exit 0\n end\n\n # Shouldn't get here, but sometimes does due to the ASG taking too long to\n # release subnet dependencies, or if running a cleanup before a launch has\n # fully completed, etc.\n puts 'Oops. Termination sometimes fails for various reasons.'\n puts 'Note: running this twice usually clears up any lingering dependencies.'\n puts ' With that said, there is still work to be done. Execute again.'\n exit 0\nend", "def get_available_host\n @hosts.each do |host|\n return host if ws_running?([host]) < @ws_limit\n end\n puts \"No host available! Providing a new one on aws\"\n json = `aws ec2 run-instances --image-id ami-689cc908 --instance-type t2.medium --key-name mior --security-group-id sg-5bb3af3f`\n json = JSON.parse(json)\n internal_ip = json[\"Instances\"][0][\"NetworkInterfaces\"][0][\"PrivateIpAddress\"]\n public_ip = nil\n while public_ip.nil?\n cmd = \"aws ec2 describe-instances --filter \\\"Name=private-ip-address, Values=#{internal_ip}\\\"\"\n json = JSON.parse(`#{cmd}`)\n public_ip = json[\"Reservations\"][0][\"Instances\"][0][\"NetworkInterfaces\"][0][\"Association\"][\"PublicIp\"]\n end\n puts \"New instance on aws: #{public_ip}\"\n @hosts.push(public_ip)\n puts 'Waiting the host powers up..'\n sleep 30\n public_ip\n # The elegant way should be instantiate a new host to receive more requests\n end", "def wait_for_up!\n instance.wait_for_up!('eth1')\n end", "def openvz_fog_test_server\n server = openvz_service.servers.find { |s| s.ctid == '104' }\n unless server\n server = openvz_service.servers.create :ctid => '104'\n server.start\n server.reload\n # Wait for the server to come up\n begin\n server.wait_for(120) { server.reload rescue nil; server.ready? }\n rescue Fog::Errors::TimeoutError\n # Server bootstrap took more than 120 secs!\n end\n end\n\n openvz_fog_test_cleanup\n\n server\nend", "def create\n @ec2_instance = Ec2Instance.new(ec2_instance_params)\n\n respond_to do |format|\n if @ec2_instance.save\n format.html { redirect_to @ec2_instance, notice: 'Ec2 instance was successfully created.' 
}\n format.json { render :show, status: :created, location: @ec2_instance }\n else\n format.html { render :new }\n format.json { render json: @ec2_instance.errors, status: :unprocessable_entity }\n end\n end\n end", "def find_or_create_target(target_type, aws_instance_options)\n if target_type.downcase == 'aws'\n # Check or create new cluster on AWS\n if File.exist?(\"#{aws_instance_options[:cluster_name]}.json\")\n puts \"It appears that a cluster for #{aws_instance_options[:cluster_name]} is already running.\"\n puts \"If this is not the case then delete ./#{aws_instance_options[:cluster_name]}.json file.\"\n puts \"Or run 'bundle exec rake clean'\"\n puts 'Will try to continue'\n\n # Load AWS instance\n aws = OpenStudio::Aws::Aws.new\n aws.load_instance_info_from_file(\"#{aws_instance_options[:cluster_name]}.json\")\n server_dns = \"http://#{aws.os_aws.server.data.dns}\"\n puts \"Server IP address #{server_dns}\"\n\n else\n puts \"Creating cluster for #{aws_instance_options[:user_id]}\"\n puts 'Starting cluster...'\n\n # Don't use the old API (Version 1)\n ami_version = aws_instance_options[:os_server_version][0] == '2' ? 3 : 2\n aws_options = {\n ami_lookup_version: 3,\n openstudio_server_version: aws_instance_options[:os_server_version]\n }\n aws = OpenStudio::Aws::Aws.new(aws_options)\n\n server_options = {\n instance_type: aws_instance_options[:server_instance_type],\n user_id: aws_instance_options[:user_id],\n tags: aws_instance_options[:aws_tags]\n }\n\n worker_options = {\n instance_type: aws_instance_options[:worker_instance_type],\n user_id: aws_instance_options[:user_id],\n tags: aws_instance_options[:aws_tags]\n }\n\n start_time = Time.now\n\n # Create the server & worker\n aws.create_server(server_options)\n aws.save_cluster_info(\"#{aws_instance_options[:cluster_name]}.json\")\n aws.print_connection_info\n aws.create_workers(aws_instance_options[:worker_node_number], worker_options)\n aws.save_cluster_info(\"#{aws_instance_options[:cluster_name]}.json\")\n aws.print_connection_info\n server_dns = \"http://#{aws.os_aws.server.data.dns}\"\n\n puts \"Cluster setup in #{(Time.now - start_time).round} seconds. 
Awaiting analyses.\"\n puts \"Server IP address is #{server_dns}\"\n end\n OpenStudio::Analysis::ServerApi.new(hostname: server_dns)\n else\n OpenStudio::Analysis::ServerApi.new(hostname: lookup_target_url(target_type))\n end\nend", "def create_aws_instance(config, name, instance_type=\"m3.medium\")\n config.ssh.pty = true\n config.vm.define name do |server|\n server.vm.box = AWS_BOX\n server.vm.provider :aws do |aws, override|\n aws.instance_type = instance_type\n aws.region = AWS_REGION\n aws.ami = AWS_AMI\n aws.keypair_name = AWS_PRIVATE_KEY\n override.ssh.username = AWS_SSH_USERNAME\n override.ssh.private_key_path = AWS_PRIVATE_KEY_PATH\n yield(aws,override,server)\n end\n end\nend", "def create\n ec2 = self.class.new_ec2(@resource.value(:user), @resource.value(:password))\n group = @resource.value(:name)\n begin\n ec2.describe_security_groups({:group_name => group})\n rescue Exception => e\n ec2.create_security_group({ \n :group_name => group,\n :group_description => @resource.value(:desc)\n })\n end\n # if instance in that security group exists, start it\n # otherwise just create a new instance \n ec2.run_instances(\n { :image_id => @resource.value(:image),\n # security groups\n :security_group => group,\n :instance_type => @resource.value(:type)\n })\n end", "def ec2_instance(instance_id)\n EC2Instance.new(instance_id)\n end", "def ec2_instance(instance_id)\n EC2Instance.new(instance_id)\n end", "def Main()\r\n puts \"thanks for turning me on\"\r\n ec2 = SetupEC2Instance()\r\n data = GetInstanceInfo(ec2)\r\n #data = \"data_not_accurate\"\r\n ChangeEC2State(data, ec2)\r\n puts \"i hope you are satisfied\"\r\nend", "def create\n @instance = Instance.new(params[:instance])\n\n respond_to do |format|\n if @instance.save\n\n system(\"ssh root@#{CloudGui::Application.config.hypervisor_ip} \\\"exec /data/cloud/scripts/provision.sh #{params[:instance][:cpus]} #{params[:instance][:memory]} #{params[:instance][:baseimage]} 2>&1 | tee /data/cloud/logs/cloud_gui.log\\\"\")\n\n format.html { redirect_to @instance, notice: 'Instance was successfully created.' }\n format.json { render json: @instance, status: :created, location: @instance }\n else\n format.html { render action: \"new\" }\n format.json { render json: @instance.errors, status: :unprocessable_entity }\n end\n end\n end", "def start_instances\n started = 0\n autoscaling_instances.each do |instance|\n ec2_instance = instance.ec2_instance\n next if !ec2_instance.exists?\n\n if ec2_instance.status == :stopped\n @task.unsafe(\"Starting instance #{instance.instance_id}\") do\n ec2_instance.start\n load_balancers.each do |elb|\n elb.instances.register(instance.instance_id)\n end\n started += 1\n end\n else\n @task.debug { \"Instance #{instance.instance_id} already running\" }\n end\n end\n\n # FIXME\n # This is to give instances a little more time to start up and become\n # healthy before restarting autoscaling processes.\n # If an instance isn't started and healthy in time, the autoscale will kill\n # it for being unhealthy.\n #\n # The \"right\" way to do it would be to actually poll the instances until\n # they are healthy (or a timeout is reached). 
With the current task model,\n # other actions are blocked while this is waiting, so I can't afford to\n # wait too long.\n sleep(@grace_period) if started > 0\n end", "def start_instances\n started = 0\n autoscaling_instances.each do |instance|\n ec2_instance = instance.ec2_instance\n next if !ec2_instance.exists?\n\n if ec2_instance.status == :stopped\n @task.unsafe(\"Starting instance #{instance.instance_id}\") do\n ec2_instance.start\n load_balancers.each do |elb|\n elb.instances.register(instance.instance_id)\n end\n started += 1\n end\n else\n @task.debug { \"Instance #{instance.instance_id} already running\" }\n end\n end\n\n # FIXME\n # This is to give instances a little more time to start up and become\n # healthy before restarting autoscaling processes.\n # If an instance isn't started and healthy in time, the autoscale will kill\n # it for being unhealthy.\n #\n # The \"right\" way to do it would be to actually poll the instances until\n # they are healthy (or a timeout is reached). With the current task model,\n # other actions are blocked while this is waiting, so I can't afford to\n # wait too long.\n sleep(@grace_period) if started > 0\n end", "def aws_instance_elastic_ip_create(instance)\n log \"AWS: creating ElasticIP for Instance '#{instance.id}'\"\n # get elastic ip object\n elastic_ip = aws_call('aws_elastic_ip_create')\n log \"AWS: created ElasticIP '#{elastic_ip.public_ip}'\"\n\n # this is interesting, perhaps elastic ips dont have statuses like other resources, or else why not use our helper fn?\n log \"AWS: waiting for ElasticIP '#{elastic_ip.public_ip}' to exist\"\n Timeout.timeout(360) { sleep 1 while not aws_call('aws_obj_exists?', obj: elastic_ip) }\n\n # give our NAT vm its elastic IP!\n log \"AWS: associating ElastipIP '#{elastic_ip.public_ip}' with Instance '#{instance.id}'\"\n aws_call(\n 'aws_instance_elastic_ip_associate',\n instance: instance,\n elastic_ip: elastic_ip,\n errs: { AWS::EC2::Errors::InvalidAllocationID::NotFound => 60 }\n )\n \n # update ip_address_public attribute\n self.update_attribute(:ip_address_public, elastic_ip.public_ip)\n end", "def create_server(options = {})\n begin\n add_custom_attributes(options[:server_def])\n server = connection.servers.create(options[:server_def])\n\n print \"\\nWaiting For Server\"\n server.wait_for(Integer(options[:server_create_timeout])) do\n print '.'\n !locked?\n end\n\n # attach/or create any volumes.\n options[:server_volumes].each do |voldef|\n Chef::Log.debug(\"Volume definition: #{voldef}\")\n if voldef.key?(:size) || voldef.key?(:size_gb)\n # create a new volume\n result = connection.add_volume(server.id, voldef)\n name = (result / 'disk/name').first.text\n elsif voldef.key? 
:id\n server.attach_volume(voldef)\n name = voldef[:id]\n else\n raise CloudExceptions::ServerCreateError, \"cannot handle volume definition #{voldef}\"\n end\n\n print \"\\nAttached #{name} volume\"\n end\n\n print \"\\nWaiting For Volumes\"\n server.wait_for(Integer(options[:server_create_timeout])) do\n print '.'\n !locked?\n end\n Chef::Log.debug(\"options: #{options}\")\n server.start_with_cloudinit(user_data: options[:cloud_init])\n rescue Excon::Error::BadRequest => e\n response = Chef::JSONCompat.from_json(e.response.body)\n message = if response['badRequest']['code'] == 400\n \"Bad request (400): #{response['badRequest']['message']}\"\n else\n \"Unknown server error (#{response['badRequest']['code']}): #{response['badRequest']['message']}\"\n end\n ui.fatal(message)\n raise CloudExceptions::ServerCreateError, message\n rescue Fog::Errors::Error => e\n raise CloudExceptions::ServerCreateError, e.message\n end\n\n print \"\\n#{ui.color(\"Waiting for server [wait time = #{options[:server_create_timeout]}]\", :magenta)}\"\n\n # wait for it to be ready to do stuff\n server.wait_for(Integer(options[:server_create_timeout])) do\n print '.'\n ready?\n end\n\n puts(\"\\n\")\n server\n end", "def instance_exists?(ec2_client, instance_id)\n ec2_client.describe_instances(instance_ids: [instance_id])\n return true\nrescue StandardError\n return false\nend", "def launch_minimum_number_of_instances \n if can_start_a_new_instance? && !minimum_number_of_instances_are_running? \n list_of_pending_instances.size == 0 ? request_launch_one_instance_at_a_time : wait(\"5.seconds\")\n reset!\n launch_minimum_number_of_instances\n provision_slaves_from_n(minimum_instances.to_i)\n after_launched\n end\n end", "def check_instance\n return if ec2_instance?\n abort 'FATAL: NOT an EC2 instance or could not connect to Metadata'\n end", "def start!\n\n # Best thing to avoid run conditions are to wait\n sleep rand(10)\n\n # Find out who is who, instances alive\n # If discover() returns an Array full of nil(s), alive will become an empty Array\n alive = cleanup(@aws.discover())\n\n # Given the alive instances, find our prefix\n # If alive an empty array, selection will return the number '1'\n @prefix = ensurezero(selection(alive))\n\n # Put together hostname/fqdn\n construction()\n\n # Set the Name tag on this instance\n @aws.settag!(@hostname)\n\n # Find out who is who, instances alive\n # If discover() returns an Array full of nil(s), alive will become an empty Array\n alive = cleanup(@aws.discover())\n\n # Only enter recursion if the uniq() length of the alive array does not equal the actual length\n # On AutoScalingGroup initalization, the cleanup() should ensure the alive array is empty not nil so uniq() works\n unless alive.uniq.length == alive.length\n # There are duplicates, remove tag, wait, restart\n @aws.removetag!()\n sleep rand(10)\n start!()\n end\n\n # Register in DNS\n @aws.setroute53(@options[:zone], @fqdn)\n\n # Set the localhost hostname\n setlocal()\n\n # Set /etc/hosts\n sethostsfile()\n\n # Throw the hostname in /etc/sysconfig/httpd (if exists)\n givetohttpd()\n\n # All done\n finish!()\n end", "def request_launch_one_instance_at_a_time\n when_no_pending_instances { launch_new_instance! 
}\n end", "def create_instance project_id:, instance_id:\n # [START spanner_create_instance]\n # project_id = \"Your Google Cloud project ID\"\n # instance_id = \"Your Spanner instance ID\"\n\n require \"google/cloud/spanner\"\n require \"google/cloud/spanner/admin/instance\"\n\n instance_admin_client = Google::Cloud::Spanner::Admin::Instance.instance_admin\n\n project_path = instance_admin_client.project_path project: project_id\n instance_path = instance_admin_client.instance_path project: project_id, instance: instance_id\n instance_config_path = instance_admin_client.instance_config_path project: project_id, instance_config: \"regional-us-central1\"\n\n job = instance_admin_client.create_instance parent: project_path,\n instance_id: instance_id,\n instance: { name: instance_path,\n config: instance_config_path,\n display_name: instance_id,\n node_count: 2,\n labels: { cloud_spanner_samples: \"true\" } }\n\n puts \"Waiting for create instance operation to complete\"\n\n job.wait_until_done!\n\n if job.error?\n puts job.error\n else\n puts \"Created instance #{instance_id}\"\n end\n # [END spanner_create_instance]\nend", "def run_instances(parameters, secret)\n Kernel.puts(\"Received a request to run instances\")\n\n if @secret != secret\n Kernel.puts(\"Incoming secret #{secret} does not match current secret \" +\n \"#{@secret}, rejecting request.\")\n return BAD_SECRET_RESPONSE\n end\n\n Kernel.puts(\"Request parameters are #{parameters.inspect}\")\n RUN_INSTANCES_REQUIRED_PARAMS.each { |required_param|\n if parameters[required_param].nil? or parameters[required_param].empty?\n Kernel.puts(\"Incoming parameters was missing required parameter \" +\n \"#{required_param}, rejecting request.\")\n return {\"success\" => false, \"reason\" => \"no #{required_param}\"}\n end\n }\n\n reservation_id = HelperFunctions.get_random_alphanumeric()\n @reservations[reservation_id] = {\n \"success\" => true,\n \"reason\" => \"received run request\",\n \"state\" => \"pending\",\n \"vm_info\" => nil\n }\n Kernel.puts(\"Generated reservation id #{reservation_id} for this request.\")\n\n Thread.new {\n HelperFunctions.set_creds_in_env(parameters['credentials'], \"1\")\n public_ips, private_ips, ids = HelperFunctions.spawn_vms(parameters)\n @reservations[reservation_id][\"state\"] = \"running\"\n @reservations[reservation_id][\"vm_info\"] = {\n \"public_ips\" => public_ips,\n \"private_ips\" => private_ips,\n \"instance_ids\" => ids\n }\n Kernel.puts(\"Successfully finished request #{reservation_id}.\")\n }\n\n Kernel.puts(\"Successfully started request #{reservation_id}.\")\n return {\"success\" => true, \"reservation_id\" => reservation_id, \n \"reason\" => \"none\"}\n end", "def run\n super\n\n # Get the AWS Credentials\n aws_keys = get_aws_keys_from_entity_type(_get_entity_type_string)\n return unless aws_keys.access_key && aws_keys.secret_key\n\n return unless aws_keys_valid?(aws_keys.access_key, aws_keys.secret_key, aws_keys.session_token)\n\n regions = retrieve_region_list\n instance_collection = regions.map do |r|\n retrieve_instances(r, aws_keys.access_key, aws_keys.secret_key, aws_keys.session_token)\n end\n\n instance_collection.compact!\n return if instance_collection.size.zero?\n\n create_ec2_instances(instance_collection)\n end", "def create_policy_role_EC2\n\n AWS.config(\n :access_key_id => ENV[\"S3_ACCESS_KEY\"], \n :secret_access_key => ENV[\"S3_SECRET_KEY\"])\n\n # naming policy \n role_name = 'ec2-start-stop'\n policy_name = 'ec2-start-stop'\n profile_name = 'ec2-start-stop' \n 
instance_profile_name = 'inst-ec2-start-stop' \n\n # building a custom policy \n policy = AWS::IAM::Policy.new\n policy.allow(\n :actions => [\"ec2:StartInstances\",\"ec2:StopInstances\"],\n :resources => '*')\n\n # EC2 can generate session credentials\n assume_role_policy_document = '{\"Version\":\"2008-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Service\":[\"ec2.amazonaws.com\"]},\"Action\":[\"sts:AssumeRole\"]}]}'\n \n # creating a role\n $iam.client.create_role(\n :role_name => role_name,\n :assume_role_policy_document => assume_role_policy_document)\n\n # adding policy to role\n $iam.client.put_role_policy(\n :role_name => role_name,\n :policy_name => policy_name,\n :policy_document => policy.to_json)\n\n # creating a profile for the role\n response = $iam.client.create_instance_profile(\n :instance_profile_name => instance_profile_name)\n \n # ARN\n profile_arn = response[:instance_profile][:arn]\n \n $iam.client.add_role_to_instance_profile(\n :instance_profile_name => instance_profile_name,\n :role_name => role_name)\n\n # you can use the profile name or ARN as the :iam_instance_profile option\n $ec2 = AWS::EC2.new\n $ec2.instances.create(:image_id => \"ami-inst-id-1\", :iam_instance_profile => profile_name)\n\n redirect_to iams_path, notice: 'Added Policy and Role for EC2'\n \n end", "def configure_instance(aws_node, private_ip_address, node_name, node_config)\n # Spin up EC2 instances\n aws_node.vm.provider :aws do |ec2, override|\n ec2.keypair_name = KEYPAIR_NAME\n ec2.access_key_id = ACCESS_KEY_ID\n ec2.secret_access_key = SECRET_ACCESS_KEY\n ec2.security_groups = SECURITY_GROUPS\n override.ssh.private_key_path = PRIVATE_KEY_PATH\n\n # read region, ami etc from json.\n ec2.region = AWS_CFG['region']\n ec2.subnet_id = AWS_CFG['subnet_id']\n ec2.availability_zone = AWS_CFG['region'] + AWS_CFG['availability_zone']\n ec2.ami = node_config['ami_id']\n ec2.instance_type = node_config['instance_type']\n ec2.private_ip_address = private_ip_address\n ec2.associate_public_ip = true\n\n if node_config.key?('volume_size')\n # Size in GB\n # (untested)\n ec2.block_device_mapping = [{ 'DeviceName' => '/dev/sda1', 'Ebs.VolumeSize' => node_config['volume_size'] }]\n end\n\n override.ssh.username = AWS_CFG['ssh_username']\n\n # Collect tags (can't be longer than 250 chars)\n ec2.tags = ({})\n ec2.tags['Name'] = node_name[0..245]\n ec2.tags['Type'] = 'Hyperledger'\n ec2.tags['Version'] = VERSION\n ec2.tags['Fabric'] = node_config['fabric'].map { |f| f['role'] }.join(',')[0..245]\n end\nend", "def create_server(options = {})\n begin\n add_custom_attributes(options[:server_def])\n server = connection.servers.create(options[:server_def])\n rescue Excon::Error::BadRequest => e\n response = Chef::JSONCompat.from_json(e.response.body)\n if response[\"badRequest\"][\"code\"] == 400\n message = \"Bad request (400): #{response[\"badRequest\"][\"message\"]}\"\n ui.fatal(message)\n else\n message = \"Unknown server error (#{response[\"badRequest\"][\"code\"]}): #{response[\"badRequest\"][\"message\"]}\"\n ui.fatal(message)\n end\n raise CloudExceptions::ServerCreateError, message\n rescue Fog::Errors::Error => e\n raise CloudExceptions::ServerCreateError, e.message\n end\n\n print \"\\n#{ui.color(\"Waiting for server [wait time = #{options[:server_create_timeout]}]\", :magenta)}\"\n\n # wait for it to be ready to do stuff\n server.wait_for(Integer(options[:server_create_timeout])) { print \".\"; ready? 
}\n\n puts(\"\\n\")\n server\n end", "def start\n puts \"Starting #{@resource[:name]}\"\n return if running?\n cmd = [command(:instance_manager)]\n cmd += [\"start\"]\n cmd += [@resource[:name]]\n run(cmd)\n end", "def set_present_and_running\n if active_instance_dir_exists?\n return if running?\n elsif inactive_instance_dir_exists?\n activate_instance_dir\n else\n make_instance\n end\n start\n end", "def run_me\n instance_id = \"\"\n region = \"\"\n # Print usage information and then stop.\n if ARGV[0] == \"--help\" || ARGV[0] == \"-h\"\n puts \"Usage: ruby ec2-ruby-example-elastic-ips.rb \" \\\n \"INSTANCE_ID REGION\"\n # Replace us-west-2 with the AWS Region you're using for Amazon EC2.\n puts \"Example: ruby ec2-ruby-example-elastic-ips.rb \" \\\n \"i-033c48ef067af3dEX us-west-2\"\n exit 1\n # If no values are specified at the command prompt, use these default values.\n elsif ARGV.count.zero?\n instance_id = \"i-033c48ef067af3dEX\"\n # Replace us-west-2 with the AWS Region you're using for Amazon EC2.\n region = \"us-west-2\"\n # Otherwise, use the values as specified at the command prompt.\n else\n instance_id = ARGV[0]\n region = ARGV[1]\n end\n\n ec2_client = Aws::EC2::Client.new(region: region)\n\n unless instance_exists?(ec2_client, instance_id)\n puts \"Cannot find instance with ID '#{instance_id}'. Stopping program.\"\n exit 1\n end\n\n puts \"Addresses for instance with ID '#{instance_id}' before allocating \" \\\n \"Elastic IP address:\"\n describe_addresses_for_instance(ec2_client, instance_id)\n\n puts \"Allocating Elastic IP address...\"\n allocation_id = allocate_elastic_ip_address(ec2_client)\n if allocation_id.start_with?(\"Error\")\n puts \"Stopping program.\"\n exit 1\n else\n puts \"Elastic IP address created with allocation ID '#{allocation_id}'.\"\n end\n\n puts \"Associating Elastic IP address with instance...\"\n association_id = associate_elastic_ip_address_with_instance(\n ec2_client,\n allocation_id,\n instance_id\n )\n if association_id.start_with?(\"Error\")\n puts \"Stopping program. You must associate the Elastic IP address yourself.\"\n exit 1\n else\n puts \"Elastic IP address associated with instance with association ID \" \\\n \"'#{association_id}'.\"\n end\n\n puts \"Addresses for instance after allocating Elastic IP address:\"\n describe_addresses_for_instance(ec2_client, instance_id)\n\n puts \"Releasing the Elastic IP address from the instance...\"\n if elastic_ip_address_released?(ec2_client, allocation_id) == false\n puts \"Stopping program. 
You must release the Elastic IP address yourself.\"\n exit 1\n else\n puts \"Address released.\"\n end\n\n puts \"Addresses for instance after releasing Elastic IP address:\"\n describe_addresses_for_instance(ec2_client, instance_id)\nend", "def create_instance(security_groups, key, user_data, size, region)\n @instances = nil\n Instance.new(@@ec2.run_instances(id, 1, 1, security_groups, key, user_data, nil, size, nil, nil, region).first)\n end", "def createECSService\n puts \"Creating ECS service #{$CLUSTER_NAME}...\"\n puts `ecs-cli compose \\\n --project-name #{$CONTAINER_NAME} \\\n service up \\\n --region #{$REGION} \\\n --cluster #{$CLUSTER_NAME} \\\n --launch-type EC2 \\\n --target-group-arn #{$TARGET_GROUP_ARN} \\\n --container-name #{$CONTAINER_NAME} \\\n --container-port #{$CONTAINER_PORT} \\\n --role ecsServiceRole`\nend", "def create(state)\n self.class.lock!\n state[:hostname] = Socket.gethostname\n logger.info(\"[Localhost] Instance #{instance} ready.\")\n end", "def aws_obj_wait_till_available(obj)\n log \"AWS: waiting for #{obj.class.to_s.split(\"::\").last} '#{obj.id}' status to change to ':available'\"\n begin\n Timeout.timeout(120) do \n sleep 1 while aws_call(\n 'aws_obj_state', \n obj: obj, \n errs: { \n AWS::EC2::Errors::InvalidVpcID::NotFound => 60,\n AWS::EC2::Errors::InvalidSubnetID::NotFound => 60\n } \n ) != :available\n end\n rescue Timeout::Error => e\n raise \"AWS: timeout while waiting for #{obj.class.to_s.split(\"::\").last} '#{obj.id} status to change to ':available'\"\n end\n end", "def create\n @instance = @provider.instances.new(params[:instance])\n @instance.state = \"Building\"\n respond_to do |format|\n if @instance.save\n @instance.create_instance(@provider.connect!)\n format.html { redirect_to cloud_provider_instance_path(@provider,@instance), notice: 'Instance was successfully created.' 
}\n format.json { render json: @instance, status: :created, location: @instance }\n else\n format.html { render action: \"new\" }\n format.json { render json: @instance.errors, status: :unprocessable_entity }\n end\n end\n end", "def index\n\n credentials = Aws::Credentials.new('AKIAJ2JD2EKKFVDSR37A', 'cnZUnzuyYPqUevEPb045VJUnW55VR+rUCQrplzd/')\n ec2 = Aws::EC2::Client.new(\n region: \"us-east-1\",\n credentials: credentials\n )\n #i = ec2.instances.create(:image_id => \"ami-e3106686\")\n resp = ec2.run_instances({\n dry_run: true,\n image_id: \"ami-e3106686\", # required\n min_count: 1, # required\n max_count: 1, # required\n instance_type: \"t1.micro\", # accepts t1.micro, m1.small, m1.medium, m1.large, m1.xlarge, m3.medium, m3.large, m3.xlarge, m3.2xlarge, m4.large, m4.xlarge, m4.2xlarge, m4.4xlarge, m4.10xlarge, t2.micro, t2.small, t2.medium, t2.large, m2.xlarge, m2.2xlarge, m2.4xlarge, cr1.8xlarge, i2.xlarge, i2.2xlarge, i2.4xlarge, i2.8xlarge, hi1.4xlarge, hs1.8xlarge, c1.medium, c1.xlarge, c3.large, c3.xlarge, c3.2xlarge, c3.4xlarge, c3.8xlarge, c4.large, c4.xlarge, c4.2xlarge, c4.4xlarge, c4.8xlarge, cc1.4xlarge, cc2.8xlarge, g2.2xlarge, cg1.4xlarge, r3.large, r3.xlarge, r3.2xlarge, r3.4xlarge, r3.8xlarge, d2.xlarge, d2.2xlarge, d2.4xlarge, d2.8xlarge\n placement: {\n tenancy: \"default\", # accepts default, dedicated\n },\n\n block_device_mappings: [\n {\n virtual_name: \"String\",\n device_name: \"String\",\n ebs: {\n snapshot_id: \"String\",\n volume_size: 1,\n delete_on_termination: true,\n volume_type: \"standard\", # accepts standard, io1, gp2\n iops: 1,\n encrypted: true,\n },\n\n },\n ],\n monitoring: {\n enabled: true, # required\n },\n disable_api_termination: true,\n instance_initiated_shutdown_behavior: \"stop\", # accepts stop, terminate\n network_interfaces: [\n {\n delete_on_termination: true,\n private_ip_addresses: [\n {\n private_ip_address: \"172.31.2.177\", # required\n primary: true,\n },\n ],\n secondary_private_ip_address_count: 1,\n associate_public_ip_address: true,\n },\n ],\n ebs_optimized: true,\n })\n @ec2_instances = Ec2Instance.all\n end", "def create_vm(agent_id, stemcell_id, vm_type, network_spec, disk_locality = nil, environment = nil)\n with_thread_name(\"create_vm(#{agent_id}, ...)\") do\n # do this early to fail fast\n\n target_groups = vm_type.fetch('lb_target_groups', [])\n if target_groups.length > 0\n alb_accessible?\n end\n\n requested_elbs = vm_type.fetch('elbs', [])\n if requested_elbs.length > 0\n elb_accessible?\n end\n\n stemcell = StemcellFinder.find_by_id(@ec2_resource, stemcell_id)\n\n begin\n instance, block_device_agent_info = @instance_manager.create(\n agent_id,\n stemcell.image_id,\n vm_type,\n network_spec,\n (disk_locality || []),\n environment,\n options,\n )\n\n target_groups.each do |target_group_name|\n target_group = LBTargetGroup.new(client: @alb_client, group_name: target_group_name)\n target_group.register(instance.id)\n end\n\n requested_elbs.each do |requested_elb_name|\n requested_elb = ClassicLB.new(client: @elb_client, elb_name: requested_elb_name)\n requested_elb.register(instance.id)\n end\n\n logger.info(\"Creating new instance '#{instance.id}'\")\n\n NetworkConfigurator.new(network_spec).configure(@ec2_resource, instance)\n\n registry_settings = initial_agent_settings(\n agent_id,\n network_spec,\n environment,\n stemcell.root_device_name,\n block_device_agent_info\n )\n registry.update_settings(instance.id, registry_settings)\n\n instance.id\n rescue => e # is this rescuing too much?\n logger.error(%Q[Failed 
to create instance: #{e.message}\\n#{e.backtrace.join(\"\\n\")}])\n instance.terminate(fast_path_delete?) if instance\n raise e\n end\n end\n end", "def spawn( options = {} )\n assert_valid_keys( options, :wait )\n wait = options[:wait]\n\n if ! wait then\n Hive::Worker.spawn kind, registry: registry, policy: policy, name: name\n return\n end\n\n before = registry.checked_workers( policy ).live\n\n Hive::Worker.spawn kind, registry: registry, policy: policy, name: name\n\n Hive::Idler.wait_until( 10 ) do\n after = registry.checked_workers( policy ).live\n diff = ( after - before ).select { |k| k.host == Hive::Key.local_host }\n diff.size > 0\n end\n end", "def create_instance(credentials, image_id, opts)\n new_vapp = nil\n vapp_opts = {} #assemble options to pass to Fog::Terremark::Real.instantiate_vapp_template\n terremark_hwp = hardware_profiles(credentials, {:name => 'default'}).first #sanity check values against default\n name = opts[:name]\n if not name\n name = \"inst#{Time.now.to_i}\"\n end\n if name.length > USER_NAME_MAX\n raise \"Parameter name must be #{USER_NAME_MAX} characters or less\"\n end\n unless ( (terremark_hwp.include?(:cpu, opts[:hwp_cpu].to_i)) &&\n (terremark_hwp.include?(:memory, opts[:hwp_memory].to_i)) ) then\n raise Deltacloud::Exceptions::ValidationFailure.new(\n StandardError.new(\"Error with cpu and/or memory values. you said cpu->#{opts[:hwp_cpu]} and mem->#{opts[:hwp_memory]}\")\n )\n end\n vapp_opts['cpus'] = opts[:hwp_cpu]\n vapp_opts['memory'] = opts[:hwp_memory]\n safely do\n terremark_client = new_client(credentials)\n#######\n#FIXME# what happens if there is an issue getting the new vapp id? (eg even though created succesfully)\n#######\n vapp_id = terremark_client.instantiate_vapp_template(name, image_id, vapp_opts).body['href'].split('/').last\n new_vapp = terremark_client.get_vapp(vapp_id)\n return convert_instance(new_vapp, terremark_client, credentials.user) #return an Instance object\n end\n end", "def docker_wait_ready\n bash 'docker-wait-ready' do\n code <<-EOF\n timeout=0\n while [ $timeout -lt 20 ]; do\n ((timeout++))\n #{docker_cmd} ps | head -n 1 | grep ^CONTAINER\n if [ $? 
-eq 0 ]; then\n break\n fi\n sleep 1\n done\n [[ $timeout -eq 20 ]] && exit 1\n exit 0\n EOF\n not_if \"#{docker_cmd} ps | head -n 1 | grep ^CONTAINER\"\n end\n end", "def create_instances(count)\n fail DTK::Error::Usage, \"Attribute 'admin_state' cannot be set to powered_off if node not created\" if admin_state_powered_off?\n aws_api_operation(:create).create_instances(count)\n end", "def create_instance(credentials, image_id, opts)\n racks = new_client( credentials )\n hwp_id = opts[:hwp_id] || 1\n name = Time.now.to_s\n if (opts[:name]) then name = opts[:name] end\n safely do\n return convert_srv_to_instance(racks.start_server(image_id, hwp_id, name))\n end\n end", "def run_instances(num_vms, opts, roles, disks)\n # Make a copy (the options are a simple hash so shallow copy does the\n # trick) to not modify the original.\n options = opts.clone\n options['num_vms'] = num_vms.to_s\n\n uri = URI(\"http://#{@ip}:#{SERVER_PORT}/instances\")\n headers = {'Content-Type' => 'application/json',\n 'AppScale-Secret' => @secret}\n request = Net::HTTP::Post.new(uri.path, headers)\n\n request.body = JSON.dump(options)\n\n run_result = JSON.parse(make_call(request, uri))\n Djinn.log_debug(\"[IM] Run instances info says [#{run_result}]\")\n operation_id = run_result['operation_id']\n\n vm_info = {}\n loop {\n begin\n describe_result = describe_operation(operation_id)\n rescue Djinn::FailedNodeException => error\n Djinn.log_warn(\n \"[IM] Error describing run instances operation #{operation_id}. \" \\\n \"Error: #{error.message}\")\n next\n end\n Djinn.log_debug(\"[IM] Describe run operation has vm_info \" \\\n \"#{describe_result['vm_info'].inspect}.\")\n\n if describe_result['state'] == 'success'\n vm_info = describe_result['vm_info']\n break\n elsif describe_result['state'] == 'failed'\n raise AppScaleException.new(describe_result['reason'])\n end\n Kernel.sleep(SMALL_WAIT)\n }\n\n # ip:role:instance-id\n instances_created = []\n vm_info['public_ips'].each_index { |index|\n tmp_roles = roles[index]\n tmp_roles = 'open' if roles[index].nil?\n instances_created << {\n 'public_ip' => vm_info['public_ips'][index],\n 'private_ip' => vm_info['private_ips'][index],\n 'roles' => tmp_roles,\n 'instance_id' => vm_info['instance_ids'][index],\n 'disk' => disks[index],\n 'instance_type' => options['instance_type']\n }\n }\n\n instances_created\n end", "def create\n\t\tregion = resource[:availability_zone].to_s.gsub(/.$/,'') \n\t\tcompute = Fog::Compute.new(:provider => 'aws', :region => \"#{region}\")\n\t\tprint \"ebsvol[aws]->create: Region is #{region}\\n\" if $debug\n\t\tprint \"ebsvol[aws]->create: Availability_zone is #{resource[:availability_zone]}\\n\" if $debug\n\t\t# create the requested volume\n\t\tresponse = compute.create_volume(resource[:availability_zone],resource[:size],resource[:snapshot])\t\n\t\tif (response.status == 200)\n\t\t\tvolumeid = response.body['volumeId']\n\t\t\tprint \"ebsvol[aws]->create: I created volume #{volumeid}.\\n\" if $debug\n\t\t\t# now tag the volume with volumename so we can identify it by name\n\t\t\t# and not the volumeid\n\t\t\tresponse = compute.create_tags(volumeid,{ :Name => resource[:volume_name] })\n\t\t\tif (response.status == 200)\n\t\t\t\tprint \"ebsvol[aws]->create: I tagged #{volumeid} with Name = #{resource[:volume_name]}\\n\" if $debug\n\t\t\tend\n\t\t\t# Check if I need to attach it to an ec2 instance.\n\t\t\tattachto = resource[:attached_to].to_s\n\t\t\tprint \"attachto is #{attachto}\\n\" if $debug\n\t\t\tif ( attachto != '' )\n\t\t\t\tif ( attachto == 
'me')\n\t\t\t\t\tinstance = instanceinfo(compute,myname(compute))\n\t\t\t\telse\n\t\t\t\t\tinstance = instanceinfo(compute,attachto)\n\t\t\t\tend\n\t\t\t\tif ( resource[:device] != nil )\n\t\t\t\t\t# try to attach the volume to requested instance\n\t\t\t\t\tprint \"attach the volume\\n\" if $debug\n\t\t\t\t\tvolume = volinfo(compute,resource[:volume_name])\n\t\t\t\t\tattachvol(compute,volume,instance,resource[:device])\n\t\t\t\telse\n\t\t\t\t\traise \"ebsvol[aws]->create: Sorry, I can't attach a volume with out a device to attach to!\"\n\t\t\t\tend\n\t\t\tend\n\t\telse\n\t\t\traise \"ebsvol[aws]->create: I couldn't create the ebs volume, sorry!\"\n\t\tend\n\tend", "def ec2\n @ec2 ||= aws_init { AWS::EC2.new }\n end", "def launch_configuration(asg, sg_tcp_80_priv, sg_tcp_22_priv, instance_type, ami)\n asg.create_launch_configuration(launch_configuration_name: 'lc-nginx_auto',\n associate_public_ip_address: false,\n # key_name: 'UbuntuKeyPair', # TODO: Change/Remove\n image_id: ami, # Ubuntu base AMI from ubuntu.com\n instance_type: instance_type,\n security_groups: [sg_tcp_80_priv], # sg_tcp_22_priv\n instance_monitoring: { enabled: true }, # true=CloudWatch monitoring (60sec)\n user_data: Base64.encode64(\"#!/bin/bash -ex\\n\"\\\n \"export DEBIAN_FRONTEND=noninteractive\\n\"\\\n \"apt-get -q=2 update && apt-get -q=2 upgrade\\n\"\\\n \"apt-get -q=2 install nginx\\n\"\\\n \"URL=http://169.254.169.254/latest/meta-data\\n\"\\\n \"cat >> /var/www/html/index.html <<EOF\\n\"\\\n \"<meta http-equiv=refresh content=2 /><br>\\n\"\\\n \"FROM: Launch Configuration / ASG<br>\\n\"\\\n \"INSTANCE ID: $(curl $URL/instance-id)<br>\\n\"\\\n \"PUBLIC IP: [NONE], using NAT instances<br>\\n\"\\\n \"INTERNAL IP: $(curl $URL/local-ipv4)<br>\\n\"\\\n 'EOF'))\n sleep 5\nend", "def aws_wait_for_peers\n loop do\n aws_find_peers\n\n Chef::Log.info(\"etcd_service[#{ new_resource.name }] Found \"\\\n \"#{ new_resource.peers.length + 1 }/#{ new_resource.quorum } AWS \"\\\n \"peers, #{ active_peers.length } active\")\n\n break if new_resource.peers.length >= (new_resource.quorum - 1)\n break if active_peers.length > 0\n sleep 5\n end\n end", "def expand_cloud_if_necessary(force=false)\n if can_start_a_new_instance? && should_expand_cloud?(force)\n vputs \"Expanding the cloud based on load\"\n @num = 1\n @num.times do |i|\n list_of_pending_instances.size == 0 ? 
request_launch_one_instance_at_a_time : wait(\"5.seconds\") \n reset!\n vputs \"request_launch_new_instances: #{@num}\"\n provision_slaves_from_n(@num)\n after_launched\n end\n end\n end", "def provision\n attempts = @options[:timeout].to_i / SLEEPWAIT\n start = Time.now\n\n test_group_identifier = \"beaker-#{start.to_i}-\"\n\n # get machineType resource, used by all instances\n machineType = @gce_helper.get_machineType(start, attempts)\n\n # set firewall to open pe ports\n network = @gce_helper.get_network(start, attempts)\n @firewall = test_group_identifier + generate_host_name\n @gce_helper.create_firewall(@firewall, network, start, attempts)\n\n @logger.debug(\"Created Google Compute firewall #{@firewall}\")\n\n\n @hosts.each do |host|\n if host[:image]\n gplatform = host[:image]\n elsif host[:platform]\n gplatform = Platform.new(host[:platform])\n else\n raise('You must specify either :image or :platform, or both as necessary')\n end\n\n img = @gce_helper.get_latest_image(gplatform, start, attempts)\n\n unique_host_id = test_group_identifier + generate_host_name\n\n host['diskname'] = unique_host_id\n disk = @gce_helper.create_disk(host['diskname'], img, start, attempts)\n @logger.debug(\"Created Google Compute disk for #{host.name}: #{host['diskname']}\")\n\n # create new host name\n host['vmhostname'] = unique_host_id\n #add a new instance of the image\n instance = @gce_helper.create_instance(host['vmhostname'], img, machineType, disk, start, attempts)\n @logger.debug(\"Created Google Compute instance for #{host.name}: #{host['vmhostname']}\")\n\n # add metadata to instance, if there is any to set\n mdata = format_metadata\n unless mdata.empty?\n @gce_helper.setMetadata_on_instance(host['vmhostname'], instance['metadata']['fingerprint'],\n mdata,\n start, attempts)\n @logger.debug(\"Added tags to Google Compute instance #{host.name}: #{host['vmhostname']}\")\n end\n\n # get ip for this host\n host['ip'] = instance['networkInterfaces'][0]['accessConfigs'][0]['natIP']\n\n # configure ssh\n default_user = host['user']\n host['user'] = 'google_compute'\n\n copy_ssh_to_root(host, @options)\n enable_root_login(host, @options)\n host['user'] = default_user\n\n # shut down connection, will reconnect on next exec\n host.close\n\n @logger.debug(\"Instance ready: #{host['vmhostname']} for #{host.name}}\")\n end\n end", "def build(instance, env)\n unless vm = env.vms[instance]\n puts \"invalid VM: #{instance}\"\n else\n if vm.created?\n puts \"VM: #{instance} was already created\"\n else\n # be very fault tolerant :)\n begin\n # this will always fail\n vm.up(:provision => true)\n rescue Exception => e\n puts e.class\n puts e\n end\n end\n end\nend", "def build(instance, env)\n unless vm = env.vms[instance]\n puts \"invalid VM: #{instance}\"\n else\n if vm.created?\n puts \"VM: #{instance} was already created\"\n else\n # be very fault tolerant :)\n begin\n # this will always fail\n vm.up(:provision => true)\n rescue Exception => e\n puts e.class\n puts e\n end\n end\n end\nend", "def groomEc2(instance)\n\t\t\treturn MU::Server.groomEc2(@server, instance, @deploy.keypairname, environment: @deploy.environment, sync_wait: @server['dns_sync_wait'])\n\t\tend", "def wait_until_ready!\n Timeout.timeout(timeout) do\n loop do\n result = shell_out('docker info')\n break if Array(result.valid_exit_codes).include?(result.exitstatus)\n Chef::Log.debug(\"Docker daemon is not running - #{result.stdout}\\n#{result.stderr}\")\n sleep(0.5)\n end\n end\n rescue Timeout::Error\n raise DockerNotReady.new(timeout), 
'docker timeout exceeded'\n end", "def wait_until_ready!\n Timeout.timeout(timeout) do\n loop do\n result = shell_out('docker info')\n break if Array(result.valid_exit_codes).include?(result.exitstatus)\n Chef::Log.debug(\"Docker daemon is not running - #{result.stdout}\\n#{result.stderr}\")\n sleep(0.5)\n end\n end\n rescue Timeout::Error\n raise DockerNotReady.new(timeout), 'docker timeout exceeded'\n end", "def exists?\n @aws_instance.exists? && @aws_instance.state.name != 'terminated'\n end", "def add_instance(instance)\n register_response = client.register_instances_with_load_balancer(load_balancer_name: name,\n instances: [{instance_id: instance.ec2_instance_id}])\n remaining_instance_count = register_response.instances.size\n puts \"Added #{instance.hostname} to ELB #{name}. Attached instances: #{remaining_instance_count}\".light_blue\n _wait_for_instance_health_check(instance)\n end", "def create(state)\n # we are checking the config here instead of when the class is loaded because\n # test-kitchen doesn't have the platform object instantiated until we call create\n raise Kitchen::UserError, /Error. Only windows is supported./ unless windows_os?\n newhost = poolsclosed_machine\n raise Kitchen::InstanceFailure, /Error, no available instances in poolsclosed/ if newhost.nil?\n state[:hostname] = newhost\n end", "def start(num_requested=1, workitem_id=nil, user_data=nil)\n \n # find instances with that image id\n logger.info \"Considering request to start #{num_requested} #{role.name} instances \"\n\n total_running = (instances.select{ |i| i.running? }).size\n \n node_array = instances.select{ |i| i.available? }\n\n num_to_start = 0\n \n if total_running >= max\n logger.info \"Instance limit for #{num_requested} reached. Will not start any more instances.\"\n #lets reserve all nodes so they don't get shutdown in the meantime.\n node_array.each do |node|\n node.state = 'reserved'\n node.save\n end\n WorkitemHelper.send_reply(workitem_id) unless workitem_id.nil?\n \n else\n logger.info \"Reserving for: #{num_requested} #{role.name} instances \"\n # reserve those nodes that are running, and then start / create the balance\n node_array.each do |node|\n break if num_requested < 1\n node.state = 'reserved'\n node.save\n num_requested = num_requested.to_i - 1\n \n end\n\n #now lets start the rest of the nodes needed, assuming we don't go past the max limit!\n #first lets get a count of how many nodes are LAUNCHED, IDLE, BUSY, or RESERVED\n total_left = max - total_running\n num_to_start = total_left < num_requested.to_i ? total_left : num_requested.to_i\n\n \n #start num_to_start instances via Instance. Enqueue these in delayed job because they may take a while\n start_and_create_instances(num_to_start, user_data) unless num_to_start < 1\n \n #now also enqueue the workitem reply if needed\n WorkItemHelper.send_reply(workitem_id) unless workitem_id.nil?\n \n logger.info \"Starting #{num_to_start} more #{ami_id} instances. Note that this may take a moment. \"\n EventLog.info \"Starting #{num_to_start} more #{ami_id} instances. Note that this may take a moment. \"\n \n end\n \n return num_to_start\n \n \n end", "def can_start_a_new_instance?\n maximum_number_of_instances_are_not_running? 
&& list_of_pending_instances.size == 0\n end", "def register_hailstorm_ami(instance)\n new_ami = ec2.images.create(\n name: ami_id,\n instance_id: instance.instance_id,\n description: 'AMI for distributed performance testing with Hailstorm'\n )\n wait_for(\"Hailstorm AMI #{ami_id} to be created\") { new_ami.state == :available }\n raise(Hailstorm::AmiCreationFailure.new(self.region, new_ami.state_reason)) unless new_ami.state == :available\n new_ami.id\n end", "def create_agent_ami\n return unless ami_creation_needed?\n # AMI does not exist\n logger.info(\"Creating agent AMI for #{self.region}...\")\n clean_instance = ec2.instances.create(new_ec2_instance_attrs(base_ami, [find_security_group.id]))\n begin\n perform_instance_checks(clean_instance)\n build_ami(clean_instance)\n rescue Exception => ex\n logger.error(\"Failed to create instance on #{self.region}: #{ex.message}, terminating temporary instance...\")\n raise(ex)\n ensure\n terminate_instance(clean_instance) if clean_instance\n end\n end", "def clc_test_server\n puts \"clc_test_server\"\n server = clc_service.servers.find { |s| s.Name == clc_server_name }\n unless server\n server = clc_service.servers.create({\n :Name => clc_server_name\n }.merge(clc_set_test_server_attributes))\n server.wait_for { ready? }\n end\n server\nend", "def not_cloud_start(cloud_type, vm_ips, vm_ip_roles, vm_img_roles, pm_up)\n \n # Try to find one virtual machine that is already running\n vm_ips.each do |vm|\n if alive?(vm)\n # This machine is running\n puts \"#{vm} is up\"\n vm_leader = vm\n\n # Inform the user of this machine\n puts \"#{vm_leader} is already running\"\n puts \"Do 'puppet apply manifest.pp' on #{vm_leader}\"\n return\n end\n end\n \n # No machines are running\n puts \"All virtual machines are stopped\"\n puts \"Starting one of them...\"\n \n # Start one of the virtual machines\n vm = vm_ips[rand(vm_ips.count)] # Choose one randomly\n puts \"Starting #{vm} ...\"\n \n @vm_manager.start_vm(vm, vm_ip_roles, vm_img_roles, pm_up)\n \n # That virtual machine will be the \"leader\" (actually the chosen one)\n vm_leader = vm\n \n # Copy important files to it\n #copy_cloud_files(vm_leader, cloud_type)\n \n puts \"#{vm_leader} is being started\"\n puts \"Once started, do 'puppet apply manifest.pp' on #{vm_leader}\"\n\n end", "def set_ec2_instance\n @ec2_instance = Ec2Instance.find(params[:id])\n end", "def create_volume(snapshot_id, size, availability_zone, timeout, volume_type, piops)\n availability_zone ||= instance_availability_zone\n\n # Sanity checks so we don't shoot ourselves.\n raise \"Invalid volume type: #{volume_type}\" unless ['standard', 'gp2', 'io1'].include?(volume_type)\n\n # PIOPs requested. Must specify an iops param and probably won't be \"low\".\n if volume_type == 'io1'\n raise 'IOPS value not specified.' unless piops >= 100\n end\n\n # Shouldn't see non-zero piops param without appropriate type.\n if piops > 0\n raise 'IOPS param without piops volume type.' unless volume_type == 'io1'\n end\n\n create_volume_opts = { :volume_type => volume_type }\n # TODO: this may have to be casted to a string. rightaws vs aws doc discrepancy.\n create_volume_opts[:iops] = piops if volume_type == 'io1'\n\n nv = ec2.create_volume(snapshot_id, size, availability_zone, create_volume_opts)\n Chef::Log.debug(\"Created new volume #{nv[:aws_id]}#{snapshot_id ? 
\" based on #{snapshot_id}\" : \"\"}\")\n\n # block until created\n begin\n Timeout::timeout(timeout) do\n while true\n vol = volume_by_id(nv[:aws_id])\n if vol && vol[:aws_status] != \"deleting\"\n if [\"in-use\", \"available\"].include?(vol[:aws_status])\n Chef::Log.info(\"Volume #{nv[:aws_id]} is available\")\n break\n else\n Chef::Log.debug(\"Volume is #{vol[:aws_status]}\")\n end\n sleep 3\n else\n raise \"Volume #{nv[:aws_id]} no longer exists\"\n end\n end\n end\n rescue Timeout::Error\n raise \"Timed out waiting for volume creation after #{timeout} seconds\"\n end\n\n nv[:aws_id]\n end", "def create_server\n return nil if created? # only create a server if it does not already exist\n\n fog_description = fog_description_for_launch\n Chef::Log.debug(JSON.generate(fog_description)) # .dup.tap{|hsh| hsh[:user_data] = \"...\" }\n @fog_server = ClusterChef.connection.servers.create(fog_description)\n end", "def create_server(name, region = :'eu-central')\n response = API::Server.create(token, name, region)\n id = JSON.parse(response)['id'].to_i\n sleep 0.1 until (server = @servers[id])\n debug \"Successfully created server #{server.id} with name #{server.name}\"\n server\n end", "def request_termination\n instance_id = AlgRunner.fetch_url(INSTANCE_ID_URL)\n\n logger.info { \"Requesting terminating for #{instance_id}\" }\n @bunny.queue INSTANCE_SERVICE_QUEUE\n @bunny.exchange('').publish(\n { :instance_id => instance_id.to_s, :action => :termination }.to_yaml, \n :key => INSTANCE_SERVICE_QUEUE, :routing_key => INSTANCE_SERVICE_QUEUE\n )\n end", "def start_cloud(resource, vm_ips, vm_ip_roles)\n\n puts \"Starting the cloud\"\n \n # SSH keys have already been distributed when machines were monitorized,\n # so we do not have to distribute them again\n \n # Start torque cloud\n return torque_cloud_start(resource, vm_ip_roles)\n\nend", "def boot_aws_inception_vm\n say \"\" # glowing whitespace\n\n unless settings[\"inception\"][\"ip_address\"]\n say \"Provisioning IP address for inception VM...\"\n settings[\"inception\"][\"ip_address\"] = acquire_ip_address\n save_settings!\n end\n\n unless settings[\"inception\"] && settings[\"inception\"][\"server_id\"]\n username = \"ubuntu\"\n size = \"m1.small\"\n ip_address = settings[\"inception\"][\"ip_address\"]\n key_name = settings[\"inception\"][\"key_pair\"][\"name\"]\n say \"Provisioning #{size} for inception VM...\"\n inception_vm_attributes = {\n :groups => [settings[\"inception\"][\"security_group\"]],\n :key_name => key_name,\n :private_key_path => inception_vm_private_key_path,\n :flavor_id => size,\n :bits => 64,\n :username => \"ubuntu\",\n :public_ip_address => ip_address\n }\n if vpc?\n raise \"must create subnet before creating VPC inception VM\" unless settings[\"subnet\"] && settings[\"subnet\"][\"id\"]\n inception_vm_attributes[:subnet_id] = settings[\"subnet\"][\"id\"]\n inception_vm_attributes[:private_ip_address] = \"10.0.0.5\"\n end\n server = provider.bootstrap(inception_vm_attributes)\n unless server\n error \"Something mysteriously cloudy happened and fog could not provision a VM. 
Please check your limits.\"\n end\n\n settings[\"inception\"].delete(\"create_new\")\n settings[\"inception\"][\"server_id\"] = server.id\n settings[\"inception\"][\"username\"] = username\n save_settings!\n end\n\n server ||= fog_compute.servers.get(settings[\"inception\"][\"server_id\"])\n\n unless settings[\"inception\"][\"disk_size\"]\n disk_size = DEFAULT_INCEPTION_VOLUME_SIZE # Gb\n device = \"/dev/sdi\"\n provision_and_mount_volume(server, disk_size, device)\n\n settings[\"inception\"][\"disk_size\"] = disk_size\n settings[\"inception\"][\"disk_device\"] = device\n save_settings!\n end\n\n # settings[\"inception\"][\"host\"] is used externally to determine\n # if an inception VM has been assigned already; so we leave it\n # until last in this method to set this setting.\n # This way we can always rerun the CLI and rerun this method\n # and idempotently get an inception VM\n unless settings[\"inception\"][\"host\"]\n settings[\"inception\"][\"host\"] = server.dns_name\n save_settings!\n end\n\n confirm \"Inception VM has been created\"\n display_inception_ssh_access\n end", "def start_instance()\n system(\"sh wso2wsas-SNAPSHOT/bin/wso2server.sh\")\nend", "def start_pvm_instance(instance_id)\n post(\n \"cloud-instances/#{guid}/pvm-instances/#{instance_id}/action\",\n {\"action\" => \"start\"}.to_json\n )\n end" ]
[ "0.80714214", "0.7141925", "0.69733226", "0.68613166", "0.6840715", "0.67878777", "0.6746286", "0.66830003", "0.65129304", "0.64713836", "0.6448636", "0.64054894", "0.63889223", "0.6353564", "0.6341768", "0.63023835", "0.6283159", "0.6266867", "0.6251926", "0.6223107", "0.6221768", "0.6221522", "0.61737025", "0.6150497", "0.6147481", "0.61231077", "0.6082532", "0.60806394", "0.6049867", "0.6048321", "0.60215193", "0.59815717", "0.59814554", "0.59765893", "0.59707814", "0.5969033", "0.5947549", "0.5947549", "0.5938321", "0.5912914", "0.589854", "0.589854", "0.5896786", "0.5888876", "0.58786154", "0.587842", "0.5873991", "0.58549476", "0.5851012", "0.57839006", "0.5775371", "0.57666487", "0.5756208", "0.5752757", "0.574375", "0.56790304", "0.56757796", "0.5675714", "0.56582993", "0.56559014", "0.56512386", "0.5648604", "0.562829", "0.56166613", "0.56165886", "0.56006813", "0.5562243", "0.55614185", "0.5542418", "0.55181044", "0.55133295", "0.5510776", "0.5504646", "0.550274", "0.5498318", "0.54956484", "0.5494474", "0.54869086", "0.54869086", "0.5482713", "0.54817766", "0.54817766", "0.5474745", "0.5460692", "0.5436362", "0.5430043", "0.5427949", "0.54244035", "0.5423858", "0.54156446", "0.54066724", "0.54015505", "0.5401357", "0.53994215", "0.5397237", "0.53912544", "0.5385893", "0.5384926", "0.5383914", "0.53767115" ]
0.53971845
95
Delete EC2 instance ("terminate" in AWS language) and wait until it reports as terminated
def delete_vm(instance_id) with_thread_name("delete_vm(#{instance_id})") do logger.info("Deleting instance '#{instance_id}'") @instance_manager.find(instance_id).terminate(fast_path_delete?) end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cleanup(vpc_id, client, asg, elbv1, elbv2, region)\n begin\n response = client.describe_vpcs(vpc_ids: [vpc_id])\n rescue Aws::EC2::Errors::InvalidVpcIDNotFound => e\n puts \"Error: vpc_id [#{vpc_id}] does not exist... exiting.\"\n puts \"Make sure you passed the correct region on the command-line if it's not in the default us-west-2\"\n exit 0\n end\n\n # 1) Delete Auto Scaling group\n begin\n asg.delete_auto_scaling_group(auto_scaling_group_name: 'asg-nginx_auto', force_delete: true)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n sleep 2\n # TODO: properly wait here until ASG is fully deleted before proceeding...\n begin\n asg.delete_launch_configuration(launch_configuration_name: 'lc-nginx_auto')\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n sleep 2\n\n # 2) instance handling\n term_error = 0\n instances_to_term = []\n terminate_states = %w[pending running shutting-down stopping stopped]\n puts 'Checking for nginx and nat instances, of all states...'\n begin\n response = client.describe_instances(filters: [{name: 'tag:Name', values: ['autoASG nginx server',\n 'nat instance']}])\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n response.reservations.each do |reservation|\n reservation.instances.each do |instance|\n puts \"Check #1: instance-id=[#{instance.instance_id}] AMI=[#{instance.image_id}] state=[#{instance.state.name}]\"\n instances_to_term.push(instance.instance_id) if terminate_states.include? instance.state.name\n end\n end\n\n if instances_to_term.any? # If array has content, proceed.\n term_error = 0\n instance_cnt = instances_to_term.length # TODO: Check before/after termination attempts and ensure = 0\n before_terminate = Time.now\n begin\n client.wait_until(:instance_terminated,instance_ids: instances_to_term) do |wait|\n wait.interval = 8 # Seconds between polling attempts. Same as wait.delay\n wait.max_attempts = 15 # Polling attempts before giving up. Wait time is 15*8=120 seconds.\n puts \"Attempting to terminate [#{instance_cnt}] instance(s), please wait up to 120 seconds...\"\n begin\n client.terminate_instances(instance_ids: instances_to_term)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n end\n rescue Aws::Waiters::Errors::WaiterFailed => error\n term_error = 1 # TODO: Do something more reliable if this ever occurs.\n puts \"Exception: failed waiting for instance running: #{error.message}\"\n end\n puts \"#{Time.now - before_terminate.to_time} seconds elapsed while terminating.\" if term_error.zero?\n end\n\n if term_error.zero?\n # Debug with final instance check... this shouldn't print anything aside from terminated instances.\n begin\n response = client.describe_instances(filters: [{name: 'tag:Name', values: ['autoASG nginx server',\n 'nat instance']}])\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n response.reservations.each do |reservation|\n reservation.instances.each do |instance|\n puts \"Check #2: instance-id=[#{instance.instance_id}] AMI=[#{instance.image_id}] state=[#{instance.state.name}]\"\n end\n end\n end\n\n puts 'Sleeping for 5 seconds...'\n sleep 5\n\n # 3) Delete listeners\n printf 'Deleting ALB listeners... 
'\n begin\n response = elbv2.describe_load_balancers(names: ['AutoALB'])\n alb_arn = response.load_balancers[0].load_balancer_arn\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n sleep 2\n begin\n response = elbv2.describe_listeners(load_balancer_arn: alb_arn)\n listener_arn = response.listeners[0].listener_arn\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n begin\n elbv2.delete_listener(listener_arn: listener_arn)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n sleep 2\n puts 'done.'\n\n # 4) Delete target groups\n printf 'Deleting ALB target groups... '\n begin\n response = elbv2.describe_target_groups(names: ['AutoALBTargetGroup'])\n target_group_arn = response.target_groups[0].target_group_arn\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n begin\n elbv2.delete_target_group(target_group_arn: target_group_arn)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n sleep 5\n puts 'done.'\n\n # 5) Delete load balancers\n printf 'Deleting application and classic load balancers... '\n begin\n elbv1.delete_load_balancer(load_balancer_name: 'AutoCLB')\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n begin\n elbv2.delete_load_balancer(load_balancer_arn: alb_arn)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n puts 'done.'\n\n puts 'Sleeping for 120 seconds, enough time for the ASG to fully disappear before deleting subnets.'\n sleep 120\n\n # 6) subnets\n begin\n response = client.describe_subnets(filters: [{name: 'vpc-id', values: [vpc_id]}])\n response.subnets.each do |sn|\n printf \"Removing subnet: #{sn.subnet_id}, #{sn.vpc_id}, #{sn.cidr_block}, #{sn.availability_zone}; \"\n client.delete_subnet(subnet_id: sn.subnet_id)\n puts 'Done.'\n end\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n # 7) security groups\n begin\n response = client.describe_security_groups(filters: [{name: 'vpc-id', values: [vpc_id]}])\n response.security_groups.each do |sg|\n next if sg.group_name == 'default' # This name is reserved by aws and cannot be removed.\n printf \"Removing security group: #{sg.group_id}, #{sg.vpc_id}, #{sg.group_name}, Desc='#{sg.description}'; \"\n client.delete_security_group(group_id: sg.group_id)\n puts 'Done.'\n end\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n # 8) Internet gateway\n begin\n response = client.describe_internet_gateways(filters: [{name: 'attachment.vpc-id', values: [vpc_id]}])\n response.internet_gateways.each do |igw|\n printf \"Detaching Internet gateway: #{igw.internet_gateway_id} <-> #{igw.attachments[0].vpc_id}; \"\n client.detach_internet_gateway(internet_gateway_id: igw.internet_gateway_id, vpc_id: igw.attachments[0].vpc_id)\n printf \"Removing Internet gateway: #{igw.internet_gateway_id} <-> #{igw.attachments[0].vpc_id}; \"\n client.delete_internet_gateway(internet_gateway_id: igw.internet_gateway_id)\n puts 'Done.'\n end\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n # 9) Route tables\n begin\n response = client.describe_route_tables(filters: [{name: 'vpc-id', values: [vpc_id]}])\n response.route_tables.each do |rtl|\n if rtl.associations[0] != nil\n if rtl.associations[0].route_table_association_id 
!= nil\n puts \"Skipping #{rtl.associations[0].route_table_association_id}, causes exception.\"\n end\n next\n end\n printf \"Removing route table: #{rtl.route_table_id}, #{rtl.vpc_id}; \"\n client.delete_route_table(route_table_id: rtl.route_table_id)\n puts 'Done.'\n end\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n # 10) Delete VPC\n begin\n puts \"Attempting to delete VPC [#{vpc_id}]\"\n client.delete_vpc(vpc_id: vpc_id)\n rescue StandardError => e\n puts \"Exception caught: #{e}, attempting to complete.\"\n end\n\n begin\n response = client.describe_vpcs(vpc_ids: [vpc_id])\n rescue Aws::EC2::Errors::InvalidVpcIDNotFound => e\n puts \"Deletion of vpc_id=[#{vpc_id}] was a success.\"\n exit 0\n end\n\n # Shouldn't get here, but sometimes does due to the ASG taking too long to\n # release subnet dependencies, or if running a cleanup before a launch has\n # fully completed, etc.\n puts 'Oops. Termination sometimes fails for various reasons.'\n puts 'Note: running this twice usually clears up any lingering dependencies.'\n puts ' With that said, there is still work to be done. Execute again.'\n exit 0\nend", "def terminate_instance!(instance_id=nil)\n ec2.terminate_instances(:instance_id => instance_id)\n end", "def terminate(instance, decrement=false)\n Log.log \"Detaching #{instance.instance_id.light_yellow} from ASG\"\n client.detach_instances(\n instance_ids: [ instance.instance_id ],\n auto_scaling_group_name: asg_name,\n should_decrement_desired_capacity: false)\n\n # need to describe the instance status in the asg here so that we wait till connections have drained.\n count = 0\n Log.log \"Awaiting connection draining... \", newline: false\n while present?(instance.instance_id) && count < 120\n sleep 1\n count += 1\n end\n Log.log \"done\", timestamp: false\n\n Log.log \"Terminating #{instance.instance_id.light_red}... \", newline: false\n ec2_client.terminate_instances(instance_ids: [ instance.instance_id ])\n Log.log \"done\", timestamp: false\n end", "def shutdown()\n \n #shutdown all the instances we have.\n ids = id()\n \n @ec2.terminate_instances(ids)\n \n # wait for them to shut down for a couple of minutes\n attempts = 0\n stats = state_code()\n while (stats.any? {|s| s<=16 }) do\n if attempts > 6 \n raise CaTPAWS::EC2::Error::InstanceShutdown, \"Instances still running after a long wait. Check your EC2 account manually?\"\n end\n puts \"Terminating instances, please wait...\"\n sleep(10)\n attempts+=1\n get_instances(true)\n stats = state_code()\n end\n \n #and delete the associated security group\n @ec2.delete_security_group(@group_name)\n \n end", "def stop()\n self.destroy()\n\n # Stop the EC2 instance\n $ec2.terminate_instances([self.id])\n end", "def terminate_instance(instance_id)\n return if instance_id.nil? 
|| !configured?\n\n @client.terminate_instances(instance_ids: [instance_id])\n nil\n end", "def destroy \n ec2 = self.class.new_ec2(@resource.value(:user), @resource.value(:password))\n ec2.terminate_instances({:instance_id => @property_hash[:instance_id]})\n ec2.delete_security_group({:group_name => @resource.value(:name)})\n end", "def aws_terminate_instance_and_ebs_volumes( iprops )\n ec2 = AWS::EC2.new.regions[ iprops[ :region ] ]\n inst = ec2.instances[ iprops[ :id ] ]\n unless inst.exists?\n raise \"Instance #{iprops[:id]} does not exist in #{iprops[:region]}\"\n end\n\n ebs_volumes = inst.block_devices.map do |dev|\n ebs = dev[ :ebs ]\n if ebs && dev[:device_name] =~ /dh\\d+$/ && !ebs[:delete_on_termination]\n ebs[ :volume_id ]\n end\n end.compact\n\n inst.terminate\n wait_until( \"termination of #{inst.id}\", 2.0 ) { inst.status == :terminated }\n\n ebs_volumes = ebs_volumes.map do |vid|\n volume = ec2.volumes[ vid ]\n if volume.exists?\n volume\n else\n puts \"WARN: #{volume} doesn't exist\"\n nil\n end\n end.compact\n\n ebs_volumes.each do |vol|\n wait_until( \"deletion of vol #{vol.id}\" ) do\n vol.status == :available || vol.status == :deleted\n end\n vol.delete if vol.status == :available\n end\n\n found = aws_find_instance( iprops )\n if found\n aws_instance_removed( found )\n aws_write_instances\n end\n end", "def terminateinstances\n if not checkRequirements([\"thezone\",\"theserver\"])\n return false\n end\n checkToken(@thezone)\n submit = queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@theserver.azone.name}/instances/#{@theserver.serial}', :method => 'delete', :options => '', :access_token => @thezone.token )\n checkQuery(:type => 'zone', :token => @thezone.token, :projectname => @thezone.name, :zonename => @theserver.azone.name, :operationname => submit[\"name\"] )\n end", "def start_terminating! options={}\n return true if away?\n return :wait if terminating? || busy?\n Log.info \"Terminating #{self}\"\n response = Wucluster.ec2.terminate_instances options.merge(:instance_id => [self.id])\n new_state = response.instancesSet.item.first.currentState.name rescue nil\n Log.warn \"Request returned funky status: #{new_state}\" unless (['shutting-down', 'terminated'].include? new_state)\n self.status = new_state.gsub(/-/,'_').to_sym\n dirty!\n response\n end", "def terminate_instances\n @task.unsafe(\"Stopping #{@name} Launch process\") do\n autoscaling_group.suspend_processes('Launch')\n end\n autoscaling_instances.each do |instance|\n @task.unsafe(\"Terminating instance #{instance.instance_id}\") do\n load_balancers.each do |elb|\n elb.instances.deregister(instance.instance_id)\n end\n instance.ec2_instance.terminate\n end\n end\n end", "def terminate_instances\n @task.unsafe(\"Stopping #{@name} Launch process\") do\n autoscaling_group.suspend_processes('Launch')\n end\n autoscaling_instances.each do |instance|\n @task.unsafe(\"Terminating instance #{instance.instance_id}\") do\n load_balancers.each do |elb|\n elb.instances.deregister(instance.instance_id)\n end\n instance.ec2_instance.terminate\n end\n end\n end", "def destroy\n @ec2_instance.destroy\n respond_to do |format|\n format.html { redirect_to ec2_instances_url, notice: 'Ec2 instance was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def terminate(server_name, access_key, secret, snapshot_removal = true, force = false)\n ##############################\n # Initialize AWS and create EC2 connection\n ##############################\n initialize_aws(access_key, secret)\n ec2 = AWS::EC2.new\n\n ##############################\n # Find instance\n ##############################\n instance = nil\n AWS.memoize do\n instances = ec2.instances.filter(\"tag:Name\", server_name)\n instances.each do |i|\n unless i.status == :shutting_down || i.status == :terminated\n instance = i\n break\n end # unless status\n end # instance loop\n end # memoize\n\n if instance\n environment_name = nil\n AWS.memoize do\n environment_name = instance.tags[\"environment\"].strip if instance.tags[\"environment\"]\n end\n\n ##############################\n # ENVIRONMENT\n ##############################\n if environment_name.nil? && ! force\n @log.fatal \"No environment tag found for host. Use the --force option to override and terminate.\"\n exit 3\n end\n\n if (! @environments.has_key?(environment_name)) && (! force)\n @log.fatal \"Environment not found: '#{environment_name}'\"\n exit 2\n end\n @environment = @environments[environment_name] if environment_name\n\n ##############################\n # Create Route53 connection\n ##############################\n aws_route53 = nil\n if @environment && @environment.route53_zone_id\n aws_route53 = AWS::Route53.new\n route53 = EC2Launcher::Route53.new(aws_route53, @environment.route53_zone_id, @log)\n end\n\n ##############################\n # EBS Volumes\n ##############################\n # Find EBS volumes\n attachments = nil\n AWS.memoize do\n attachments = instance.block_device_mappings.values\n\n # Remove snapshots\n remove_snapshots(ec2, attachments) if snapshot_removal\n\n # Remove volumes, if necessary\n remove_volumes(ec2, attachments)\n end\n\n private_ip_address = instance.private_ip_address\n \n run_with_backoff(30, 1, \"terminating instance: #{server_name} [#{instance.instance_id}]\") do\n instance.terminate\n end\n\n if route53\n @log.info(\"Deleting A record from Route53: #{server_name} => #{private_ip_address}\")\n route53.delete_record_by_name(server_name, 'A')\n end\n\n @log.info(\"Deleting node/client from Chef: #{server_name}\")\n node_result = `echo \"Y\" |knife node delete #{server_name}`\n client_result = `echo \"Y\" |knife client delete #{server_name}`\n @log.debug(\"Deleted Chef node: #{node_result}\")\n @log.debug(\"Deleted Chef client: #{client_result}\")\n else\n @log.error(\"Unable to find instance: #{server_name}\")\n end\n end", "def destroy\n @instance = @provider.instances.find(params[:id])\n InstanceOperations.terminate_instances(@provider.connect!, @instance)\n @instance.destroy\n redirect_to cloud_provider_path(@provider) \n end", "def terminate_instance!(instance_id)\n instances.select {|a| a[:instance_id] == instance_id}[0][:status] = \"terminating\"\n end", "def terminate_instances(parameters, secret)\n if @secret != secret\n return BAD_SECRET_RESPONSE\n end\n\n TERMINATE_INSTANCES_REQUIRED_PARAMS.each { |required_param|\n if parameters[required_param].nil? 
or parameters[required_param].empty?\n return {\"success\" => false, \"reason\" => \"no #{required_param}\"}\n end\n }\n\n Thread.new {\n HelperFunctions.set_creds_in_env(parameters['credentials'], \"1\")\n HelperFunctions.terminate_vms(parameters['instance_ids'], \n parameters['infrastructure'])\n }\n\n return SUCCESSFUL_TERMINATE_RESPONSE\n end", "def remove_instance(instance)\n deregister_response = client.deregister_instances_from_load_balancer(load_balancer_name: name,\n instances: [{instance_id: instance.ec2_instance_id}])\n remaining_instance_count = deregister_response.instances.size\n puts \"Removed #{instance.hostname} from ELB #{name}. Remaining instances: #{remaining_instance_count}\".light_blue\n _wait_for_connection_draining\n end", "def shutdown\n Bj.submit \"#{XEN_CMD_RUNNER} shutdown_instance #{name} true\", :tag => \"#{name}.shutdown_instance\"\n end", "def stop\n MU.log \"Stopping #{@cloud_id}\"\n MU::Cloud::Google.compute(credentials: @config['credentials']).stop_instance(\n @project_id,\n @config['availability_zone'],\n @cloud_id\n )\n begin\n sleep 5\n end while cloud_desc(use_cache: false).status != \"TERMINATED\" # means STOPPED\n end", "def stop_instance instance\n begin\n instance.terminate\n rescue\n end \n end", "def terminate_instance_by_name(node_name)\n return unless configured?\n\n terminate_instance(get_aws_instance_id_by_node_name(node_name))\n end", "def delete_pvm_instance(instance_id)\n delete(\"cloud-instances/#{guid}/pvm-instances/#{instance_id}\")\n end", "def delete_instance(instance_name)\n return if !configured? || !instance_exists?(instance_name)\n\n @service.delete_instance(@gcp_config['project'], @gcp_config['zone'], instance_name)\n rescue StandardError => e\n @logger.error(e.message)\n end", "def terminate\n logger.info \"Terminating any instance with GroupUUID: #{@os_aws.group_uuid}\"\n\n terminate_instances_by_group_id(@os_aws.group_uuid)\n end", "def delete_instance instance_id\n execute do\n instances.delete_instance(\n instance_path(instance_id)\n )\n end\n end", "def destroy(state)\n info(\"Destroying instance #{instance.name}\")\n return if state[:server_id].nil?\n instance.transport.connection(state).close\n domain = load_domain(state[:server_id])\n destroy_domain(domain) unless domain.nil?\n info(\"Libvirt instance #{state[:server_id]} destroyed.\")\n state.delete(:server_id)\n state.delete(:hostname)\n end", "def request_termination\n instance_id = AlgRunner.fetch_url(INSTANCE_ID_URL)\n\n logger.info { \"Requesting terminating for #{instance_id}\" }\n @bunny.queue INSTANCE_SERVICE_QUEUE\n @bunny.exchange('').publish(\n { :instance_id => instance_id.to_s, :action => :termination }.to_yaml, \n :key => INSTANCE_SERVICE_QUEUE, :routing_key => INSTANCE_SERVICE_QUEUE\n )\n end", "def stop(force = true)\n return if ! active_instance_dir_exists?\n return if ! 
running?\n puts \"Stopping #{@resource[:name]}\"\n cmd = [command(:instance_manager)]\n cmd += [\"stop\"]\n cmd += [@resource[:name]]\n cmd += [\"force\"] if force\n run(cmd)\n end", "def immediate_shutdown_pvm_instance(instance_id)\n post(\n \"cloud-instances/#{guid}/pvm-instances/#{instance_id}/action\",\n {\"action\" => \"immediate-shutdown\"}.to_json\n )\n end", "def aws_instance_volumes_delete_on_termination_set(instance)\n log \"AWS: setting Instance '#{self.driver_id}' volumes deleteOnTermination\"\n aws_call('aws_instance_block_devices_get', instance: instance).each do |block_device|\n aws_call('aws_instance_block_device_ebs_delete_on_termination_set', instance: instance, block_device: block_device)\n end\n end", "def shutdown(group)\n cfmshutdown = AWS::CloudFormation.new\n ec2shutdown = AWS::EC2.new\n puts \"XXXXXXXXXX DAILY CHECKING HERE SHUT DOWN XXXXXXXXXXXX\"\n puts group.name\n puts \"Shut down Job is working now\"\n puts group.users.length\n puts \"-------------\"\n if group.users.length > 0\n allinstanceids = []\n group.users.each do |user|\n if user.stacks.length > 0\n user.stacks.each do |stack|\n if stack.ec2instance_ids.length > 0 \n allinstanceids.concat stack.ec2instance_ids\n end\n end\n end\n end\n puts \"BUBBBUBUBBBB\"\n puts allinstanceids.class\n allinstanceids = allinstanceids.uniq\n puts allinstanceids.class\n puts \"BUBBBUBUBBBB\"\n if allinstanceids.length > 0\n $ec2_resource.instances({instance_ids: allinstanceids}).batch_stop # To job stop\n end\n end \n puts \"YYYYYYYYYYYYYYYYYYYYYYYY\"\n end", "def abort_instance(instance, interfaces, wait, unlock=false)\n @mgr.unlock() if unlock\n if interfaces.size > 0\n interfaces.each() do |iface|\n iface.delete()\n end\n end\n return unless instance\n yield \"#{@mgr.timestamp()} Aborting instance #{instance.id()}\"\n instance.block_device_mappings().each() do |b|\n v = @resource.volume(b.ebs.volume_id)\n # Volumes without a Name should be deleted. Note that if an\n # instance is aborted after tagging, the volume will get left\n # behind. 
This should be extremely rare, since the most likely\n # collision is two people creating an instance with the same\n # name at the same time.\n unless get_tag(v, \"Name\")\n yield \"#{@mgr.timestamp()} Marking new unnamed volume #{b.ebs.volume_id} (#{b.device_name}) for automatic deletion\"\n instance.modify_attribute({\n attribute: \"blockDeviceMapping\",\n block_device_mappings: [\n {\n device_name: b.device_name,\n ebs: {\n volume_id: b.ebs.volume_id,\n delete_on_termination: true,\n },\n },\n ],\n })\n end\n end\n yield \"#{@mgr.timestamp()} Sending termination command\"\n instance.terminate()\n return unless wait\n yield \"#{@mgr.timestamp()} Waiting for instance to terminate...\"\n instance.wait_until_terminated()\n yield \"#{@mgr.timestamp()} Terminated\"\n end", "def terminate\n self.destroy\n end", "def terminate()\n connection.nodes.terminate(uuid).tap{ self.forget }\n end", "def stop_instances\n @task.unsafe(\"Stopping #{@name} processes\") do\n save_to_s3(@task.bucket)\n autoscaling_group.suspend_all_processes\n end\n autoscaling_instances.each do |instance|\n @task.unsafe(\"Stopping instance #{instance.instance_id}\") do\n load_balancers.each do |elb|\n elb.instances.deregister(instance.instance_id)\n end\n instance.ec2_instance.stop\n end\n end\n end", "def stop_node_instance(node_instance, options={})\n super(node_instance, options)\n node_instance.wait_for { status.downcase == 'down' }\n end", "def allow_instance_termination(&block)\n with_instance_profile if instance_profile.nil?\n\n term_policy = Model::Template::Resource::IAMPolicy.new(\"#{ name }TerminationPolicy\", @template)\n term_policy.policy_name('allow-instance-termination')\n\n parent_role = self\n term_policy.allow do\n action 'ec2:TerminateInstances'\n resource '*'\n condition :StringEquals => {\n 'ec2:InstanceProfile' => get_att(parent_role.instance_profile.name, 'Arn')\n }\n end\n term_policy.role(self)\n term_policy.depends_on(instance_profile)\n\n term_policy.instance_exec(&block) if block\n @template.resources[term_policy.name] = term_policy\n end", "def delete_instances(count)\n instances_to_delete = current_instances.last(count) \n parent.terminate_instances(Group.instance_ids(instances_to_delete))\n # remaining_instances under alive\n Output.new(current_instances.first(current_instances.size - count), instances_to_delete)\n end", "def stop_pvm_instance(instance_id)\n post(\n \"cloud-instances/#{guid}/pvm-instances/#{instance_id}/action\",\n {\"action\" => \"stop\"}.to_json\n )\n end", "def stop_instances\n @task.unsafe(\"Stopping #{@name} processes\") do\n autoscaling_group.suspend_all_processes\n end\n autoscaling_instances.each do |instance|\n @task.unsafe(\"Stopping instance #{instance.instance_id}\") do\n load_balancers.each do |elb|\n elb.instances.deregister(instance.instance_id)\n end\n instance.ec2_instance.stop\n end\n end\n end", "def stop_kvm(name)\n unless(system(\"virsh destroy #{name}\"))\n raise \"Failed to stop node: #{name}\"\n end\nend", "def destroy!\n response = @connection.dbreq(\"DELETE\", @lbmgmthost, \"#{@lbmgmtpath}/instances/#{CloudDB.escape(@id.to_s)}\",@lbmgmtport,@lbmgmtscheme)\n CloudDB::Exception.raise_exception(response) unless response.code.to_s.match(/^202$/)\n true\n end", "def terminate\n\t\tself.request( :terminate )\n\tend", "def delete_vm(instance_id)\n with_thread_name(\"delete_vm(#{instance_id}):v2\") do\n logger.info(\"Deleting instance '#{instance_id}'\")\n @cloud_core.delete_vm(instance_id) do |instance_id|\n @registry.delete_settings(instance_id) if 
@stemcell_api_version < 2\n end\n end\n end", "def deleteInstance(iController, ioInstance)\n # Clean up everything that was registered before destruction\n ioInstance.unregisterAll\n # Wait for any timer event that has to finish\n ioInstance.killTimers\n # Quit everything\n ioInstance.destroy\n end", "def delete\n stop\n [ @resource['instances_dir'] + \"/\" + @resource[:name],\n @resource['instances_dir'] + \"/\" + \"_\" + @resource[:name]\n ].each do |dir|\n FileUtils.rm_rf(dir) if File.directory?(dir)\n end\n end", "def terminate()\n connection.node_clusters.terminate(uuid).tap{ self.forget }\n end", "def terminate\n end", "def terminate\n @thread.raise Zoidberg::DeadException.new('Instance in terminated state', object_id)\n end", "def delete_instance instance_id\n instances.delete_instance name: instance_path(instance_id)\n end", "def request_termination_of_non_master_instance\n inst = nonmaster_nonterminated_instances.last\n terminate_instance!(inst.instance_id) if inst\n end", "def terminate_instance!(o={})\n thost = o[:name] || running_hosts.last\n available_hosts << thost.clone\n SshInstance.new(instance_options(o.merge(:name=>thost, :status=>'terminated')))\n end", "def destroy_server(connection, server)\n disks = server.disks.select{|d| d[\"type\"] == \"PERSISTENT\"}\n server.destroy\n destroy_disks = get_boolean_field('destroy_disks')\n if destroy_disks\n return if disks.empty?\n\n # We need to wait for instance to be terminated before destroying disks\n start = Time.now\n msg = \"Waiting for server to be terminated: #{server.name}\"\n Maestro.log.debug(msg)\n write_output(\"#{msg}...\")\n\n # we need to wait until the server is removed from GCE\n # state == TERMINATED doesn't let us delete the disk yet\n begin\n server.wait_for { false }\n rescue Fog::Errors::NotFound => e\n end\n Maestro.log.debug(\"Server is terminated: #{server.name} (#{Time.now - start}s)\")\n write_output(\"done (#{Time.now - start}s)\\n\")\n\n # Delete the disks\n start = Time.now\n disks_to_delete = []\n disks.each do |d|\n match = d[\"source\"].match(%r{projects/(.*)/zones/(.*)/disks/(.*)})\n disks_to_delete << {:project => match[1], :zone => match[2], :disk => match[3]}\n end\n\n msg = \"Deleting disks: #{disks_to_delete.map{|d| d[:disk]}}\"\n Maestro.log.debug(msg)\n write_output(\"#{msg}...\")\n\n disks_to_delete.each do |d|\n disk = connection.disks.get(d[:disk],d[:zone])\n disk.destroy\n end\n\n Maestro.log.debug(\"Deleted disks: #{disks_to_delete.map{|d| d[:disk]}} (#{Time.now - start}s)\")\n write_output(\"done (#{Time.now - start}s)\\n\")\n end\n end", "def terminate_instance!(id=nil)\n raise RemoteException.new(:method_not_defined, \"terminate_instance!\")\n end", "def terminate\n end", "def terminate_instances( options = {} )\n options = { :instance_id => [] }.merge(options)\n raise ArgumentError, \"No :instance_id provided\" if options[:instance_id].nil? || options[:instance_id].empty?\n params = pathlist(\"InstanceId\", options[:instance_id])\n return response_generator(:action => \"TerminateInstances\", :params => params)\n end", "def terminate_instances(*instance_ids)\n action = 'TerminateInstances'\n params = {\n 'Action' => action\n }\n params.merge!(array_to_params(instance_ids, 'InstanceId'))\n\n response = send_query_request(params)\n response.is_a?(Net::HTTPSuccess)\n end", "def remove_instance(instance)\n unless instance\n raise ArgumentError, \"A argument of type Instance was expected. 
Got #{instance.inspect}\"\n end\n\n # Check to make sure that we have a valid instance role here first.\n unless %w[app util].include?(instance.role)\n raise InvalidInstanceRole, \"Removing instances is only supported for app, util instances\"\n end\n\n # Check to be sure that instance is actually provisioned\n # TODO: Rip out the amazon_id stuff when we have IaaS agnosticism nailed down\n unless instance.amazon_id && instance.provisioned?\n raise InstanceNotProvisioned, \"Instance is not provisioned or is in unusual state.\"\n end\n\n response = api.post(\"/environments/#{id}/remove_instances\", :request => {\n :provisioned_id => instance.amazon_id,\n :role => instance.role,\n :name => instance.name\n })\n\n # Reset instances so they are fresh if they are requested again.\n @instances = nil\n\n # Return the response.\n return response\n end", "def terminate app\n app.perform :terminate\n end", "def before_destroy_load_agent(load_agent)\n agent_ec2_instance = ec2.instances[load_agent.identifier]\n if agent_ec2_instance.exists?\n logger.info(\"Terminating agent##{load_agent.identifier}...\")\n agent_ec2_instance.terminate\n wait_for(\"#{agent_ec2_instance.id} to terminate\") { agent_ec2_instance.status.eql?(:terminated) }\n else\n logger.warn(\"Agent ##{load_agent.identifier} does not exist on EC2\")\n end\n end", "def kill(secret)\n if !valid_secret?(secret)\n return BAD_SECRET_MSG\n end\n @kill_sig_received = true\n \n if is_hybrid_cloud? \n Thread.new {\n Kernel.sleep(5)\n HelperFunctions.terminate_hybrid_vms(creds)\n }\n elsif is_cloud?\n Thread.new {\n Kernel.sleep(5)\n infrastructure = creds[\"infrastructure\"]\n keyname = creds[\"keyname\"]\n HelperFunctions.terminate_all_vms(infrastructure, keyname)\n }\n else\n # in xen/kvm deployments we actually want to keep the boxes\n # turned on since that was the state they started in\n\n if my_node.is_login?\n stop_ejabberd \n end\n\n if my_node.is_shadow? or my_node.is_appengine?\n ApiChecker.stop \n end\n\n maybe_stop_taskqueue_worker(\"apichecker\")\n maybe_stop_taskqueue_worker(AppDashboard::APP_NAME)\n\n jobs_to_run = my_node.jobs\n commands = {\n \"load_balancer\" => \"stop_app_dashboard\",\n \"appengine\" => \"stop_appengine\",\n \"db_master\" => \"stop_db_master\",\n \"db_slave\" => \"stop_db_slave\",\n \"zookeeper\" => \"stop_zookeeper\"\n }\n\n my_node.jobs.each { |job|\n if commands.include?(job)\n Djinn.log_info(\"About to run [#{commands[job]}]\")\n send(commands[job].to_sym)\n else\n Djinn.log_info(\"Unable to find command for job #{job}. 
Skipping it.\")\n end\n }\n\n if has_soap_server?(my_node)\n stop_soap_server\n stop_datastore_server\n end\n\n TaskQueue.stop if my_node.is_taskqueue_master?\n TaskQueue.stop if my_node.is_taskqueue_slave?\n TaskQueue.stop_flower if my_node.is_login?\n\n stop_app_manager_server\n stop_infrastructure_manager\n end\n\n MonitInterface.shutdown\n FileUtils.rm_rf(STATE_FILE)\n\n if @creds['alter_etc_resolv'].downcase == \"true\"\n HelperFunctions.restore_etc_resolv()\n end\n\n return \"OK\" \n end", "def terminate\n @should_terminate = true\n end", "def destroy_instance(instance)\n tenant = @identity.find_tenant(instance.tenant_id)\n fog_options = @fog_options[:storage].merge(:hp_tenant_id => tenant.id)\n\n storage = VCAP::Services::Swift::Storage.new(@logger, fog_options)\n storage.delete_account\n\n @logger.debug \"Account meta data (should be 'Recently deleted'): \" + storage.get_account_meta_data.body.to_s\n\n @identity.delete_users_by_tenant_id(instance.tenant_id, @fog_options[:name_suffix])\n @identity.delete_tenant(instance.tenant_id)\n raise SwiftError.new(SwiftError::SWIFT_DESTROY_INSTANCE_FAILED, instance.inspect) unless instance.destroy\n end", "def ensure_destroy\n ensure_stop\n destroy if exist?\n end", "def delete_vm(instance_id)\n with_thread_name(\"delete_vm(#{instance_id})\") do\n logger.info(\"Deleting instance '#{instance_id}'\")\n\n @cloud_core.delete_vm(instance_id) do |instance_id|\n @registry.delete_settings(instance_id)\n end\n end\n end", "def force_terminate\n @puppet_thread.exit unless @puppet_thread.nil?\n end", "def stop(action)\n if !autoscaling_group.exists?\n @task.warn { \"Autoscaling group #{@name} doesn't exist\" }\n return\n end\n\n if autoscaling_group.suspended_processes.empty?\n case action\n when :default, :terminate\n terminate_instances\n when :stop\n stop_instances\n else\n raise Cloud::Cycler::TaskFailure.new(\"Unrecognised autoscaling action #{action}\")\n end\n else\n @task.debug { \"Scaling group #{@name} already suspended\" }\n end\n end", "def terminate_instances_by_group_id(group_id)\n raise 'Group ID not defined' unless group_id\n\n instances = @os_aws.describe_running_instances(group_id)\n logger.info instances\n ids = instances.map { |k, _| k[:instance_id] }\n\n logger.info \"Terminating the following instances #{ids}\"\n resp = []\n resp = @os_aws.terminate_instances(ids).to_hash unless ids.empty?\n\n resp[:terminating_instances].first[:current_state][:name] == 'shutting-down'\n end", "def terminate!\n DRb.stop_service\nend", "def aws_instance_wait_till_status_equals(obj, status, time)\n log \"AWS: waiting for #{obj.class.to_s.split(\"::\").last} '#{obj.id}' status to change to ':#{status}'\"\n begin\n Timeout.timeout(time) do \n sleep 1 while aws_call(\n 'aws_instance_status', \n instance: obj,\n errs: { AWS::EC2::Errors::InvalidInstanceID::NotFound => 60 }\n ) != status\n end\n rescue Timeout::Error => e\n raise \"AWS: timeout while waiting for #{obj.class.to_s.split(\"::\").last} '#{obj.id} status to change to ':#{status}'\"\n end\n end", "def terminate\n run_chain(:before_stop)\n\n EventMachine.stop_event_loop\n\n run_chain(:after_stop)\n end", "def terminate\n set_or_terminate do\n @terminated = true\n end\n end", "def stop(action)\n if !autoscaling_group.exists?\n @task.warn { \"Autoscaling group #{@name} doesn't exist\" }\n return\n end\n\n # FIXME: This won't work if we reinstate suspended processes...\n #if autoscaling_group.suspended_processes.empty?\n case action\n when :default, :terminate\n terminate_instances\n when :stop\n 
stop_instances\n else\n raise Cloud::Cycler::TaskFailure.new(\"Unrecognised autoscaling action #{action}\")\n end\n #else\n #@task.debug { \"Scaling group #{@name} already suspended\" }\n #end\n end", "def stop force=nil\n if force\n conn.get 'StopInstances', :'instances.1' => id, :force => 1\n else\n conn.get 'StopInstances', :'instances.1' => id\n end\n promise(timeout:60){ wait_for :stopped }\n end", "def terminate_instance_by_config_id(configuration_id, node_name)\n return unless configured?\n\n terminate_instance(get_aws_instance_id_by_config_id(configuration_id, node_name))\n end", "def terminate_server(server)\n options = { \"EBS_MOUNT_POINT\" => \"text:#{@mount_point}\",\n \"EBS_TERMINATE_SAFETY\" => \"text:off\" }\n audit = server.run_executable(@scripts_to_run['terminate'], options)\n audit.wait_for_completed\n end", "def terminate() end", "def scale_down\n \n num_stopped = 0\n # lets figure out what we can shut down.\n logger.info \"Looking for unused instances to scale down...\"\n EventLog.info \"Looking for unused instances to scale down...\"\n instances.each do |i|\n #this is actually pretty complicated. we have to figure out the exact range for each instance, based on the instance launch time\n lt = i.launch_time\n lt_diff = 60 - lt.min\n lower_range = HOUR_MOD - lt_diff #careful, it could be negative!\n lower_range = lower_range + 60 if lower_range < 0 # adjust for negative!\n\n upper_range = lower_range + (60 - HOUR_MOD) #upper range for mins, could be > 59!\n upper_range = upper_range - 60 if upper_range > 59 #correct for over 59\n\n now_min = DateTime.now.min\n \n ### DEBUG shutdown logic\n # puts \"Instance: #{i.aws_instance_id}\"\n # puts \"Now: #{now_min}\"\n # puts \"Upper: #{upper_range}\"\n # puts \"Lower: #{lower_range}\"\n\n if (now_min > lower_range && now_min < upper_range) || ((upper_range < lower_range) && (now_min < upper_range || now_min > lower_range))\n #so lets shutdown, but only if it won't bring us below the min_running threshold\n\n #first find out how many instances are running of this type\n total_running = (instances.select{ |j| j.running? }).size\n unless ((total_running - 1) < min || (! i.available? && ! i.error? ) || (farm_type.eql?('admin')))\n # for now we shutdown via aws but this will change as we figure out a better way\n logger.info \"Shutting down #{i.farm.ami_id} -- #{i.instance_id} due to IDLE timeout.\"\n EventLog.info \"Shutting down #{i.farm.ami_id} -- #{i.instance_id} due to IDLE timeout.\"\n i.terminate\n num_stopped += 1\n end\n end\n end\n\n return num_stopped\n\n end", "def teardown\n with_vsphere_connection do |dc|\n nodes.each do |k,v|\n storage = RSpec.configuration.rs_storage[:nodes][k]\n\n if storage.nil?\n log.info \"No entry for node #{k}, no teardown necessary\"\n next\n end\n\n ssh = storage[:ssh]\n unless ssh.nil? 
or ssh.closed?\n ssh.close\n end\n\n if destroy\n log.info \"Destroying instance #{k}\"\n vm_name = storage[:vm]\n if vm_name == nil\n log.error \"No vm object for #{k}\"\n next\n end\n\n # Traverse folders to find target folder for new vm's\n vm_folder = dc.vmFolder.traverse(vmconf[:dest_dir], RbVmomi::VIM::Folder)\n raise \"VirtualMachine folder #{vmconf[:dest_dir]} not found\" if vm_folder.nil?\n vm = vm_folder.find(vm_name, RbVmomi::VIM::VirtualMachine)\n raise \"VirtualMachine #{vm_name} not found in #{vmconf[:dest_dir]}\" if vm.nil?\n\n begin\n vm.PowerOffVM_Task.wait_for_completion\n rescue RbVmomi::Fault => e\n log.error \"Fault attempting to power off node #{k}, #{e.message}\"\n ensure\n begin\n vm.Destroy_Task.wait_for_completion\n rescue RbVmomi::Fault => e\n log.error \"Fault attempting to destroy node #{k}, #{e.message}\"\n end\n end\n else\n next\n end\n end\n end\n\n nil\n end", "def stop\n puts \"Stoping any instance with group ID: #{@os_aws.group_uuid}\"\n\n stop_instances_by_group_id(@os_aws.group_uuid)\n end", "def terminate\n @process.stop\n end", "def shutdown\n requires :id\n begin\n response = service.post_shutdown_vapp(id)\n rescue Fog::VcloudDirector::Compute::BadRequest => ex\n Fog::Logger.debug(ex.message)\n return false\n end\n service.process_task(response.body)\n end", "def stop\n system(\"ps -aux | grep rackup\")\n puts \"Stoping clusters...\"\n for app in @apps\n if @env == :deployment\n pid_file = \"#{APP_PATH}/log/doozer.#{app[:port]}.pid\"\n puts \"=> Reading pid from #{pid_file}\" \n if File.exist?(pid_file)\n File.open(pid_file, 'r'){ | f | \n pid = f.gets.to_i\n puts \"=> Shutting down process #{pid}\"\n system(\"kill -9 #{pid}\")\n\n }\n File.delete(pid_file) \n else\n puts \"ERROR => pid file doesn't exist\"\n end\n end\n end\n sleep(1)\n system(\"ps -aux | grep rackup\")\nend", "def delete\n ensure_service!\n service.delete_instance path\n true\n end", "def destroy\n destroy!\n rescue AnsibleTowerClient::Error\n false\n end", "def delete_vm_instance(name, namespace)\n @conn.vminstances.destroy(name, namespace)\n end", "def terminate\n \n\tend", "def request_termination\n @sigterm_timer.cancel if @sigterm_timer\n ::Process.kill('TERM', @pid) rescue nil\n end", "def dissociate(instance_id)\n conn.service 'get', 'DissociateEips',\n :instance => instance_id,\n :'eips.1' => id\n promise(timeout:60){ wait_for :available }\n end", "def destroy\n Process.kill(9, pid)\n end", "def kill\n @executor.shutdownNow\n nil\n end", "def terminate\n @@terminated = true\n end", "def delete_network(network)\n attempt = 0\n begin\n network.destroy\n rescue Fog::Compute::RackspaceV2::ServiceError => e\n if attempt == 3\n puts \"Unable to delete #{network.label}\"\n return false\n end\n puts \"Network #{network.label} Delete Fail Attempt #{attempt}- #{e.inspect}\"\n attempt += 1\n sleep 60\n retry\n end\n return true\nend", "def destroy\n @instance = Instance.find(params[:id])\n @instance.destroy\n\n respond_to do |format|\n format.html { redirect_to(instances_url) }\n format.xml { head :ok }\n end\n end", "def openvz_fog_test_cleanup\n at_exit do\n unless Fog.mocking?\n server = openvz_service.servers.find { |s| s.name == '104' }\n if server\n server.wait_for(120) do\n reload rescue nil; ready?\n end\n end\n server.stop\n openvz_fog_test_server_destroy\n end\n end\nend", "def destroy_instance(credentials, id)\n safely do\n terremark_client = new_client(credentials)\n return terremark_client.delete_vapp(id)\n end\nend", "def destroy(_state)\n workflow do\n run_destroy.bind 
do\n remove_instance_directory\n end\n end\n end", "def kill \n remote_data = @repo.to_s\n deleter = Deleter.new S3_STORE\n deleter.delete remote_data, @threads, &REPORTER\n end" ]
[ "0.7584375", "0.73998404", "0.7372983", "0.7333556", "0.71941835", "0.7008991", "0.6973817", "0.6926044", "0.6887981", "0.68776006", "0.6773368", "0.6773368", "0.67183524", "0.6684249", "0.66786456", "0.66781765", "0.65989", "0.65977275", "0.6567925", "0.65571505", "0.653775", "0.6488547", "0.64112675", "0.63984877", "0.63912857", "0.6338504", "0.6316832", "0.6300064", "0.62901485", "0.6248322", "0.6246128", "0.6245597", "0.62165546", "0.62102866", "0.62077737", "0.61925024", "0.6190337", "0.61891484", "0.6163552", "0.6125314", "0.6099447", "0.60950977", "0.606023", "0.6049959", "0.6043525", "0.6037353", "0.6028745", "0.60272074", "0.6014733", "0.6004409", "0.5998223", "0.59813625", "0.5981182", "0.5962902", "0.596066", "0.59430206", "0.5937449", "0.59303737", "0.5913351", "0.5903019", "0.5886831", "0.58754677", "0.5863511", "0.58435476", "0.5835654", "0.58233416", "0.5812871", "0.5810922", "0.5810482", "0.58069766", "0.580117", "0.5799354", "0.5792425", "0.5791104", "0.57907057", "0.5786203", "0.5779672", "0.5753231", "0.57509387", "0.57493275", "0.57485974", "0.5719765", "0.57184744", "0.5715408", "0.570517", "0.5692408", "0.5690121", "0.56810373", "0.56764996", "0.5676343", "0.56625724", "0.56571287", "0.5647155", "0.56298035", "0.5626192", "0.56217813", "0.56159765", "0.56101", "0.5599598" ]
0.6502307
21
Creates a new EBS volume
def create_disk(size, cloud_properties, instance_id = nil) raise ArgumentError, 'disk size needs to be an integer' unless size.kind_of?(Integer) with_thread_name("create_disk(#{size}, #{instance_id})") do volume_properties = VolumeProperties.new( size: size, type: cloud_properties['type'], iops: cloud_properties['iops'], az: @az_selector.select_availability_zone(instance_id), encrypted: cloud_properties['encrypted'], kms_key_arn: cloud_properties['kms_key_arn'] ) resp = @ec2_client.client.create_volume(volume_properties.persistent_disk_config) volume = AWS::EC2::Volume.new_from(:create_volume, resp, resp.volume_id, config: @ec2_client.config) logger.info("Creating volume '#{volume.id}'") ResourceWait.for_volume(volume: volume, state: :available) volume.id end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create\n\t\tregion = resource[:availability_zone].to_s.gsub(/.$/,'') \n\t\tcompute = Fog::Compute.new(:provider => 'aws', :region => \"#{region}\")\n\t\tprint \"ebsvol[aws]->create: Region is #{region}\\n\" if $debug\n\t\tprint \"ebsvol[aws]->create: Availability_zone is #{resource[:availability_zone]}\\n\" if $debug\n\t\t# create the requested volume\n\t\tresponse = compute.create_volume(resource[:availability_zone],resource[:size],resource[:snapshot])\t\n\t\tif (response.status == 200)\n\t\t\tvolumeid = response.body['volumeId']\n\t\t\tprint \"ebsvol[aws]->create: I created volume #{volumeid}.\\n\" if $debug\n\t\t\t# now tag the volume with volumename so we can identify it by name\n\t\t\t# and not the volumeid\n\t\t\tresponse = compute.create_tags(volumeid,{ :Name => resource[:volume_name] })\n\t\t\tif (response.status == 200)\n\t\t\t\tprint \"ebsvol[aws]->create: I tagged #{volumeid} with Name = #{resource[:volume_name]}\\n\" if $debug\n\t\t\tend\n\t\t\t# Check if I need to attach it to an ec2 instance.\n\t\t\tattachto = resource[:attached_to].to_s\n\t\t\tprint \"attachto is #{attachto}\\n\" if $debug\n\t\t\tif ( attachto != '' )\n\t\t\t\tif ( attachto == 'me')\n\t\t\t\t\tinstance = instanceinfo(compute,myname(compute))\n\t\t\t\telse\n\t\t\t\t\tinstance = instanceinfo(compute,attachto)\n\t\t\t\tend\n\t\t\t\tif ( resource[:device] != nil )\n\t\t\t\t\t# try to attach the volume to requested instance\n\t\t\t\t\tprint \"attach the volume\\n\" if $debug\n\t\t\t\t\tvolume = volinfo(compute,resource[:volume_name])\n\t\t\t\t\tattachvol(compute,volume,instance,resource[:device])\n\t\t\t\telse\n\t\t\t\t\traise \"ebsvol[aws]->create: Sorry, I can't attach a volume with out a device to attach to!\"\n\t\t\t\tend\n\t\t\tend\n\t\telse\n\t\t\traise \"ebsvol[aws]->create: I couldn't create the ebs volume, sorry!\"\n\t\tend\n\tend", "def create_volume(availability_zone, options = {})\n raise ArgumentError.new('You must specify a size if not creating a volume from a snapshot') if options[:snapshot_id].blank? 
&& options[:size].blank?\n\n action = 'CreateVolume'\n params = {\n 'Action' => action,\n 'AvailabilityZone' => availability_zone\n }\n params['Size'] = options[:size] unless options[:size].blank?\n params['SnapshotId'] = options[:snapshot_id] unless options[:snapshot_id].blank?\n\n response = send_query_request(params)\n parser = Awsum::Ec2::VolumeParser.new(self)\n volume = parser.parse(response.body)[0]\n if options[:tags] && options[:tags].size > 0\n create_tags volume.id, options[:tags]\n end\n volume\n end", "def create_storage_volume(create_opts={})\n create_resource :storage_volume, create_opts\n end", "def volume_create(name)\n @log.info \"Creating volume #{name} from offering id #{DISK_OFFERING}...\"\n ret = @cloud_stack.create_volume(name, ZONE, DISK_OFFERING)\n id = ret[\"createvolumeresponse\"][\"jobid\"]\n wait_for_job id\n vol_id = ret[\"createvolumeresponse\"][\"id\"]\n @log.info \"Created volume id: #{vol_id}\"\n vol_id\n end", "def create_volume(options)\n # Creating the volume is part of the server creation\n end", "def create_volume(options)\n # Creating the volume is part of the server creation\n end", "def createvolume\n if not checkRequirements([\"thezone\",\"thevolume\"])\n return false\n end\n checkToken(@thezone)\n req = {}\n req[\"name\"] = \"oe-#{@thevolume.name}\"\n req[\"description\"] = @thevolume.description\n req[\"sizeGb\"] = @thevolume.size\n submit = queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@thevolume.azone.name}/disks', :method => 'post', :options => '', :data => req.to_json, :access_token => @thezone.toekn )\n d = checkQuery(:type => 'zone', :token => @thezone.token, :projectname => @thezone.name, :zonename => @thevolume.azone.name, :operationname => submit[\"name\"])\n data = queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@thevolume.azone.name}/disks/#{req[\"name\"]}', :method => 'get', :options => '', :access_token => @thezone.token) if d\n data ? data[\"name\"] : false\n end", "def create_volume( options = {} )\n options = { :availability_zone => '' }.merge(options)\n raise ArgumentError, \"No :availability_zone provided\" if options[:availability_zone].nil? 
|| options[:availability_zone].empty?\n options = { :size => '' }.merge(options)\n options = { :snapshot_id => '' }.merge(options)\n params = {\n \"AvailabilityZone\" => options[:availability_zone],\n \"Size\" => options[:size],\n \"SnapshotId\" => options[:snapshot_id]\n }\n return response_generator(:action => \"CreateVolume\", :params => params)\n end", "def create_ec2_ebs_volume opts\n Ec2EbsVolume.create opts.merge :account => self\n end", "def create_volume(options={}) \n raise \"Volume nickname required\" unless options[:nickname]\n params = {:nickname => options[:nickname],:size => options[:size], :api_version => 1.0}\n params[:description] = options[:description] if options[:description]\n #STDERR.puts \"HERE IS THE URL: #{@api_url}/create_ebs_volume.js (PARAMS: #{params.inspect})\"\n body = RestClient.post @api_url+\"/create_ebs_volume.js\",params\n json = JSON.load(body)\n STDERR.puts \"CREATED_VOLUME: #{json}\"\n json\n rescue => e\n display_exception(e, \"create_volume: #{options.inspect}\")\n end", "def create_volume(size_or_snapshot_id, options = {})\n options = {:device => '/dev/sdh'}.merge(options)\n if size_or_snapshot_id.is_a?(Numeric)\n volume = @ec2.create_volume availability_zone, :size => size_or_snapshot_id\n else\n volume = @ec2.create_volume availability_zone, :snapshot_id => size_or_snapshot_id\n end\n if options[:tags]\n @ec2.create_tags(volume.id, options[:tags])\n end\n while volume.status != 'available'\n volume.reload\n end\n if options[:device]\n attach volume, options[:device]\n end\n volume\n end", "def create_volume(snapshot_id, size, availability_zone, timeout, volume_type, piops)\n availability_zone ||= instance_availability_zone\n\n # Sanity checks so we don't shoot ourselves.\n raise \"Invalid volume type: #{volume_type}\" unless ['standard', 'gp2', 'io1'].include?(volume_type)\n\n # PIOPs requested. Must specify an iops param and probably won't be \"low\".\n if volume_type == 'io1'\n raise 'IOPS value not specified.' unless piops >= 100\n end\n\n # Shouldn't see non-zero piops param without appropriate type.\n if piops > 0\n raise 'IOPS param without piops volume type.' unless volume_type == 'io1'\n end\n\n create_volume_opts = { :volume_type => volume_type }\n # TODO: this may have to be casted to a string. rightaws vs aws doc discrepancy.\n create_volume_opts[:iops] = piops if volume_type == 'io1'\n\n nv = ec2.create_volume(snapshot_id, size, availability_zone, create_volume_opts)\n Chef::Log.debug(\"Created new volume #{nv[:aws_id]}#{snapshot_id ? 
\" based on #{snapshot_id}\" : \"\"}\")\n\n # block until created\n begin\n Timeout::timeout(timeout) do\n while true\n vol = volume_by_id(nv[:aws_id])\n if vol && vol[:aws_status] != \"deleting\"\n if [\"in-use\", \"available\"].include?(vol[:aws_status])\n Chef::Log.info(\"Volume #{nv[:aws_id]} is available\")\n break\n else\n Chef::Log.debug(\"Volume is #{vol[:aws_status]}\")\n end\n sleep 3\n else\n raise \"Volume #{nv[:aws_id]} no longer exists\"\n end\n end\n end\n rescue Timeout::Error\n raise \"Timed out waiting for volume creation after #{timeout} seconds\"\n end\n\n nv[:aws_id]\n end", "def create_volume(options = {})\n options[:capacity] = options[:capacity] * GB if options[:capacity] < 100000\n vol = pool.create_volume_xml(Volume.to_xml(options))\n Volume.new vol, self\n end", "def create\n properties = [ resource[:name],\n resource[:user],\n resource[:group],\n resource[:config],\n resource[:mode],\n ]\n\n qmgmt(['volume', 'create'] + properties)\n end", "def createVolume\n require 'rest_client'\n require 'uri'\n\n if @role.nil? and !current_actor.superadmin\n json_response({ message: \"You don't have permission to view the clusters in this project\" }, :unauthorized)\n return\n end\n\n # Service name in the query\n volumeName = params[\"volume_name\"]\n\n # Env variables for Manager host and port\n serviceManagerHost = Settings.service_manager_host\n serviceManagerPort = Settings.service_manager_port.to_s\n serviceManagerURI = 'http://' + serviceManagerHost + ':' + serviceManagerPort + '/v1/volume'\n\n # Create request for Service Manager\n stack = {\n 'name' => volumeName,\n 'engine-url' => @cluster.endpoint,\n 'ca-cert' => @cluster.ca,\n 'cert' => @cluster.cert,\n 'cert-key' => @cluster.key\n }.to_json\n\n begin\n response = RestClient.post(\n serviceManagerURI,\n stack,\n 'Accept' => 'application/json',\n 'Content-Type' => 'application/json'\n )\n puts \"Deploy Response: \" + response\n json_response(response, :created)\n rescue Exception => e\n # If error, respond with it\n puts e\n json_response({message: e}, :unprocessable_entity)\n end\n end", "def create_ebs_stripe(nickname, new_volume_size_in_gb, options = {})\n self.execute_terminate_volumes if options[:force]\n devices = @disk.generate_physical_device_names(options[:stripe])\n each_volume_size = (new_volume_size_in_gb / options[:stripe].to_f).ceil\n devices.each do |d| \n vol = self.create_volume(:nickname => \"#{nickname}-#{d}\", \n :description => \"Created by RS tools to initialize new EBS stripe volume\",\n :size => each_volume_size)\n raise vol unless vol['aws_id']\n \"Attaching new EBS volume: #{vol['aws_id']}\"\n att = self.attach_volume(vol['aws_id'], d)\n end\n devices.each {|d| self.wait_for_attachment(d) }\n\n @disk.initialize_stripe(devices)\n end", "def addVolume(dev, size, type: \"gp2\", delete_on_termination: false)\n\n if setDeleteOntermination(dev, delete_on_termination)\n MU.log \"A volume #{dev} already attached to #{self}, skipping\", MU::NOTICE\n return\n end\n\n MU.log \"Creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).create_volume(\n availability_zone: cloud_desc.placement.availability_zone,\n size: size,\n volume_type: type\n )\n\n MU.retrier(wait: 3, loop_if: Proc.new {\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(volume_ids: [creation.volume_id]).volumes.first\n if ![\"creating\", \"available\"].include?(creation.state)\n raise MuError, \"Saw state 
'#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n creation.state != \"available\"\n })\n\n\n if @deploy\n MU::Cloud::AWS.createStandardTags(\n creation.volume_id,\n region: @region,\n credentials: @credentials,\n optional: @config['optional_tags'],\n nametag: @mu_name+\"-\"+dev.upcase,\n othertags: @config['tags']\n )\n end\n\n MU.log \"Attaching #{creation.volume_id} as #{dev} to #{@cloud_id} in #{@region} (credentials #{@credentials})\"\n attachment = nil\n MU.retrier([Aws::EC2::Errors::IncorrectState], wait: 15, max: 4) {\n attachment = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).attach_volume(\n device: dev,\n instance_id: @cloud_id,\n volume_id: creation.volume_id\n )\n }\n\n begin\n att_resp = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(volume_ids: [attachment.volume_id])\n if att_resp and att_resp.volumes and !att_resp.volumes.empty? and\n att_resp.volumes.first.attachments and\n !att_resp.volumes.first.attachments.empty?\n attachment = att_resp.volumes.first.attachments.first\n if !attachment.nil? and ![\"attaching\", \"attached\"].include?(attachment.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end\n end while attachment.nil? or attachment.state != \"attached\"\n\n # Set delete_on_termination, which for some reason is an instance\n # attribute and not on the attachment\n setDeleteOntermination(dev, delete_on_termination)\n end", "def create_volume(volume_name, config:, **kwargs)\n object = { name: volume_name }.merge(config).merge(kwargs)\n log.info \"Creating volume: #{object}\"\n volumes << object\n end", "def addVolume(dev, size, type: \"gp2\")\n if @cloud_id.nil? or @cloud_id.empty?\n MU.log \"#{self} didn't have a cloud id, couldn't determine 'active?' status\", MU::ERR\n return true\n end\n az = nil\n MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_instances(\n instance_ids: [@cloud_id]\n ).reservations.each { |resp|\n if !resp.nil? 
and !resp.instances.nil?\n resp.instances.each { |instance|\n az = instance.placement.availability_zone\n instance.block_device_mappings.each { |vol|\n if vol.device_name == dev\n MU.log \"A volume #{dev} already attached to #{self}, skipping\", MU::NOTICE\n return\n end\n }\n }\n end\n }\n MU.log \"Creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n creation = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).create_volume(\n availability_zone: az,\n size: size,\n volume_type: type\n )\n begin\n sleep 3\n creation = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_volumes(volume_ids: [creation.volume_id]).volumes.first\n if ![\"creating\", \"available\"].include?(creation.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end while creation.state != \"available\"\n\n if @deploy\n MU::MommaCat.listStandardTags.each_pair { |key, value|\n MU::MommaCat.createTag(creation.volume_id, key, value, region: @config['region'], credentials: @config['credentials'])\n }\n MU::MommaCat.createTag(creation.volume_id, \"Name\", \"#{MU.deploy_id}-#{@config[\"name\"].upcase}-#{dev.upcase}\", region: @config['region'], credentials: @config['credentials'])\n end\n\n attachment = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).attach_volume(\n device: dev,\n instance_id: @cloud_id,\n volume_id: creation.volume_id\n )\n\n begin\n sleep 3\n attachment = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_volumes(volume_ids: [attachment.volume_id]).volumes.first.attachments.first\n if ![\"attaching\", \"attached\"].include?(attachment.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end while attachment.state != \"attached\"\n end", "def create_default_volume()\n # Create a default application_volume using the volume attributes from the cookbook\n create_node_volume(:application_volume)\n end", "def attach_blank_volume opts\n device = opts.delete :device\n opts = {:ec2_availability_zone => ec2_availability_zone }.merge opts\n volume = account.create_ec2_ebs_volume opts\n attach_volume volume, device\n end", "def create body = {}\n @connection.request(method: :post, path: \"/volumes/create\", headers: {\"Content-Type\": \"application/json\"}, body: body.to_json)\n end", "def create_disk(size, cloud_properties, server_id = nil)\n volume_service_client = @openstack.volume\n with_thread_name(\"create_disk(#{size}, #{cloud_properties}, #{server_id})\") do\n raise ArgumentError, 'Disk size needs to be an integer' unless size.is_a?(Integer)\n cloud_error('Minimum disk size is 1 GiB') if size < 1024\n\n unique_name = generate_unique_name\n volume_params = {\n # cinder v1 requires display_ prefix\n display_name: \"volume-#{unique_name}\",\n display_description: '',\n # cinder v2 does not require prefix\n name: \"volume-#{unique_name}\",\n description: '',\n size: mib_to_gib(size),\n }\n\n if cloud_properties.key?('type')\n volume_params[:volume_type] = cloud_properties['type']\n elsif !@default_volume_type.nil?\n volume_params[:volume_type] = @default_volume_type\n end\n\n if server_id && @az_provider.constrain_to_server_availability_zone?\n server = @openstack.with_openstack { @openstack.compute.servers.get(server_id) }\n volume_params[:availability_zone] = server.availability_zone if 
server&.availability_zone\n end\n\n @logger.info('Creating new volume...')\n new_volume = @openstack.with_openstack { volume_service_client.volumes.create(volume_params) }\n\n @logger.info(\"Creating new volume `#{new_volume.id}'...\")\n @openstack.wait_resource(new_volume, :available)\n\n new_volume.id.to_s\n end\n end", "def create_disk(size, cloud_properties, instance_id = nil)\n raise ArgumentError, 'disk size needs to be an integer' unless size.kind_of?(Integer)\n with_thread_name(\"create_disk(#{size}, #{instance_id})\") do\n volume_properties = VolumeProperties.new(\n size: size,\n type: cloud_properties['type'],\n iops: cloud_properties['iops'],\n az: @az_selector.select_availability_zone(instance_id),\n encrypted: cloud_properties['encrypted'],\n kms_key_arn: cloud_properties['kms_key_arn']\n )\n\n volume_resp = @ec2_client.create_volume(volume_properties.persistent_disk_config)\n volume = Aws::EC2::Volume.new(\n id: volume_resp.volume_id,\n client: @ec2_client,\n )\n\n logger.info(\"Creating volume '#{volume.id}'\")\n ResourceWait.for_volume(volume: volume, state: 'available')\n\n volume.id\n end\n end", "def create\n @volume = Volume.new(params[:volume])\n\n respond_to do |format|\n if @volume.save\n format.html { redirect_to(@volume, :notice => 'Volume was successfully created.') }\n format.xml { render :xml => @volume, :status => :created, :location => @volume }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @volume.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @volume = Volume.new(volume_params)\n\n respond_to do |format|\n if @volume.save\n format.html { redirect_to @volume, notice: 'Volume was successfully created.' }\n format.json { render :show, status: :created, location: @volume }\n else\n format.html { render :new }\n format.json { render json: @volume.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @volume = Volume.new(volume_params)\n\n respond_to do |format|\n if @volume.save\n format.html { redirect_to @volume, notice: 'Volume was successfully created.' }\n format.json { render action: 'show', status: :created, location: @volume }\n else\n format.html { render action: 'new' }\n format.json { render json: @volume.errors, status: :unprocessable_entity }\n end\n end\n end", "def provision_storage host, vm\n if host['volumes']\n # Lazily create the volume client if needed\n volume_client_create\n host['volumes'].keys.each_with_index do |volume, index|\n @logger.debug \"Creating volume #{volume} for OpenStack host #{host.name}\"\n\n # The node defintion file defines volume sizes in MB (due to precedent\n # with the vagrant virtualbox implementation) however OpenStack requires\n # this translating into GB\n openstack_size = host['volumes'][volume]['size'].to_i / 1000\n\n # Create the volume and wait for it to become available\n vol = @volume_client.volumes.create(\n :size => openstack_size,\n :display_name => volume,\n :description => \"Beaker volume: host=#{host.name} volume=#{volume}\",\n )\n vol.wait_for { ready? }\n\n # Fog needs a device name to attach as, so invent one. 
The guest\n # doesn't pay any attention to this\n device = \"/dev/vd#{('b'.ord + index).chr}\"\n vm.attach_volume(vol.id, device)\n end\n end\n end", "def volume_client_create\n options = {\n :provider => :openstack,\n :openstack_api_key => @options[:openstack_api_key],\n :openstack_username => @options[:openstack_username],\n :openstack_auth_url => @options[:openstack_auth_url],\n :openstack_tenant => @options[:openstack_tenant],\n :openstack_region => @options[:openstack_region],\n }\n @volume_client ||= Fog::Volume.new(options)\n unless @volume_client\n raise \"Unable to create OpenStack Volume instance\"\\\n \" (api_key: #{@options[:openstack_api_key]},\"\\\n \" username: #{@options[:openstack_username]},\"\\\n \" auth_url: #{@options[:openstack_auth_url]},\"\\\n \" tenant: #{@options[:openstack_tenant]})\"\n end\n end", "def create_disk(size)\n @logger.info(\"create_disk(#{size})\")\n disk_name = \"bosh-disk-#{SecureRandom.uuid}\"\n logger.info(\"Start to create an empty vhd blob: blob_name: #{disk_name}.vhd\")\n @blob_manager.create_empty_vhd_blob(container_name, \"#{disk_name}.vhd\", size)\n disk_name\n end", "def addVolume(dev, size, type: \"pd-standard\", delete_on_termination: false)\n devname = dev.gsub(/.*?\\/([^\\/]+)$/, '\\1')\n resname = MU::Cloud::Google.nameStr(@mu_name+\"-\"+devname)\n MU.log \"Creating disk #{resname}\"\n\n description = @deploy ? @deploy.deploy_id : @mu_name+\"-\"+devname\n\n newdiskobj = MU::Cloud::Google.compute(:Disk).new(\n size_gb: size,\n description: description,\n zone: @config['availability_zone'],\n# type: \"projects/#{config['project']}/zones/#{config['availability_zone']}/diskTypes/pd-ssd\",\n type: \"projects/#{@project_id}/zones/#{@config['availability_zone']}/diskTypes/#{type}\",\n# Other values include pd-ssd and local-ssd\n name: resname\n )\n\n begin\n newdisk = MU::Cloud::Google.compute(credentials: @config['credentials']).insert_disk(\n @project_id,\n @config['availability_zone'],\n newdiskobj\n )\n rescue ::Google::Apis::ClientError => e\n if e.message.match(/^alreadyExists: /)\n MU.log \"Disk #{resname} already exists, ignoring request to create\", MU::WARN\n return\n else\n raise e\n end\n end\n\n attachobj = MU::Cloud::Google.compute(:AttachedDisk).new(\n device_name: devname,\n source: newdisk.self_link,\n type: \"PERSISTENT\",\n auto_delete: delete_on_termination\n )\n\n MU.log \"Attaching disk #{resname} to #{@cloud_id} at #{devname}\"\n MU::Cloud::Google.compute(credentials: @config['credentials']).attach_disk(\n @project_id,\n @config['availability_zone'],\n @cloud_id,\n attachobj\n )\n\n end", "def create_vbox_hdd(client_name,vbox_disk_name)\n message = \"Creating:\\tVM hard disk for \"+client_name\n command = \"VBoxManage createhd --filename \\\"#{vbox_disk_name}\\\" --size \\\"#{$default_vm_size}\\\"\"\n execute_command(message,command)\n return\nend", "def create_volume request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_create_volume_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Longrunning::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if 
block_given?\n result\n end", "def create_volumes(volume_definitions)\n volume_definitions.each { |volume| client.volumes.create(volume) }\n end", "def create_lvm_volumes( opts = {} )\n opts = deep_merge_hashes( @aws_default_instance_options, opts )\n unless exist?( opts[ :lvm_volumes ].first[1] )\n create_lvm_volumes!( opts )\n end\n end", "def createEBSSnapshot(client=nil,description='',volume_id=nil)\n return false if volume_id.nil? || client.nil?\n # Fetch the Volume Name. This will be used in the description of the snapshot\n resp = client.describe_volumes({dry_run: false, volume_ids: [volume_id] })\n resp.volumes[0].tags.each do |t|\n if t.key=='Name'\n description = t.value unless t.value.empty?\n break\n end\n end\n # puts \"Taking snapshot of volume #{volume_id}...\"\n return client.create_snapshot({\n dry_run: false,\n volume_id: volume_id,\n description: description\n })\nend", "def attach_volume( options = {} )\n options = { :volume_id => '' }.merge(options)\n options = { :instance_id => '' }.merge(options)\n options = { :device => '' }.merge(options)\n raise ArgumentError, \"No :volume_id provided\" if options[:volume_id].nil? || options[:volume_id].empty?\n raise ArgumentError, \"No :instance_id provided\" if options[:instance_id].nil? || options[:instance_id].empty?\n raise ArgumentError, \"No :device provided\" if options[:device].nil? || options[:device].empty?\n\n params = {\n \"VolumeId\" => options[:volume_id],\n \"InstanceId\" => options[:instance_id],\n \"Device\" => options[:device]\n }\n return response_generator(:action => \"AttachVolume\", :params => params)\n end", "def create\n tmp = Puppet::FileSystem::Uniquefile.new('quobyte_volume_config')\n tmp.write(resource[:content])\n tmp.flush()\n\n qmgmt(['volume', 'config', 'import', [resource[:name]], tmp.path])\n end", "def attachvol(compute,volume,instance,device)\n\t\tprint \"Running attachvol\\n\" if $debug\n\t\traise ArgumentError \"ebsvol[aws]->attachvol: Sorry, you must specify a valid device matching /dev/sd[a-m].\" if (device !~ /^\\/dev\\/sd[a-m]/)\n\t\tif (volume['status'] != \"in-use\" )\n\t\t\t# check instance is in the same availability zone\n\t\t\tif ( volume['availabilityZone'] != instance['placement']['availabilityZone'])\n\t\t\t\traise \"ebsvol[aws]->attachvol: Sorry, volumes must be in the same availability zone as the instance to be attached to.\\nThe volume #{volume['tagSet']['Name']} is in availability zone #{volume['availabilityZone']} and the instance is in #{instance['placement']['availabilityZone']}\" \n\t\t\telse\n\t\t\t\t# check that the device is available\n\t\t\t\tinuse = false\n\t\t\t\tinstance['blockDeviceMapping'].each { |x| inuse=true if x['deviceName'] == device }\n\t\t\t\tif ( inuse )\n\t\t\t\t\traise \"ebsvol[aws]->attachvol: Sorry, the device #{device} is already in use on #{instance['tagSet']['Name']}\" \n\t\t\t\telse\n\t\t\t\t\tresp = compute.attach_volume(instance['instanceId'],volume['volumeId'],device)\n\t\t\t\t\tif (resp.status == 200)\n\t\t\t\t\t\t# now wait for it to attach!\n\t\t\t\t\t\tcheck = volinfo(compute,volume['tagSet']['Name'])\n\t\t\t\t\t\twhile ( check['status'] !~ /(attached|in-use)/ ) do\n\t\t\t\t\t\t\tprint \"ebsvol[aws]->attachvol: status is #{check['status']}\\n\" if $debug\n\t\t\t\t\t\t\tsleep 5\n\t\t\t\t\t\t\tcheck = volinfo(compute,volume['tagSet']['Name'])\n\t\t\t\t\t\tend\n\t\t\t\t\t\tsleep 5 # allow aws to propigate the fact\n\t\t\t\t\t\tprint \"ebsvol[aws]->attachvol: volume is now attached\\n\" if 
$debug\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\telse\n\t\t\traise \"ebsvol[aws]->attachvol: Sorry, I could not attach #{volume['volumeId']} because it is in use!\"\n\t\tend\n\tend", "def create_vbd(vm_ref, vdi_ref, position, boot = true)\n vbd_record = {\n 'VM' => vm_ref,\n 'VDI' => vdi_ref,\n 'empty' => false,\n 'other_config' => { 'owner' => '' },\n 'userdevice' => position.to_s,\n 'bootable' => boot,\n 'mode' => 'RW',\n 'qos_algorithm_type' => '',\n 'qos_algorithm_params' => {},\n 'qos_supported_algorithms' => [],\n 'type' => 'Disk'\n }\n\n task = xapi.Async.VBD.create(vbd_record)\n ui.msg 'Waiting for VBD create'\n vbd_ref = get_task_ref(task)\n vbd_ref\n end", "def create_disk(size, cloud_properties, instance_id = nil)\n raise ArgumentError, 'disk size needs to be an integer' unless size.is_a?(Integer)\n\n with_thread_name(\"create_disk(#{size}, #{instance_id})\") do\n props = @props_factory.disk_props(cloud_properties)\n\n volume_properties = VolumeProperties.new(\n size: size,\n type: props.type,\n iops: props.iops,\n throughput: props.throughput,\n az: @az_selector.select_availability_zone(instance_id),\n encrypted: props.encrypted,\n kms_key_arn: props.kms_key_arn\n )\n volume = @volume_manager.create_ebs_volume(**volume_properties.persistent_disk_config)\n\n volume.id\n end\n end", "def create_vdi(name, sr_ref, size)\n vdi_record = {\n \"name_label\" => \"#{name}\",\n \"name_description\" => \"Root disk for #{name} created by knfie xapi\",\n \"SR\" => sr_ref,\n \"virtual_size\" => input_to_bytes(size).to_s,\n \"type\" => \"system\",\n \"sharable\" => false,\n \"read_only\" => false,\n \"other_config\" => {},\n }\n \n # Async create the VDI\n task = xapi.Async.VDI.create(vdi_record)\n ui.msg \"waiting for VDI Create\"\n vdi_ref = get_task_ref(task)\n end", "def provision_and_mount_volume(server, disk_size, device)\n unless provider.find_server_device(server, device)\n say \"Provisioning #{disk_size}Gb persistent disk for inception VM...\"\n provider.create_and_attach_volume(\"Inception Disk\", disk_size, server, device)\n end\n\n # Format and mount the volume\n if aws?\n say \"Skipping volume mounting on AWS 12.10 inception VM until its fixed\", [:yellow, :bold]\n run_ssh_command_until_successful server, \"sudo mkdir -p /var/vcap/store\"\n else\n say \"Mounting persistent disk as volume on inception VM...\"\n run_ssh_command_until_successful server, \"sudo mkfs.ext4 #{device} -F\"\n run_ssh_command_until_successful server, \"sudo mkdir -p /var/vcap/store\"\n run_ssh_command_until_successful server, \"sudo mount #{device} /var/vcap/store\"\n end\n end", "def attach_volumes!(server, volumes_count, size)\n #create a new block storage connection obj\n volume_service = Fog::Volume::OpenStack.new(\n :openstack_api_key => @os_password,\n :openstack_username => @os_username,\n :openstack_auth_url => @os_auth_url,\n :openstack_tenant => @os_tenant,\n )\n base = 'sdd'\n volumes_count.times do |i|\n base = base.next!\n #create a new volume\n vol = volume_service.volumes.create(\n :size => size,\n :display_name => \"#{server.name}-#{i}\",\n :description => \"Volume attached to #{server.name} - managed by ankus\"\n )\n vol.reload\n vol.wait_for { status == 'available' }\n server.attach_volume(vol.id, \"/dev/#{base}\")\n vol.wait_for { status == 'in-use' }\n end\n end", "def create\n @sm_volume = SmVolume.new(params[:sm_volume])\n\n respond_to do |format|\n if @sm_volume.save\n format.html { redirect_to @sm_volume, notice: 'Sm volume was successfully created.' 
}\n format.json { render json: @sm_volume, status: :created, location: @sm_volume }\n else\n format.html { render action: \"new\" }\n format.json { render json: @sm_volume.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_vdi(name, sr_ref, size)\n vdi_record = {\n 'name_label' => \"#{name}\",\n 'name_description' => \"Root disk for #{name} created by #{ENV['USER']} with knfie xapi\",\n 'SR' => sr_ref,\n 'virtual_size' => input_to_bytes(size).to_s,\n 'type' => 'system',\n 'sharable' => false,\n 'read_only' => false,\n 'other_config' => {}\n }\n\n # Async create the VDI\n task = xapi.Async.VDI.create(vdi_record)\n ui.msg 'waiting for VDI Create'\n vdi_ref = get_task_ref(task)\n vdi_ref\n end", "def create_volume(id, volume, opts = {})\n data, _status_code, _headers = create_volume_with_http_info(id, volume, opts)\n data\n end", "def create_vbd(vm_ref, vdi_ref, position)\n vbd_record = {\n \"VM\" => vm_ref,\n \"VDI\" => vdi_ref,\n \"empty\" => false,\n \"other_config\" => {\"owner\"=>\"\"},\n \"userdevice\" => position.to_s,\n \"bootable\" => true,\n \"mode\" => \"RW\",\n \"qos_algorithm_type\" => \"\",\n \"qos_algorithm_params\" => {},\n \"qos_supported_algorithms\" => [],\n \"type\" => \"Disk\"\n }\n\n task = xapi.Async.VBD.create(vbd_record)\n ui.msg \"Waiting for VBD create\"\n vbd_ref = get_task_ref(task) \n end", "def create\n @actual_volume = ActualVolume.new(actual_volume_params)\n\n respond_to do |format|\n if @actual_volume.save\n format.html { redirect_to @actual_volume, notice: 'Actual volume was successfully created.' }\n format.json { render :show, status: :created, location: @actual_volume }\n else\n format.html { render :new }\n format.json { render json: @actual_volume.errors, status: :unprocessable_entity }\n end\n end\n end", "def volume_create\n help = [\n '',\n \"Use: #{me} volume create --name=VOLUME --git=URL\",\n \"Use: #{me} volume create --name=VOLUME --dir=PATH\",\n '',\n 'Creates a volume named VOLUME from a git clone of URL',\n 'Creates a volume named VOLUME from a copy of PATH'\n ]\n # asked for help?\n if [nil,'help','--help'].include? ARGV[2]\n show help\n exit failed\n end\n # unknown arguments?\n knowns = ['name','git','dir']\n unknown = ARGV[2..-1].select do |argv|\n knowns.none? { |known| argv.start_with?('--' + known + '=') }\n end\n if unknown != []\n show help\n unknown.each { |arg| puts \"FAILED: unknown argument [#{arg.split('=')[0]}]\" }\n exit failed\n end\n # required known arguments\n args = ARGV[2..-1]\n vol = get_arg('--name', args)\n url = get_arg('--git', args)\n dir = get_arg('--dir', args)\n if vol.nil? || (url.nil? && dir.nil?)\n show help\n exit failed\n end\n if vol.length == 1\n msg = 'volume names must be at least two characters long. See https://github.com/docker/docker/issues/20122'\n puts \"FAILED: [volume create --name=#{vol}] #{msg}\"\n exit failed\n end\n if volume_exists? vol\n msg = \"#{vol} already exists\"\n puts \"FAILED: [volume create --name=#{vol}] #{msg}\"\n exit failed\n end\n # cyber-dojo.sh does actual [volume create]\nend", "def add_volume(bucket,mount,options=nil)\n s3fs_volumes << { :bucket => bucket, :mount => mount, :options => options }\n end", "def create\n @volume_type = VolumeType.new(params[:volume_type])\n\n respond_to do |format|\n if @volume_type.save\n format.html { redirect_to @volume_type, notice: 'Volume type was successfully created.' 
}\n format.json { render json: @volume_type, status: :created, location: @volume_type }\n else\n format.html { render action: \"new\" }\n format.json { render json: @volume_type.errors, status: :unprocessable_entity }\n end\n end\n end", "def add_hdd_to_vbox_vm(client_name,vbox_disk_name)\n message = \"Attaching:\\tStorage to VM \"+client_name\n command = \"VBoxManage storageattach \\\"#{client_name}\\\" --storagectl \\\"#{$vbox_disk_type}\\\" --port 0 --device 0 --type hdd --medium \\\"#{vbox_disk_name}\\\"\"\n execute_command(message,command)\n return\nend", "def create\n @vdisk = Vdisk.new(params[:vdisk])\n\n respond_to do |format|\n if @vdisk.save\n format.html { redirect_to @vdisk, notice: 'Vdisk was successfully created.' }\n format.json { render json: @vdisk, status: :created, location: @vdisk }\n else\n format.html { render action: \"new\" }\n format.json { render json: @vdisk.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_disk(size, cloud_properties, vm_id = nil)\n with_thread_name(\"create_disk(#{size})\") do\n begin\n @logger.debug(\"Persistent Disk Size: #{size}\")\n @logger.debug(\"Cloud Properties: #{cloud_properties}\")\n @logger.debug(\"VM Id: #{vm_id}\")\n # Form a name for the volume group\n vol_group_name = \"bosh-peristent-disk-#{vm_id}-#{rand(1000)}\"\n # Create the volume group\n volume_uuid = @vol_group_manager.create_volume_group(vol_group_name)\n @logger.info(\"New volume group created [#{vol_group_name}]\")\n # Create a volume disk\n @vol_group_manager.create_volume_disk(volume_uuid, size,\n @container_uuid)\n @logger.info(\"New volume disk created on volume #{vol_group_name}.\")\n # Return volume group's uuid\n volume_uuid\n rescue => e\n logger.error(e)\n cloud_error(e.message)\n end\n end\n end", "def create(size)\n disk_id = uuid\n sh \"zfs create -o reservation=1024 -o quota=1024 #{base}/#{disk_id}\"\n disk_id\n end", "def create\n begin\n # Set the partition (/dev/sdb1), device (/dev/sdb) and alignment (optimal,minimal,none etc.) 
variables\n partition= resource[:name]\n device=partition[0,(partition.length-1)]\n alignment= resource[:alignment]\n\n # Now we can create the partition\n partitions = parted('-a', resource[:alignment],'--script',device,'mklabel',resource[:part_label],'mkpart', resource[:part_type],resource[:fs_type],resource[:p_begin],resource[:p_end])\n rescue Puppet::ExecutionFailure => e\n false\n end\n end", "def create_snapshot(options)\n snapshot = ec2.snapshots.new\n snapshot.volume_id = options['volume_id']\n snapshot.description = options['description']\n\n attempts = 0\n\n begin\n snapshot.save\n snapshot.reload\n rescue Fog::Compute::AWS::Error\n sleep 5\n attempts += 1\n if attempts == 5\n log \"Error communicating with API; Unable to save volume `#{options['volume_id']}` (Desc: #{options['description']})\"\n end\n return unless attempts == 5\n end\n\n options['tags'].each do |k,v|\n begin\n ec2.tags.create({resource_id: snapshot.id, key: k, value: v})\n rescue Errno::EINPROGRESS , Errno::EISCONN\n log \"API Connection Error\"\n sleep 1\n retry\n rescue Fog::Compute::AWS::Error\n log \"Failed attaching tag `'#{k}' => #{v}` to #{options['snapshot_type']} snapshot #{snapshot.id}\"\n sleep 1\n retry\n end\n end\n\n end", "def create_snapshot(volume_id, options = {})\n action = 'CreateSnapshot'\n params = {\n 'Action' => action,\n 'VolumeId' => volume_id\n }\n params['Description'] = options[:description] unless options[:description].blank?\n\n response = send_query_request(params)\n parser = Awsum::Ec2::SnapshotParser.new(self)\n snapshot = parser.parse(response.body)[0]\n if options[:tags] && options[:tags].size > 0\n create_tags snapshot.id, options[:tags]\n end\n snapshot\n end", "def mount_kvm_volume(name)\n dev = available_dev\n enable_netblockdev(name, dev)\n vol_grp = lvm_volume_group(\n lvm_partition(dev)\n )\n root = lvm_root(vol_grp)\n lvm_enable(vol_grp) unless lvm_enabled?(root)\n mount(name, root)\n dev\nend", "def create\n @admissive_volume = AdmissiveVolume.new(params[:admissive_volume])\n\n respond_to do |format|\n if @admissive_volume.save\n format.html { redirect_to @admissive_volume, notice: 'Admissive volume was successfully created.' 
}\n format.json { render json: @admissive_volume, status: :created, location: @admissive_volume }\n else\n format.html { render action: \"new\" }\n format.json { render json: @admissive_volume.errors, status: :unprocessable_entity }\n end\n end\n end", "def ebs(device_name, type: 'gp2', size: 8)\n {\n device_name: device_name,\n ebs: {\n delete_on_termination: true,\n volume_size: size,\n volume_type: type\n },\n no_device: ''\n }\nend", "def attach_volume volume, device\n if running?\n post '/attach_volume', :query => {\n :server => {\n :ec2_ebs_volume_href => volume.uri,\n :device => device\n }\n }\n else\n volume.attach_to_server self, device, 'boot'\n end\n end", "def volume(volume_name, attrs={}, &block)\n volumes[volume_name] ||= Ironfan::Volume.new(:parent => self, :name => volume_name)\n volumes[volume_name].configure(attrs, &block)\n volumes[volume_name]\n end", "def add_cdrom_to_vbox_vm(client_name)\n message = \"Attaching:\\tCDROM to VM \"+client_name\n command = \"VBoxManage storagectl \\\"#{client_name}\\\" --name \\\"cdrom\\\" --add \\\"sata\\\" --controller \\\"IntelAHCI\\\"\"\n execute_command(message,command)\n if File.exist?($vbox_additions_iso)\n message = \"Attaching:\\tISO \"+$vbox_additions_iso+\" to VM \"+client_name\n command = \"VBoxManage storageattach \\\"#{client_name}\\\" --storagectl \\\"cdrom\\\" --port 0 --device 0 --type dvddrive --medium \\\"#{$vbox_additions_iso}\\\"\"\n execute_command(message,command)\n end\n return\nend", "def create_disk(disk_id, size)\n @logger.info(\"create_disk(#{disk_id}, #{size})\")\n storage_account_name = disk_id.storage_account_name\n disk_name = disk_id.disk_name\n @logger.info(\"Start to create an empty vhd blob: blob_name: #{disk_name}.vhd\")\n @blob_manager.create_empty_vhd_blob(storage_account_name, DISK_CONTAINER, \"#{disk_name}.vhd\", size)\n end", "def create_stripe_volume(server)\n options = { \"EBS_MOUNT_POINT\" => \"text:#{@mount_point}\",\n \"EBS_STRIPE_COUNT\" => \"text:#{@stripe_count}\",\n \"EBS_TOTAL_VOLUME_GROUP_SIZE\" => \"text:#{@volume_size}\",\n \"EBS_LINEAGE\" => \"text:#{@lineage}\" }\n audit = server.run_executable(@scripts_to_run['create_stripe'], options)\n audit.wait_for_completed\n end", "def attach_volumes(node, disk_sizes)\n if $provider == :virtualbox\n node.vm.provider :virtualbox do |v, override|\n disk_num = 0\n disk_sizes.each do |disk_size|\n disk_num += 1\n diskname = File.join(File.dirname(File.expand_path(__FILE__)), \".virtualbox\", \"#{node.vm.hostname}-#{disk_num}.vdi\")\n unless File.exist?(diskname)\n v.customize ['createhd', '--filename', diskname, '--size', disk_size * 1024]\n end\n v.customize ['storageattach', :id, '--storagectl', 'SATA Controller', '--port', disk_num, '--device', 0, '--type', 'hdd', '--medium', diskname]\n end\n end\n end\n\n if $provider == :vmware_fusion\n node.vm.provider :vmware_fusion do |v, override|\n vdiskmanager = '/Applications/VMware\\ Fusion.app/Contents/Library/vmware-vdiskmanager'\n unless File.exist?(vdiskmanager)\n dir = File.join(File.dirname(File.expand_path(__FILE__)), \".vmware\")\n unless File.directory?( dir )\n Dir.mkdir dir\n end\n\n disk_num = 0\n disk_sizes.each do |disk_size|\n disk_num += 1\n diskname = File.join(dir, \"#{node.vm.hostname}-#{disk_num}.vmdk\")\n unless File.exist?(diskname)\n `#{vdiskmanager} -c -s #{disk_size}GB -a lsilogic -t 1 #{diskname}`\n end\n\n v.vmx[\"scsi0:#{disk_num}.filename\"] = diskname\n v.vmx[\"scsi0:#{disk_num}.present\"] = 'TRUE'\n v.vmx[\"scsi0:#{disk_num}.redo\"] = ''\n end\n end\n end\n end\n\n if 
$provider == :parallels\n node.vm.provider :parallels do |v, override|\n disk_sizes.each do |disk_size|\n v.customize ['set', :id, '--device-add', 'hdd', '--size', disk_size * 1024]\n end\n end\n end\n\nend", "def create_volume_with_http_info(id, volume, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: VolumesApi.create_volume ...'\n end\n # verify the required parameter 'id' is set\n if @api_client.config.client_side_validation && id.nil?\n fail ArgumentError, \"Missing the required parameter 'id' when calling VolumesApi.create_volume\"\n end\n # verify the required parameter 'volume' is set\n if @api_client.config.client_side_validation && volume.nil?\n fail ArgumentError, \"Missing the required parameter 'volume' when calling VolumesApi.create_volume\"\n end\n # resource path\n local_var_path = '/projects/{id}/storage'.sub('{' + 'id' + '}', CGI.escape(id.to_s))\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body] || @api_client.object_to_http_body(volume)\n\n # return_type\n return_type = opts[:debug_return_type] || 'Volume'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['x_auth_token']\n\n new_options = opts.merge(\n :operation => :\"VolumesApi.create_volume\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VolumesApi#create_volume\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def clone_volume(source, target)\n debug(\"Creating Libvirt volume #{target}\")\n debug(\"Cloning volume from #{source}\")\n\n # Attempt to locate the target or source volume\n source_image = client.volumes.get(source)\n if source_image.name =~ /^fog-\\d+/\n error(\"Could not find target image: #{source}.\")\n end\n\n # Clone the source volume\n source_image.clone_volume(target)\n client.volumes.all.find { |vol| vol.name == target }\n end", "def create_disk(name, size_mb, vm = nil, retries = @retries[\"default\"])\n new_disk = Xml::WrapperFactory.create_instance(\"DiskCreateParams\")\n new_disk.name = name\n new_disk.size_bytes = size_mb * 1024 * 1024 # VCD expects bytes\n new_disk.bus_type = Xml::HARDWARE_TYPE[:SCSI_CONTROLLER]\n new_disk.bus_sub_type = Xml::BUS_SUB_TYPE[:LSILOGIC]\n new_disk.add_locality(vm) if vm\n vdc = get_ovdc\n @logger.info(\"Creating independent disk #{name} of #{size_mb}MB.\")\n @logger.info(\"Disk locality ist set to #{vm.name} #{vm.urn}.\") if vm\n disk = @connection.post(vdc.add_disk_link, new_disk,\n Xml::MEDIA_TYPE[:DISK_CREATE_PARAMS])\n raise ApiRequestError unless disk.respond_to?(:running_tasks)\n # Creating a disk returns a disk with tasks inside\n retries.times do |try|\n return disk if disk.running_tasks.nil? || disk.running_tasks.empty?\n @logger.info(\"Disk #{disk.urn} has running tasks. 
Waiting for \" +\n \"tasks to finish. Try: #{try}/#{retries} .\" )\n disk.running_tasks.each do |t|\n monitor_task(t)\n end\n disk = @connection.get(disk)\n end\n end", "def create_snapshot_bundle\n # we shouldn't specify -k $EC2_PRIVATE_KEY since we assume private keys are already appended to /root/.ssh/authorized_keys\n # but it's a required parameter -- doh!\n run \"#{ec2_cmd('ec2-bundle-vol')} -v #{volume_to_bundle} -d #{bundling_directory} -k $EC2_PRIVATE_KEY -u #{@ec2_user_id} -s #{volume_size}\"\n end", "def add_volume(container_name: nil, volume_name: nil, volume_config:, mount_path: nil,\n mount_config: {}, block: false, timeout: 60, polling: 5)\n\n create_volume(volume_name, config: volume_config)\n mount_volume(container_name,\n volume_name: volume_name,\n mount_path: mount_path,\n **mount_config)\n\n update\n sleep polling\n wait_for_deployments(timeout: timeout, polling: polling) if block\n reload(true)\n end", "def create_kvm(kvm_name, template, options)\n ensure_name_availability!(kvm_name)\n clone_kvm(kvm_name, template)\n device = mount_kvm_volume(kvm_name)\n address = update_kvm_ip(kvm_name)\n update_hostname(kvm_name)\n make_console_accessible(kvm_name)\n unmount_kvm_volume(kvm_name, device)\n create_kvm_instance(kvm_name, options)\n puts \"KVM Node #{kvm_name} available at: #{address}\"\nend", "def create_disk(size, storage_account_name, caching)\n @logger.info(\"create_disk(#{size}, #{storage_account_name}, #{caching})\")\n disk_name = generate_data_disk_name(storage_account_name, caching)\n @logger.info(\"Start to create an empty vhd blob: blob_name: #{disk_name}.vhd\")\n @blob_manager.create_empty_vhd_blob(storage_account_name, DISK_CONTAINER, \"#{disk_name}.vhd\", size)\n disk_name\n end", "def create!\n set_id = generate_set_id\n\n @volume_ids.each do |id|\n snapshot = @fog.snapshots.new\n\n snapshot.description = \"#{@hostname.split(\".\")[0]} #{@mount} (#{self.needed_types.join(\", \")}) (#{set_id})\"\n snapshot.volume_id = id\n\n # Actually do the snapshot\n snapshot.save\n\n # Reload to get snapshot.id so we can add tags\n snapshot.reload\n\n @fog.tags.create(:resource_id => snapshot.id, :key => \"Host\", :value => @hostname)\n @fog.tags.create(:resource_id => snapshot.id, :key => \"Mount\", :value => @mount)\n @fog.tags.create(:resource_id => snapshot.id, :key => \"SetID\", :value => set_id)\n @fog.tags.create(:resource_id => snapshot.id, :key => \"Type\", :value => self.needed_types.join(\",\"))\n end\n end", "def gen_pv\n pv_name = \"pv-#{gen_uuid}\"\n file = \"#{$templates_path}/#{pv_name}.yaml\"\n File.open(file, 'w') do |f|\n f.puts <<-PV\nkind: PersistentVolume\napiVersion: v1\nmetadata:\n name: #{pv_name}\nspec:\n capacity:\n storage: 1Gi\n accessModes:\n - ReadWriteOnce\n nfs:\n path: \"/\"\n server: \"10.1.1.1\"\n persistentVolumeReclaimPolicy: \"Retain\"\nPV\n end\n\n `oc create -f #{file}`\nend", "def attach(volume, device = '/dev/sdh')\n @ec2.attach_volume volume.id, id, device\n end", "def create_kvm_instance(name, opts={})\n cmd = \"virt-install -n #{name} --ram #{opts[:memory]} --disk \" <<\n \"path=#{File.join(KVM_HOME, 'storage', \"#{name}.qcow2\")},device=disk,bus=virtio,format=qcow2 \" <<\n \"-v --import --noautoconsole --vcpus=#{opts[:vcpus]},maxvcpus=#{opts[:maxvcpus]}\" \n unless(system(cmd))\n raise \"Failed to create KVM instance!\"\n end \nend", "def attach(instance_id, volume)# rubocop:disable Metrics/AbcSize\n inst_details = AttrFinder.new(@instanceparameters)\n @options[:inst] = volume\n inst_details.options = @options\n 
inst_details.validate = @validate\n inst_details.function = 'server'\n opts = {}\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::Models::AttachVolumeDetails.new\n request.instance_id = instance_id\n request.type = 'iscsi'\n request.volume_id = inst_details.volume\n api = OracleBMC::Core::ComputeClient.new\n response = api.attach_volume(request, opts)\n end", "def attach_storage_volume(volume_id, instance_id, device=nil)\n must_support! :storage_volumes\n result = connection.post(api_uri(\"/storage_volumes/#{volume_id}/attach\")) do |r|\n r.params = { :instance_id => instance_id, :device => device }\n end\n if result.status.is_ok?\n from_resource(:storage_volume, result)\n end\n end", "def create\n @volume_type_extra_spec = VolumeTypeExtraSpec.new(params[:volume_type_extra_spec])\n\n respond_to do |format|\n if @volume_type_extra_spec.save\n format.html { redirect_to @volume_type_extra_spec, notice: 'Volume type extra spec was successfully created.' }\n format.json { render json: @volume_type_extra_spec, status: :created, location: @volume_type_extra_spec }\n else\n format.html { render action: \"new\" }\n format.json { render json: @volume_type_extra_spec.errors, status: :unprocessable_entity }\n end\n end\n end", "def volume_create_from_snap(source, name, snapshot_id)\n retries = 3\n begin \n @log.info \"Creating volume #{name} from snapshot id #{snapshot_id}...\"\n ret = @cloud_stack.create_volume(name, ZONE, nil, snapshot_id)\n id = ret[\"createvolumeresponse\"][\"jobid\"]\n wait_for_job id\n rescue Exception => e\n retries -= 1\n if retries > 0\n @log.error \"Failed. #{e.message}. Retrying...\"\n retry\n end\n raise e\n end\n vol_id = ret[\"createvolumeresponse\"][\"id\"]\n @log.info \"Created volume id: #{vol_id}\"\n vol_id\n end", "def attach_volume(id, volume) \n data = { 'volumeAttachment' => { 'volumeId' => volume, 'device' => \"/dev/vdb\" } }\n return post_request(address(\"/servers/\" + id + \"/os-volume_attachments\"), data, @token)\n end", "def create # rubocop:disable Metrics/AbcSize\n attrcheck = { 'container name' => @options[:container] }\n @validate.attrvalidate(@options, attrcheck)\n newcontainer = ObjectStorage.new(@options[:id_domain], @options[:user_name], @options[:passwd])\n newcontainer = newcontainer.create(@options[:container])\n if newcontainer.code == '201'\n puts \"Container #{@options[:container]} created\"\n else\n @util.response_handler(newcontainer)\n end \n end", "def attach_node_volume (volume_label)\n # XXX should check whether this device name is already allocated,\n # and if so throw an exception\n # Helper method, attach an arbitrary volume using an arbitrary label that must be preconfigured in nodes\n Chef::Log.info(\"In attach_node_volume with volume_label #{volume_label}\")\n mount_device = node.application_attributes[volume_label].mount_device\n volume_id = node.application_attributes[volume_label].volume_id\n\n if mount_device.nil?\n Chef::Log.fatal(\"No mount device for volume label #{volume_label}.\tMust supply a volume label configured in nodes\")\n raise\n end\n\n attach_volume(volume_label, volume_id, mount_device)\n end", "def lv_create_size_in_kb(logical_volume_name, volume_group, size_in_kb)\n External.cmd(@server, \"#{@command} lvcreate -l #{size_in_kb} -n #{logical_volume_name} #{volume_group.name}\") if volume_group_check_space_in_kb(volume_group,size_in_kb)\n end", "def set_volume\n @volume = services.block_storage.get_volume(params[:id])\n end", "def create\n if @resource[:grow_fs] == :true\n fstabentry\n growfs\n 
mountfs\n else\n createfs()\n fstabentry\n mountfs\n end\n end", "def add_disk(server, size)\n host = server.to_s\n\n # Increment disk id\n if !DISKS.key?(host) then\n DISKS[host] = 0\n else\n DISKS[host] += 1\n end\n disk_id = DISKS[host]\n disk_filename = \".vagrant/disks/\" + host + \"_\" + disk_id.to_s + \".vdi\"\n\n server.vm.provider \"virtualbox\" do |v|\n # Create disk if it not exist\n unless File.exist?(disk)\n v.customize [\"createhd\", \"--filename\", disk_filename, \"--size\", size * 1024 * 1024]\n end\n v.customize ['storageattach', :id, '--storagectl', 'SATA Controller', '--port', disk_id, '--device', 0, '--type', 'hdd', '--medium', disk]\n end\nend", "def init_ebs_volumes\n @@client.describe_volumes.volumes\n end", "def create_server(options = {})\n begin\n add_custom_attributes(options[:server_def])\n server = connection.servers.create(options[:server_def])\n\n print \"\\nWaiting For Server\"\n server.wait_for(Integer(options[:server_create_timeout])) do\n print '.'\n !locked?\n end\n\n # attach/or create any volumes.\n options[:server_volumes].each do |voldef|\n Chef::Log.debug(\"Volume definition: #{voldef}\")\n if voldef.key?(:size) || voldef.key?(:size_gb)\n # create a new volume\n result = connection.add_volume(server.id, voldef)\n name = (result / 'disk/name').first.text\n elsif voldef.key? :id\n server.attach_volume(voldef)\n name = voldef[:id]\n else\n raise CloudExceptions::ServerCreateError, \"cannot handle volume definition #{voldef}\"\n end\n\n print \"\\nAttached #{name} volume\"\n end\n\n print \"\\nWaiting For Volumes\"\n server.wait_for(Integer(options[:server_create_timeout])) do\n print '.'\n !locked?\n end\n Chef::Log.debug(\"options: #{options}\")\n server.start_with_cloudinit(user_data: options[:cloud_init])\n rescue Excon::Error::BadRequest => e\n response = Chef::JSONCompat.from_json(e.response.body)\n message = if response['badRequest']['code'] == 400\n \"Bad request (400): #{response['badRequest']['message']}\"\n else\n \"Unknown server error (#{response['badRequest']['code']}): #{response['badRequest']['message']}\"\n end\n ui.fatal(message)\n raise CloudExceptions::ServerCreateError, message\n rescue Fog::Errors::Error => e\n raise CloudExceptions::ServerCreateError, e.message\n end\n\n print \"\\n#{ui.color(\"Waiting for server [wait time = #{options[:server_create_timeout]}]\", :magenta)}\"\n\n # wait for it to be ready to do stuff\n server.wait_for(Integer(options[:server_create_timeout])) do\n print '.'\n ready?\n end\n\n puts(\"\\n\")\n server\n end", "def create_filesystem(host, mount_name)\n fs_type = filesystem_type(host)\n\n case host['platform']\n when %r{aix}\n volume_group = on(host, 'lsvg').stdout.split(\"\\n\")[0]\n on(host, \"mklv -y #{mount_name} #{volume_group} 1M\")\n on(host, \"mkfs -V #{fs_type} -l #{mount_name} /dev/#{mount_name}\")\n when %r{el-|centos|fedora|sles|debian|ubuntu}\n on(host, \"dd if=/dev/zero of='/tmp/#{mount_name}' count=16384\", acceptable_exit_codes: [0, 1])\n on(host, \"yes | mkfs -t #{fs_type} -q '/tmp/#{mount_name}'\", acceptable_exit_codes: (0..254))\n else\n # TODO: Add Solaris and OSX support, as per PUP-5201 and PUP-4823\n fail_test(\"Creating filesystems on #{host['platform']} is not currently supported.\")\n end\n end", "def create_iscsi_disks(vbox, name)\n unless controller_exists(name, 'SATA Controller')\n vbox.customize ['storagectl', :id,\n '--name', 'SATA Controller',\n '--add', 'sata']\n end\n\n dir = \"#{ENV['HOME']}/VirtualBox\\ VMs/vdisks\"\n Dir.mkdir dir unless File.directory?(dir)\n\n 
osts = (1..20).map { |x| [\"OST#{x}\", '5120'] }\n\n [\n %w[mgt 512],\n %w[mdt0 5120]\n ].concat(osts).each_with_index do |(name, size), i|\n file_to_disk = \"#{dir}/#{name}.vdi\"\n port = (i + 1).to_s\n\n unless File.exist?(file_to_disk)\n vbox.customize ['createmedium',\n 'disk',\n '--filename',\n file_to_disk,\n '--size',\n size,\n '--format',\n 'VDI',\n '--variant',\n 'fixed']\n end\n\n vbox.customize ['storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', port,\n '--type', 'hdd',\n '--medium', file_to_disk,\n '--device', '0']\n\n vbox.customize ['setextradata', :id,\n \"VBoxInternal/Devices/ahci/0/Config/Port#{port}/SerialNumber\",\n name.ljust(20, '0')]\n end\nend", "def attach_volume(volume_id, instance_id, device = '/dev/sdh')\n action = 'AttachVolume'\n params = {\n 'Action' => action,\n 'VolumeId' => volume_id,\n 'InstanceId' => instance_id,\n 'Device' => device\n }\n\n response = send_query_request(params)\n response.is_a?(Net::HTTPSuccess)\n end", "def create(args = {})\n storageclass = {\n :apiVersion => \"storage.k8s.io/v1\",\n :kind => \"StorageClass\",\n :metadata => {\n :name => args[:name],\n },\n :parameters => args[:parameters],\n :mount_options => args[:mount_options],\n :provisioner => args[:provisioner],\n :volume_binding_mode => args[:volume_binding_mode],\n :reclaim_policy => args[:reclaim_policy]\n }\n\n service.create_storageclass(storageclass)\n end", "def determine_volume\n vol = currently_attached_volume(instance_id, new_resource.device)\n vol_id = new_resource.volume_id || volume_id_in_node_data || ( vol ? vol[:aws_id] : nil )\n raise \"volume_id attribute not set and no volume id is set in the node data for this resource (which is populated by action :create) and no volume is attached at the device\" unless vol_id\n\n # check that volume exists\n vol = volume_by_id(vol_id)\n raise \"No volume with id #{vol_id} exists\" unless vol\n\n vol\n end", "def create_gdom_disk(options)\n client_disk = options['q_struct']['gdom_disk'].value\n disk_size = options['q_struct']['gdom_size'].value\n disk_size = disk_size.downcase\n vds_disk = options['name']+\"_vdisk0\"\n if not client_disk.match(/\\/dev/)\n if not File.exist?(client_disk)\n message = \"Information:\\tCreating guest domain disk \"+client_disk+\" for client \"+options['name']\n command = \"mkfile -n #{disk_size} #{client_disk}\"\n output = execute_command(options,message,command)\n end\n end\n message = \"Information:\\tChecking Virtual Disk Server device doesn't already exist\"\n command = \"ldm list-services |grep 'primary-vds0' |grep '#{vds_disk}'\"\n output = execute_command(options,message,command)\n if not output.match(/#{options['name']}/)\n message = \"Information:\\tAdding disk device to Virtual Disk Server\"\n command = \"ldm add-vdsdev #{client_disk} #{vds_disk}@primary-vds0\"\n output = execute_command(options,message,command)\n end\n return\nend", "def create_disk(disk_id, location, size, storage_account_type)\n @logger.info(\"create_disk(#{disk_id}, #{location}, #{size}, #{storage_account_type})\")\n resource_group_name = disk_id.resource_group_name()\n disk_name = disk_id.disk_name()\n caching = disk_id.caching()\n tags = AZURE_TAGS.merge({\n \"caching\" => caching\n })\n disk_params = {\n :name => disk_name,\n :location => location,\n :tags => tags,\n :disk_size => size,\n :account_type => storage_account_type\n }\n @logger.info(\"Start to create an empty managed disk `#{disk_name}' in resource group `#{resource_group_name}'\")\n 
@azure_client2.create_empty_managed_disk(resource_group_name, disk_params)\n end", "def setup_lvm_on_partition(part)\n return unless part.lvm\n\n pvol = \"/dev/disk/by-partlabel/#{part.label}\"\n execute!(\"pvcreate -y #{pvol}\")\n execute!(\"vgcreate -y #{part.lvm.vg_name} #{pvol}\")\n\n # any \"open ended\" volumes (no size specified), we deal with last\n unspec_vol = nil\n\n notice(\"Creating LVM partitions\")\n part.lvm.volumes.each do |vol|\n if not vol.size_mb.is_a?(Integer)\n unspec_vol = vol\n next\n end\n\n info(\"Creating #{vol.label} volume\")\n execute!(\"lvcreate -y --name #{vol.label} --size #{vol.size_mb}MiB #{part.lvm.vg_name}\")\n next if not vol.fs\n\n create_filesystem(vol.fs, \"/dev/#{part.lvm.vg_name}/#{vol.label}\", vol.label)\n end\n\n if unspec_vol\n vol = unspec_vol\n info(\"Creating #{vol.label} volume\")\n execute!(\"lvcreate -y --name #{vol.label} -l 100%FREE #{part.lvm.vg_name}\")\n if vol.fs\n create_filesystem(vol.fs, \"/dev/#{part.lvm.vg_name}/#{vol.label}\", vol.label)\n end\n end\n end" ]
[ "0.8036493", "0.7529088", "0.75189686", "0.7499501", "0.7443673", "0.7443673", "0.74392515", "0.7435074", "0.7424645", "0.7199022", "0.7173891", "0.71394473", "0.7134112", "0.700443", "0.6923647", "0.6822279", "0.6817655", "0.6755411", "0.67133874", "0.6647261", "0.66386944", "0.6618827", "0.6519159", "0.64433855", "0.6437573", "0.6397487", "0.6312506", "0.62503844", "0.61982757", "0.6189713", "0.6182774", "0.6175715", "0.6151642", "0.6111658", "0.61064124", "0.6106253", "0.60943246", "0.6093012", "0.6089324", "0.605916", "0.60242575", "0.6022678", "0.5997962", "0.5981197", "0.59773314", "0.5976104", "0.59729624", "0.59667736", "0.59537345", "0.5946417", "0.591506", "0.5900419", "0.5882288", "0.5867242", "0.5864269", "0.58483386", "0.5831903", "0.5826531", "0.5818793", "0.5813545", "0.5809742", "0.57939327", "0.5780725", "0.57696337", "0.5768262", "0.5767326", "0.5746878", "0.56971264", "0.5695641", "0.5694664", "0.56910896", "0.5687642", "0.56850684", "0.56746036", "0.56709534", "0.566766", "0.56510305", "0.5644553", "0.5639788", "0.56271136", "0.5606529", "0.5596116", "0.5596073", "0.5588183", "0.5585309", "0.55686694", "0.5541303", "0.55291337", "0.5523044", "0.55202794", "0.5493827", "0.5473139", "0.54630786", "0.54572344", "0.544513", "0.54305226", "0.5397518", "0.5390141", "0.5390116", "0.53874266" ]
0.6454264
23
Check whether an EBS volume exists or not
def has_disk?(disk_id) with_thread_name("has_disk?(#{disk_id})") do @logger.info("Check the presence of disk with id `#{disk_id}'...") @ec2_client.volumes[disk_id].exists? end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exists?\n\t\t# Look for volume\n\t\t@vol_id = DSMAPIVolume.find_volume(@resource[:name], @resource[:storagecenter])\n\t\tif @vol_id == nil\n\t\t\treturn false\n\t\telse\n\t\t\treturn true\n\t\tend\n\tend", "def exists?\n volumes = Hash.new\n qmgmt(['volume', 'list']).each_line { |l|\n if ( l.match(/^Name/) )\n next\n end\n line = l.split(/\\s{2,}/)\n volumes[line.shift()] = true\n }\n\n if ( volumes[resource[:name]] )\n return true\n else\n return false\n end\n end", "def check()\n # check if teh volume still exists\n begin\n volumes = $ec2.describe_volumes([self.id])\n rescue RightAws::AwsError\n if $!.errors[0][0] == \"InvalidVolume.NotFound\"\n puts \"WARN: Volume #{self.id} is not running\"\n delete()\n return\n else\n p $!.code\n end\n end\n\n # check that it is attached\n if volumes[0][:aws_attachment_status] == 'attached'\n if self.attached_instance != volumes[0][:aws_instance_id]\n self.attached_instance = volumes[0][:aws_instance_id]\n self.save()\n puts \"WARN: volume #{self.id} is now attached to #{self.attached_instance}\"\n end\n elsif self.attached_instance.nil?\n puts \"WARN: volume #{self.id} is no longer attached\"\n self.attached_instance = nil\n self.save()\n end\n end", "def exists?\n File.exists? vmx_path\n end", "def exists?\n #notice(\"DEBUG + \" + resource[:content])\n configs = Hash.new\n qmgmt(['volume', 'config', 'list']).each_line { |l|\n configs[l.chomp()] = true\n }\n\n # diff config is volume config exists\n if ( configs[resource[:name]] )\n diff_config\n else\n return false\n end\n end", "def exists?\n @actionable_vibs = [] # List of VIBs which are either a) fully qualified paths for VIBs to install,\n # OR b) VIB package name to remove\n @mounted_nfs_shares = {} # Map of NFS shares that are mounted on the ESX\n # key: \"nfs_hostname:/share\" representing NFS mounted share\n # value: Hash\n # :volume_name corresponding volume name\n # :new_mount boolean indicating if it was mounted by us\n @processed_vibs = {} # Map {id => true} of all installed, or to-be installed VIBs on the ESX host\n\n fetch_mounted_nfs_shares\n\n fetch_installed_vibs\n\n vibs = resource[:vibs].is_a?(Array) ? resource[:vibs] : [resource[:vibs]]\n Puppet.debug(\"VIBs to query : #{vibs}...\")\n\n # The type validation already validates proper format of fields for either install or uninstall\n # To determine the install mode, we simply need to do check if first element hash or not\n is_install = vibs.first.is_a?(Hash)\n\n vibs.each do |vib_data|\n unless is_install\n # For uninstall, add the VIB name to the actionable_vibs list\n @actionable_vibs.push(vib_data)\n else\n prepare_vib_for_install(vib_data)\n end\n end\n\n # For install mode: if there are any actionable VIBs, return false to invoke \"create\"\n # For uninstall mode: if there are any actionable VIBs, return true to invoke \"destroy\"\n is_install ? 
@actionable_vibs.length == 0 : @actionable_vibs.length > 0\n end", "def check_exists\n raise GlusterFS::Error, \"File does not exist: #{@path}\" unless exists?\n end", "def check_if_native(entity) #volumes or snapshots\n native = extension = false\n #check if 'native' volume API present:\n begin\n response = @connection.req(\"GET\", \"/#{entity}\")\n native = true if response.code.match(/^20.$/)\n return true, entity\n rescue OpenStack::Exception::ItemNotFound => not_found\n native = false\n end\n #check if available as extension:\n begin\n response = @connection.req(\"GET\", \"/os-#{entity}\")\n extension = true if response.code.match(/^20.$/)\n return false, \"os-#{entity}\"\n rescue OpenStack::Exception::ItemNotFound => not_found\n extension = false\n end\n raise OpenStack::Exception::NotImplemented.new(\"No Volumes support for this provider\", 501, \"No #{entity} Support\") unless (native || extension)\n end", "def find_volume(name)\n Volume.new pool.lookup_volume_by_name(name), self\n rescue Libvirt::RetrieveError\n nil\n end", "def determine_volume\n vol = currently_attached_volume(instance_id, new_resource.device)\n vol_id = new_resource.volume_id || volume_id_in_node_data || ( vol ? vol[:aws_id] : nil )\n raise \"volume_id attribute not set and no volume id is set in the node data for this resource (which is populated by action :create) and no volume is attached at the device\" unless vol_id\n\n # check that volume exists\n vol = volume_by_id(vol_id)\n raise \"No volume with id #{vol_id} exists\" unless vol\n\n vol\n end", "def raise_if_no_exists_in_vcenter\n raise 'vCenter device does not exist at the moment' unless exists?\n end", "def kvm_exists?(kvm_name)\n begin\n virt.lookup_domain_by_name(kvm_name)\n true\n rescue Libvirt::RetrieveError\n false\n end\nend", "def storage_exists?\n File.exists?(file_path)\n end", "def mounted?(name)\n mount_loc = File.join(KVM_MOUNT_POINT, name)\n system(\"mount | grep #{mount_loc}\")\nend", "def exists?\n begin\n partition= resource[:name]\n device=partition[0,(partition.length-1)]\n if File.exist?(partition)\n true\n else\n false\n end\n end\n end", "def present?\n\tFile.exist?(\"Berksfile\") and `berks`\n end", "def is_volume_wrapper?\n volumes = Bplmodels::Finder.getVolumeObjects(pid)\n volumes.present? ? true : false\n end", "def is_unmanaged_attached_volume?(mapping)\n case mapping[:volume_status]\n when 'attached', 'attaching'\n mapping[:management_status].nil?\n else\n false\n end\n end", "def check_vbox_vm_exists(client_name)\n message = \"Checking:\\tVM \"+client_name+\" exists\"\n command = \"VBoxManage list vms |grep -v 'inaccessible'\"\n host_list = execute_command(message,command)\n if !host_list.match(client_name)\n puts \"Information:\\tVirtualBox VM \"+client_name+\" does not exist\"\n exists = \"no\"\n else\n exists = \"yes\"\n end\n return exists\nend", "def volume_compatible_with_resource_definition?(volume)\n if new_resource.snapshot_id =~ /vol/\n new_resource.snapshot_id(find_snapshot_id(new_resource.snapshot_id, new_resource.most_recent_snapshot))\n end\n (new_resource.size.nil? || new_resource.size == volume[:aws_size]) &&\n (new_resource.availability_zone.nil? || new_resource.availability_zone == volume[:zone]) &&\n (new_resource.snapshot_id.nil? 
|| new_resource.snapshot_id == volume[:snapshot_id])\n end", "def sshd_service_exists?\n # FIXME: We should probably check exit status rather than AIX-specific error codes.\n output=ssh_command(\"#{config[:sudo]} #{config[:clogin]} #{config[:wpar_name]} #{config[:lssrc]} -s sshd\", :stderr)\n if output.include?('0513-085') # 0513-085 The sshd Subsystem is not on file.\n return false\n end\n true\n end", "def vm_exists?(uuid)\n end", "def create\n\t\tregion = resource[:availability_zone].to_s.gsub(/.$/,'') \n\t\tcompute = Fog::Compute.new(:provider => 'aws', :region => \"#{region}\")\n\t\tprint \"ebsvol[aws]->create: Region is #{region}\\n\" if $debug\n\t\tprint \"ebsvol[aws]->create: Availability_zone is #{resource[:availability_zone]}\\n\" if $debug\n\t\t# create the requested volume\n\t\tresponse = compute.create_volume(resource[:availability_zone],resource[:size],resource[:snapshot])\t\n\t\tif (response.status == 200)\n\t\t\tvolumeid = response.body['volumeId']\n\t\t\tprint \"ebsvol[aws]->create: I created volume #{volumeid}.\\n\" if $debug\n\t\t\t# now tag the volume with volumename so we can identify it by name\n\t\t\t# and not the volumeid\n\t\t\tresponse = compute.create_tags(volumeid,{ :Name => resource[:volume_name] })\n\t\t\tif (response.status == 200)\n\t\t\t\tprint \"ebsvol[aws]->create: I tagged #{volumeid} with Name = #{resource[:volume_name]}\\n\" if $debug\n\t\t\tend\n\t\t\t# Check if I need to attach it to an ec2 instance.\n\t\t\tattachto = resource[:attached_to].to_s\n\t\t\tprint \"attachto is #{attachto}\\n\" if $debug\n\t\t\tif ( attachto != '' )\n\t\t\t\tif ( attachto == 'me')\n\t\t\t\t\tinstance = instanceinfo(compute,myname(compute))\n\t\t\t\telse\n\t\t\t\t\tinstance = instanceinfo(compute,attachto)\n\t\t\t\tend\n\t\t\t\tif ( resource[:device] != nil )\n\t\t\t\t\t# try to attach the volume to requested instance\n\t\t\t\t\tprint \"attach the volume\\n\" if $debug\n\t\t\t\t\tvolume = volinfo(compute,resource[:volume_name])\n\t\t\t\t\tattachvol(compute,volume,instance,resource[:device])\n\t\t\t\telse\n\t\t\t\t\traise \"ebsvol[aws]->create: Sorry, I can't attach a volume with out a device to attach to!\"\n\t\t\t\tend\n\t\t\tend\n\t\telse\n\t\t\traise \"ebsvol[aws]->create: I couldn't create the ebs volume, sorry!\"\n\t\tend\n\tend", "def exists?\n Dir.glob(@resource.value(:name) +\"*.rpm\").empty?\n end", "def file_exists?(path)\n result = transport.execute(\"ls -d #{path}\", :read_only => true)\n result.exitstatus == 0 && result.stdout != ''\n end", "def exists?\n @lxc.exists?(self.name)\n end", "def volinfo(compute,name)\n\t\tvolumes = compute.describe_volumes\n\t\tif (volumes.status == 200)\n\t\t\t# check each of the volumes in our availability zone which match our name.\n\t\t\tvolumes.body['volumeSet'].each {|x|\n\t\t\t\t# Match the name unless the volume is actually being deleted...\n\t\t\t\tif (x['tagSet']['Name'] == resource[:volume_name] )\n\t\t\t\t\t#print \"ebsvol[aws]->volinfo: Volume #{x['volumeId']} has Name = #{resource[:volume_name]}\\n\" if $debug\n\t\t\t\t\treturn x\n\t\t\t\tend\n\t\t\t}\n\t\telse\n\t\t\traise \"ebsvol[aws]->volinfo: I couldn't list the ebsvolumes\"\n\t\tend\n\t\tnil\n\tend", "def exists?\n @aws_instance.exists? 
&& @aws_instance.state.name != 'terminated'\n end", "def zfs_exist?(name)\n cmd = Mixlib::ShellOut.new('zfs', 'get', 'mountpoint', name)\n cmd.environment['PATH'] = \"/usr/sbin:#{ENV['PATH']}\" if platform_family?('solaris2')\n cmd.run_command\n cmd.exitstatus == 0\nend", "def smb_file_exist?(file)\n begin\n fd = @smb.open(file, 'ro')\n rescue XCEPT::ErrorCode => e\n # If attempting to open the file results in a \"*_NOT_FOUND\" error,\n # then we can be sure the file is not there.\n #\n # Copy-pasted from smb/exceptions.rb to avoid the gymnastics\n # required to pull them out of a giant inverted hash\n #\n # 0xC0000034 => \"STATUS_OBJECT_NAME_NOT_FOUND\",\n # 0xC000003A => \"STATUS_OBJECT_PATH_NOT_FOUND\",\n # 0xC0000225 => \"STATUS_NOT_FOUND\",\n error_is_not_found = [ 0xC0000034, 0xC000003A, 0xC0000225 ].include?(e.error_code)\n # If the server returns some other error, then there was a\n # permissions problem or some other difficulty that we can't\n # really account for and hope the caller can deal with it.\n raise e unless error_is_not_found\n found = !error_is_not_found\n else\n # There was no exception, so we know the file is openable\n fd.close\n found = true\n end\n found\nend", "def file_exists?\n filepath.present? && s3_object(false).exists?\n end", "def file_exists?(path)\n result = transport.execute(\"ls -d #{path}\")\n result.exitstatus == 0 && result.stdout != ''\n end", "def is_root_device?(volume_id)\n vols = @ec2_api.describe_volumes(:volume_id => volume_id)\n if vols['volumeSet']['item'][0]['attachmentSet'] == nil || vols['volumeSet']['item'][0]['attachmentSet']['item'].size == 0\n #not linked to any instance, cannot be a root-device\n return false\n end\n instance_id = vols['volumeSet']['item'][0]['attachmentSet']['item'][0]['instanceId']\n res = @ec2_api.describe_instance_attribute(:instance_id => instance_id, :attributes => {:rootDeviceName => true})\n if res[\"rootDeviceName\"] == nil\n return false\n end\n rdn = res['rootDeviceName']['value']\n res = @ec2_api.describe_instances(:instance_id => instance_id)\n if res['reservationSet']['item'][0]['instancesSet']['item'][0]['blockDeviceMapping']['item'].size == 0\n # volume unattached in the meantime\n return false\n end\n attached = res['reservationSet']['item'][0]['instancesSet']['item'][0]['blockDeviceMapping']['item']\n attached.each() {|ebs|\n volume = ebs['ebs']['volumeId']\n device_name = ebs['deviceName']\n if volume == volume_id && rdn == device_name\n return true\n end\n }\n return false\n end", "def is_unmanageable_volume?(mapping)\n case mapping[:volume_status]\n when 'attached', 'attaching', 'detached', 'detaching'\n false\n else\n true\n end\n end", "def check_vbox_vm_config_exists(client_name)\n exists = \"no\"\n vbox_vm_dir = get_vbox_vm_dir(client_name)\n config_file = vbox_vm_dir+\"/\"+client_name+\".vbox\"\n if File.exist?(config_file)\n exists = \"yes\"\n else\n exists = \"no\"\n end\n return exists\nend", "def storage_exists?(storage_name)\n adapter.storage_exists?(storage_name)\n end", "def present?(key)\n @disk.present?(key)\n end", "def exists?\n hba\n end", "def ebs_volumes\n @ebs_volumes ||= init_ebs_volumes.reject do |vol|\n vol.attachments.any? 
do |att|\n attached_instance = id_instances[att.instance_id]\n attached_instance.root_device_name == att.device\n end\n end\n end", "def check_vbox_vm_doesnt_exist(client_name)\n message = \"Checking:\\tVM \"+client_name+\" doesn't exist\"\n command = \"VBoxManage list vms\"\n host_list = execute_command(message,command)\n if host_list.match(client_name)\n puts \"Information:\\tVirtualBox VM #{client_name} already exists\"\n exit\n end\n return\nend", "def exists?\n return File.exists?(\"/tmp/cloud-#{resource[:name]}\")\n end", "def exists?\n return File.exists?(\"/tmp/cloud-#{resource[:name]}\")\n end", "def exists?\n return File.exists?(\"/tmp/cloud-#{resource[:name]}\")\n end", "def vol_status(vs, vol_id)\n vol = vs.volumes.get(vol_id)\n unless vol.status == 'available'\n # puts \"vol status is not 'available' it is '#{status}' instead\"\n @log.error 'vol is not avaiable to delete'\n exit 2\n end\n end", "def instance_exists(path)\n result = $evm.instance_exists?(path)\n if result\n $evm.log('info',\"Instance:<#{path}> exists. Result:<#{result.inspect}>\") if @debug\n else\n $evm.log('info',\"Instance:<#{path}> does not exist. Result:<#{result.inspect}>\") if @debug\n end\n return result\n end", "def mmkv_file_exists(file)\n is_exist = false\n if File.methods.include?(:exists?)\n is_exist = File.exists? file\n else\n is_exist = File.exist? file\n end\n return is_exist\nend", "def suspend_file_exists?\n File.file? File.join(path, \"#{@name}.vmem\")\n end", "def suspend_file_exists?\n File.file? File.join(path, \"#{@name}.vmem\")\n end", "def init_ebs_volumes\n @@client.describe_volumes.volumes\n end", "def exists\n if @file && @mp3\n return true\n else\n return false\n end\n end", "def wait_vol_status(aws_id, timeout, status)\n puts \"waiting state: #{status.pretty_inspect}\"\n Timeout.timeout(timeout) do |i|\n loop do\n sleep 2\n wait_vol = @ec2.describe_volumes(aws_id).first\n break if wait_vol[status[:name]] == status[:value]\n puts \"EBS Volume state #{wait_vol[status[:name]]}: waiting...\"\n end\n if block_given?\n puts \"Check Created Block device\"\n loop do\n sleep 2\n break if yield\n end\n end\n end\nrescue Timeout::Error => e\n warn e.pretty_inspect\n warn e.backtrace\n puts \"EBS Volume #{aws_id} deleting...\"\n @ec2.delete_volume(aws_id)\n raise StandardError, \"waiting #{status.pretty_inspect} EBS Volume #{aws_id}: Timeout!!\"\nend", "def get_volume_id(sys_id,volume_name)\n response = request(:get, \"/devmgr/v2/storage-systems/#{sys_id}/volumes\")\n status(response, 200, 'Failed to get Volumes')\n volumes = JSON.parse(response.body)\n volumes.each do |vm|\n \t return vm['id'] if vm['name'] == volume_name\n \tend\n response = request(:get, \"/devmgr/v2/storage-systems/#{sys_id}/thin-volumes\")\n status(response, 200, 'Failed to get thin Volumes')\n volumes = JSON.parse(response.body)\n volumes.each do |vm|\n return vm['id'] if vm['name'] == volume_name\n end\n \tfalse\n end", "def s3_exists?\n self.s3_object.exists?\n end", "def check_exists?(path)\n # This can also be accomplished with ::File.exists? however, this demonstrates how a more complex shellout\n # command can be used to determine state\n #cmd = shell_out(\"ls #{path}\", { :returns => [0,1,2] })\n #cmd.stderr.empty? 
&& (cmd.stdout !~ /^$/)\n ::File.exist?(path)\nend", "def instance_exists?(ec2_client, instance_id)\n ec2_client.describe_instances(instance_ids: [instance_id])\n return true\nrescue StandardError\n return false\nend", "def exists?\n vnic\n end", "def controller_exists(name, controller_name)\n return false if name.nil?\n\n out, err = Open3.capture2e(\"VBoxManage showvminfo #{name}\")\n raise out unless err.exitstatus === 0\n\n out.split(/\\n/)\n .select { |x| x.start_with? 'Storage Controller Name' }\n .map { |x| x.split(':')[1].strip }\n .any? { |x| x == controller_name }\nend", "def is_detached_volume?(mapping)\n # detached by volume status unless we have successfully requested attachment\n # and volume status does not yet reflect this change. an unmanaged volume\n # can also be detached.\n return 'detached' == mapping[:volume_status] && 'attached' != mapping[:management_status]\n end", "def controller_exists(name, controller_name)\n out, err = Open3.capture2e(\"VBoxManage showvminfo #{name}\")\n\n return false if err.exitstatus != 0\n\n out.split(/\\n/)\n .select { |x| x.start_with? 'Storage Controller Name' }\n .map { |x| x.split(':')[1].strip }\n .any? { |x| x == controller_name }\nend", "def controller_exists(name, controller_name)\n out, err = Open3.capture2e(\"VBoxManage showvminfo #{name}\")\n\n return false if err.exitstatus != 0\n\n out.split(/\\n/)\n .select { |x| x.start_with? 'Storage Controller Name' }\n .map { |x| x.split(':')[1].strip }\n .any? { |x| x == controller_name }\nend", "def find_volume_service(filter)\n select(&service?(filter))\n .find(&volume_mount?)\n end", "def exists?(vid)\n perform_request(:action => 'vserver-checkexists', :vserverid => vid)\n !statusmsg.match(/Virtual server exists/i).nil?\n end", "def is_mounted?(device)\n system(\"grep -q '#{device}' /proc/mounts\")\nend", "def has_disk?(disk_id)\n with_thread_name(\"has_disk?(#{disk_id})\") do\n @logger.info(\"Check the presence of disk with id `#{disk_id}'...\")\n volume = @openstack.with_openstack { @openstack.volume.volumes.get(disk_id) }\n\n !volume.nil?\n end\n end", "def file_exists\n end", "def exists?\n\t\tbegin\n\t\t\tdom\n\t\t\tdebug \"Domain %s exists? true\" % [resource[:name]]\n\t\t\ttrue\n\t\trescue Libvirt::RetrieveError => e\n\t\t\tdebug \"Domain %s exists? false\" % [resource[:name]]\n\t\t\tfalse # The vm with that name doesnt exist\n\t\tend\n\tend", "def one_volume_service?(filter)\n select(&service?(filter))\n .select(&volume_mount?)\n .one?\n end", "def exist?\n stat ? true : false\n end", "def exists?(key)\n s3_object(key).exists?\n end", "def readable?\n disk_filename.present? && self.s3_object(false).exists?\n end", "def active_instance_dir_exists?\n return File.directory?( @resource['instances_dir'] + \"/\" + @resource[:name] )\n end", "def exists?\n validate_directory_structure\n end", "def exists?\n @provider.get(:ensure) != :absent\n end", "def exists?(vid)\n perform_request(action: 'vserver-checkexists', vserverid: vid)\n !!statusmsg.match(/Virtual server exists/i)\n end", "def check_vbox_is_installed()\n app_dir = \"/Applications/VirtualBox.app\"\n if !File.directory?(app_dir)\n puts \"Warning:\\tVirtualbox not installed\"\n exit\n end\nend", "def exists?\n\n\t\tbegin\n\t\t\tdom\n\t\t\tdebug \"Domain %s exists? true\" % [resource[:name]]\n\t\t\ttrue\n\t\trescue Libvirt::RetrieveError => e\n\t\t\tdebug \"Domain %s exists? 
false\" % [resource[:name]]\n\t\t\tfalse # The vm with that name doesnt exist\n\t\tend\n\n\tend", "def check_vsc_existance(verbose=true)\n vscopies=[]\n cmd = \"vssadmin list shadows\"\n begin\n text = \"#{@tmp}\\\\#{Rex::Text.rand_text_alpha(16)}.txt\"\n bat = \"#{@tmp}\\\\#{Rex::Text.rand_text_alpha(16)}.bat\"\n cmdexec = \"%COMSPEC% /C echo #{cmd} ^> #{text} > #{bat} & %COMSPEC% /C start %COMSPEC% /C #{bat}\"\n smb_psexec(cmdexec, false)\n output = get_output(text, false)\n files = [ text, bat ]\n cleanup_after(files, false)\n output.split(\"\\n\").each do |line|\n if line =~ /No items found that satisfy the query/i\n return nil\n elsif line =~ /Shadow Copy Volume: (.+)/i\n puts \"[\".light_green + \"*\".white + \"]\".light_green + \" Found Existing Volume Shadow Copies:\".white if verbose\n puts output.to_s.cyan if verbose\n vscopies << $1.chomp\n end\n end\n vscopies = vscopies.uniq!\n return vscopies\n rescue Rex::Proto::SMB::Exceptions::InvalidCommand => e\n puts \"[\".light_red + \"*\".white + \"]\".light_red + \" Error checking for existing Volume Shadow Copies!\".white if verbose\n puts \"[\".light_red + \"*\".white + \"]\".light_red + \" Make sure you have enough privileges and that this is a Domain Controller.....\".white if verbose\n return nil\n end\nend", "def exchange_exists?(name)\n cmd = Mixlib::ShellOut.new(\"rabbitmqctl list_exchanges |grep '#{name}\\\\b'\")\n cmd.environment['HOME'] = ENV.fetch('HOME', '/root')\n cmd.run_command\n begin\n cmd.error!\n true\n rescue\n false\n end\nend", "def test_exists?\n skip 'requires root privs' unless Process.euid == 0\n\n c = Launch::Container.new('a-container-that-does-not-exist', fake_plist)\n refute c.exists?\n\n skip 'need to figure out a way to spawn a test container'\n #c = Launch::Container.new('a-container-that-exists')\n #assert c.exists?\n end", "def vm_exists?(uuid)\n 5.times do\n result = raw(@prlctl_path, 'list', uuid)\n return true if result.exit_code == 0\n\n # Sometimes this happens. In this case, retry.\n # If we don't see this text, the VM really doesn't exist.\n return false unless result.stderr.include?('Login failed:')\n\n # Sleep a bit though to give Parallels Desktop time to fix itself\n sleep 2\n end\n\n # If we reach this point, it means that we consistently got the\n # failure, do a standard prlctl now. This will raise an\n # exception if it fails again.\n execute_prlctl('list', uuid)\n true\n end", "def exists?\n persistent? && (filename && filename.exist?)\n end", "def has_logical_volume_management\n super\n end", "def is_managed_attached_unassigned_volume?(mapping)\n return 'attached' == mapping[:volume_status] && 'attached' == mapping[:management_status]\n end", "def ready?\n return false unless @status =~ /down/i\n volumes.each do |volume|\n return false if volume.status =~ /locked/i\n end\n true\n end", "def is_detaching_volume?(mapping)\n case mapping[:volume_status]\n when 'detaching'\n true\n when 'attached', 'attaching'\n # also detaching if we have successfully requested detachment but volume\n # status does not yet reflect this change.\n 'detached' == mapping[:management_status]\n else\n false\n end\n end", "def disabled?\n ExtManagementSystem.none? do |ems|\n \"#{ems.class}::CloudVolume\".safe_constantize&.supports?(:create)\n end\n end", "def looks_like_orionvm_v2?\n File.exists?('/etc/orion_base')\n end", "def exists_on_cloud?(name)\n ret_val = @connection.query_azure(\"storageservices/#{name}\")\n error_code, error_message = error_from_response_xml(ret_val) if ret_val\n if ret_val.nil? 
|| error_code.length > 0\n Chef::Log.warn 'Unable to find storage account:' + error_message + ' : ' + error_message if ret_val\n false\n else\n true\n end\n end", "def check_fusion_vm_exists(options)\n set_vmrun_bin(options)\n if options['host-os-name'].to_s.match(/Linux/)\n fusion_vm_dir = options['fusiondir']+\"/\"+options['name']\n else\n fusion_vm_dir = options['fusiondir']+\"/\"+options['name']+\".vmwarevm\"\n end\n fusion_vmx_file = fusion_vm_dir+\"/\"+options['name']+\".vmx\"\n if not File.exist?(fusion_vmx_file)\n if options['verbose'] == true\n handle_output(options,\"Information:\\t#{options['vmapp']} VM #{options['name']} does not exist\")\n end\n exists = false\n else\n if options['verbose'] == true\n handle_output(options,\"Information:\\t#{options['vmapp']} VM #{options['name']} exists\")\n end\n exists = true\n end\n return exists\nend", "def addVolume(dev, size, type: \"gp2\")\n if @cloud_id.nil? or @cloud_id.empty?\n MU.log \"#{self} didn't have a cloud id, couldn't determine 'active?' status\", MU::ERR\n return true\n end\n az = nil\n MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_instances(\n instance_ids: [@cloud_id]\n ).reservations.each { |resp|\n if !resp.nil? and !resp.instances.nil?\n resp.instances.each { |instance|\n az = instance.placement.availability_zone\n instance.block_device_mappings.each { |vol|\n if vol.device_name == dev\n MU.log \"A volume #{dev} already attached to #{self}, skipping\", MU::NOTICE\n return\n end\n }\n }\n end\n }\n MU.log \"Creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n creation = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).create_volume(\n availability_zone: az,\n size: size,\n volume_type: type\n )\n begin\n sleep 3\n creation = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_volumes(volume_ids: [creation.volume_id]).volumes.first\n if ![\"creating\", \"available\"].include?(creation.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end while creation.state != \"available\"\n\n if @deploy\n MU::MommaCat.listStandardTags.each_pair { |key, value|\n MU::MommaCat.createTag(creation.volume_id, key, value, region: @config['region'], credentials: @config['credentials'])\n }\n MU::MommaCat.createTag(creation.volume_id, \"Name\", \"#{MU.deploy_id}-#{@config[\"name\"].upcase}-#{dev.upcase}\", region: @config['region'], credentials: @config['credentials'])\n end\n\n attachment = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).attach_volume(\n device: dev,\n instance_id: @cloud_id,\n volume_id: creation.volume_id\n )\n\n begin\n sleep 3\n attachment = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_volumes(volume_ids: [attachment.volume_id]).volumes.first.attachments.first\n if ![\"attaching\", \"attached\"].include?(attachment.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end while attachment.state != \"attached\"\n end", "def centos?\n File.exist?('/etc/centos-release')\nend", "def currently_attached_volume(instance_id, device)\n ec2.describe_volumes.find{|v| v[:aws_instance_id] == instance_id && v[:aws_device] == device}\n end", "def mounts_ebs_volumes settings\n has_role settings, \"ebs_volumes_mount\"\nend", "def file_exists?(path)\n 
parse_boolean(transport.execute(\"Test-Path #{escape(path)}\", :read_only => true).stdout)\n end", "def exist?\n request(:get)\n true\n rescue Stretcher::RequestError::NotFound\n false\n end", "def created?(vm_name, provider='virtualbox')\n\tFile.exist?(\".vagrant/machines/#{vm_name}/#{provider}/id\")\nend", "def get_volume(groupdir)\n volid = ask(\"Insert the volume and enter its number or zero to use a new volume: \") { |x| x.echo = true }\n # If volid=0, figure out the next label based on ~/.hdb\n if volid == '0'\n # TODO: This should be fancier, perhaps by loading FSG first and doing a search in memory.\n HDB.verbose and puts \"Searching #{groupdir} for next available volume id.\"\n i = 1\n while File.exist? File.join(groupdir, i.to_s)\n i += 1\n end\n volid = i.to_s\n HDB.verbose and puts \"Found #{volid} free - please label this medium appropriately!\"\n else\n if File.file? File.join(groupdir, volid)\n response = ask(\"Are you sure you have inserted volume #{volid} and want to overwrite it? \") { |x| x.echo = true }\n # NOTE: casecmp returns 0 if they are equal\n if \"yes\".casecmp(response) == 0 or \"y\".casecmp(response) == 0\n puts \"WARNING: Overwriting existing volume\"\n else\n raise \"Aborted by user decision\"\n end\n end\n end\n return volid\n end", "def aws_obj_exists?(opts)\n opts[:obj].exists?\n end", "def setup_volumes\n # managing planned volumes is currently only needed in Windows and only if\n # this is not a reboot scenario.\n if !RightScale::Platform.windows? || RightScale::InstanceState.reboot?\n boot\n else\n RightScale::AuditProxy.create(@agent_identity, 'Planned volume management') do |audit|\n @audit = audit\n manage_planned_volumes do\n @audit = nil\n boot\n end\n end\n end\n true\n end", "def file_exists?(bucket_name:, key:)\n !!get_bucket!(bucket_name).file(key)\n end" ]
[ "0.7644813", "0.7362493", "0.6975857", "0.65798783", "0.6441754", "0.63350564", "0.63269997", "0.61548686", "0.61523074", "0.61519414", "0.61339414", "0.6129923", "0.6063452", "0.6062445", "0.6060861", "0.60347795", "0.6015773", "0.6012775", "0.59822375", "0.5974805", "0.59537196", "0.5950403", "0.5947713", "0.5940361", "0.5938167", "0.5930019", "0.5925526", "0.59226453", "0.5900795", "0.5899896", "0.5892485", "0.58871365", "0.5877861", "0.5856583", "0.5851809", "0.5849584", "0.584947", "0.5839858", "0.5829641", "0.5827021", "0.58230656", "0.58230656", "0.58230656", "0.5810884", "0.5805691", "0.5793338", "0.5768385", "0.5768385", "0.5745945", "0.5741764", "0.5741752", "0.5737951", "0.57304674", "0.5710779", "0.57085353", "0.5704946", "0.5701797", "0.56690174", "0.5666658", "0.5666609", "0.5663244", "0.5661365", "0.56570905", "0.56532025", "0.56447875", "0.563542", "0.5628176", "0.5626675", "0.5620473", "0.5619674", "0.5618023", "0.55965173", "0.5596473", "0.5589046", "0.5587428", "0.5585511", "0.5561242", "0.55598634", "0.55507016", "0.554881", "0.55474615", "0.5542502", "0.5535157", "0.553011", "0.5529516", "0.5528215", "0.55229765", "0.55205023", "0.5512991", "0.5498774", "0.5497072", "0.54876065", "0.54790294", "0.5478971", "0.5474212", "0.54740477", "0.5472974", "0.5465891", "0.5461453", "0.5460783" ]
0.55434453
81
Attach an EBS volume to an EC2 instance
def attach_disk(instance_id, disk_id) with_thread_name("attach_disk(#{instance_id}, #{disk_id})") do instance = @ec2_client.instances[instance_id] volume = @ec2_client.volumes[disk_id] device_name = attach_ebs_volume(instance, volume) update_agent_settings(instance) do |settings| settings["disks"] ||= {} settings["disks"]["persistent"] ||= {} settings["disks"]["persistent"][disk_id] = device_name end logger.info("Attached `#{disk_id}' to `#{instance_id}'") end # log registry settings for debugging logger.debug("updated registry settings: #{registry.read_settings(instance_id)}") end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def attachvol(compute,volume,instance,device)\n\t\tprint \"Running attachvol\\n\" if $debug\n\t\traise ArgumentError \"ebsvol[aws]->attachvol: Sorry, you must specify a valid device matching /dev/sd[a-m].\" if (device !~ /^\\/dev\\/sd[a-m]/)\n\t\tif (volume['status'] != \"in-use\" )\n\t\t\t# check instance is in the same availability zone\n\t\t\tif ( volume['availabilityZone'] != instance['placement']['availabilityZone'])\n\t\t\t\traise \"ebsvol[aws]->attachvol: Sorry, volumes must be in the same availability zone as the instance to be attached to.\\nThe volume #{volume['tagSet']['Name']} is in availability zone #{volume['availabilityZone']} and the instance is in #{instance['placement']['availabilityZone']}\" \n\t\t\telse\n\t\t\t\t# check that the device is available\n\t\t\t\tinuse = false\n\t\t\t\tinstance['blockDeviceMapping'].each { |x| inuse=true if x['deviceName'] == device }\n\t\t\t\tif ( inuse )\n\t\t\t\t\traise \"ebsvol[aws]->attachvol: Sorry, the device #{device} is already in use on #{instance['tagSet']['Name']}\" \n\t\t\t\telse\n\t\t\t\t\tresp = compute.attach_volume(instance['instanceId'],volume['volumeId'],device)\n\t\t\t\t\tif (resp.status == 200)\n\t\t\t\t\t\t# now wait for it to attach!\n\t\t\t\t\t\tcheck = volinfo(compute,volume['tagSet']['Name'])\n\t\t\t\t\t\twhile ( check['status'] !~ /(attached|in-use)/ ) do\n\t\t\t\t\t\t\tprint \"ebsvol[aws]->attachvol: status is #{check['status']}\\n\" if $debug\n\t\t\t\t\t\t\tsleep 5\n\t\t\t\t\t\t\tcheck = volinfo(compute,volume['tagSet']['Name'])\n\t\t\t\t\t\tend\n\t\t\t\t\t\tsleep 5 # allow aws to propigate the fact\n\t\t\t\t\t\tprint \"ebsvol[aws]->attachvol: volume is now attached\\n\" if $debug\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\telse\n\t\t\traise \"ebsvol[aws]->attachvol: Sorry, I could not attach #{volume['volumeId']} because it is in use!\"\n\t\tend\n\tend", "def attach(volume, device = '/dev/sdh')\n @ec2.attach_volume volume.id, id, device\n end", "def attach_volume volume, device\n if running?\n post '/attach_volume', :query => {\n :server => {\n :ec2_ebs_volume_href => volume.uri,\n :device => device\n }\n }\n else\n volume.attach_to_server self, device, 'boot'\n end\n end", "def attach(instance)\n # Attach to the instance\n $ec2.attach_volume(self.id, instance.id, '/dev/sdh')\n\n # Wait for it to be attached\n while true\n done = false\n $ec2.describe_volumes([self.id]).each do |result|\n if result[:aws_attachment_status] == 'attached'\n done = true\n end\n end\n if done\n break\n end\n sleep(5)\n end\n\n # Update the database\n self.attached_instance = instance.id\n self.save()\n end", "def attach(instance_id, volume)# rubocop:disable Metrics/AbcSize\n inst_details = AttrFinder.new(@instanceparameters)\n @options[:inst] = volume\n inst_details.options = @options\n inst_details.validate = @validate\n inst_details.function = 'server'\n opts = {}\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::Models::AttachVolumeDetails.new\n request.instance_id = instance_id\n request.type = 'iscsi'\n request.volume_id = inst_details.volume\n api = OracleBMC::Core::ComputeClient.new\n response = api.attach_volume(request, opts)\n end", "def create\n\t\tregion = resource[:availability_zone].to_s.gsub(/.$/,'') \n\t\tcompute = Fog::Compute.new(:provider => 'aws', :region => \"#{region}\")\n\t\tprint \"ebsvol[aws]->create: Region is #{region}\\n\" if $debug\n\t\tprint \"ebsvol[aws]->create: Availability_zone is #{resource[:availability_zone]}\\n\" if $debug\n\t\t# create the requested volume\n\t\tresponse = 
compute.create_volume(resource[:availability_zone],resource[:size],resource[:snapshot])\t\n\t\tif (response.status == 200)\n\t\t\tvolumeid = response.body['volumeId']\n\t\t\tprint \"ebsvol[aws]->create: I created volume #{volumeid}.\\n\" if $debug\n\t\t\t# now tag the volume with volumename so we can identify it by name\n\t\t\t# and not the volumeid\n\t\t\tresponse = compute.create_tags(volumeid,{ :Name => resource[:volume_name] })\n\t\t\tif (response.status == 200)\n\t\t\t\tprint \"ebsvol[aws]->create: I tagged #{volumeid} with Name = #{resource[:volume_name]}\\n\" if $debug\n\t\t\tend\n\t\t\t# Check if I need to attach it to an ec2 instance.\n\t\t\tattachto = resource[:attached_to].to_s\n\t\t\tprint \"attachto is #{attachto}\\n\" if $debug\n\t\t\tif ( attachto != '' )\n\t\t\t\tif ( attachto == 'me')\n\t\t\t\t\tinstance = instanceinfo(compute,myname(compute))\n\t\t\t\telse\n\t\t\t\t\tinstance = instanceinfo(compute,attachto)\n\t\t\t\tend\n\t\t\t\tif ( resource[:device] != nil )\n\t\t\t\t\t# try to attach the volume to requested instance\n\t\t\t\t\tprint \"attach the volume\\n\" if $debug\n\t\t\t\t\tvolume = volinfo(compute,resource[:volume_name])\n\t\t\t\t\tattachvol(compute,volume,instance,resource[:device])\n\t\t\t\telse\n\t\t\t\t\traise \"ebsvol[aws]->create: Sorry, I can't attach a volume with out a device to attach to!\"\n\t\t\t\tend\n\t\t\tend\n\t\telse\n\t\t\traise \"ebsvol[aws]->create: I couldn't create the ebs volume, sorry!\"\n\t\tend\n\tend", "def attach_blank_volume opts\n device = opts.delete :device\n opts = {:ec2_availability_zone => ec2_availability_zone }.merge opts\n volume = account.create_ec2_ebs_volume opts\n attach_volume volume, device\n end", "def attach!(volume_or_id, device)\n @@ec2.attach_volume(volume_or_id, id, device)\n @volumes = nil\n end", "def attach_volume(volume_id, instance_id, device = '/dev/sdh')\n action = 'AttachVolume'\n params = {\n 'Action' => action,\n 'VolumeId' => volume_id,\n 'InstanceId' => instance_id,\n 'Device' => device\n }\n\n response = send_query_request(params)\n response.is_a?(Net::HTTPSuccess)\n end", "def create_ec2_ebs_volume opts\n Ec2EbsVolume.create opts.merge :account => self\n end", "def attach_volume(volume_id, instance_id, device, timeout)\n Chef::Log.debug(\"Attaching #{volume_id} as #{device}\")\n ec2.attach_volume(volume_id, instance_id, device)\n\n # block until attached\n begin\n Timeout::timeout(timeout) do\n while true\n vol = volume_by_id(volume_id)\n if vol && vol[:aws_status] != \"deleting\"\n if vol[:aws_attachment_status] == \"attached\"\n if vol[:aws_instance_id] == instance_id\n Chef::Log.info(\"Volume #{volume_id} is attached to #{instance_id}\")\n break\n else\n raise \"Volume is attached to instance #{vol[:aws_instance_id]} instead of #{instance_id}\"\n end\n else\n Chef::Log.debug(\"Volume is #{vol[:aws_status]}\")\n end\n sleep 3\n else\n raise \"Volume #{volume_id} no longer exists\"\n end\n end\n end\n rescue Timeout::Error\n raise \"Timed out waiting for volume attachment after #{timeout} seconds\"\n end\n end", "def attach_volume( options = {} )\n options = { :volume_id => '' }.merge(options)\n options = { :instance_id => '' }.merge(options)\n options = { :device => '' }.merge(options)\n raise ArgumentError, \"No :volume_id provided\" if options[:volume_id].nil? || options[:volume_id].empty?\n raise ArgumentError, \"No :instance_id provided\" if options[:instance_id].nil? || options[:instance_id].empty?\n raise ArgumentError, \"No :device provided\" if options[:device].nil? 
|| options[:device].empty?\n\n params = {\n \"VolumeId\" => options[:volume_id],\n \"InstanceId\" => options[:instance_id],\n \"Device\" => options[:device]\n }\n return response_generator(:action => \"AttachVolume\", :params => params)\n end", "def attach_storage_volume(volume_id, instance_id, device=nil)\n must_support! :storage_volumes\n result = connection.post(api_uri(\"/storage_volumes/#{volume_id}/attach\")) do |r|\n r.params = { :instance_id => instance_id, :device => device }\n end\n if result.status.is_ok?\n from_resource(:storage_volume, result)\n end\n end", "def attach_volume(id, volume) \n data = { 'volumeAttachment' => { 'volumeId' => volume, 'device' => \"/dev/vdb\" } }\n return post_request(address(\"/servers/\" + id + \"/os-volume_attachments\"), data, @token)\n end", "def provision_and_mount_volume(server, disk_size, device)\n unless provider.find_server_device(server, device)\n say \"Provisioning #{disk_size}Gb persistent disk for inception VM...\"\n provider.create_and_attach_volume(\"Inception Disk\", disk_size, server, device)\n end\n\n # Format and mount the volume\n if aws?\n say \"Skipping volume mounting on AWS 12.10 inception VM until its fixed\", [:yellow, :bold]\n run_ssh_command_until_successful server, \"sudo mkdir -p /var/vcap/store\"\n else\n say \"Mounting persistent disk as volume on inception VM...\"\n run_ssh_command_until_successful server, \"sudo mkfs.ext4 #{device} -F\"\n run_ssh_command_until_successful server, \"sudo mkdir -p /var/vcap/store\"\n run_ssh_command_until_successful server, \"sudo mount #{device} /var/vcap/store\"\n end\n end", "def create_volume(size_or_snapshot_id, options = {})\n options = {:device => '/dev/sdh'}.merge(options)\n if size_or_snapshot_id.is_a?(Numeric)\n volume = @ec2.create_volume availability_zone, :size => size_or_snapshot_id\n else\n volume = @ec2.create_volume availability_zone, :snapshot_id => size_or_snapshot_id\n end\n if options[:tags]\n @ec2.create_tags(volume.id, options[:tags])\n end\n while volume.status != 'available'\n volume.reload\n end\n if options[:device]\n attach volume, options[:device]\n end\n volume\n end", "def addVolume(dev, size, type: \"gp2\", delete_on_termination: false)\n\n if setDeleteOntermination(dev, delete_on_termination)\n MU.log \"A volume #{dev} already attached to #{self}, skipping\", MU::NOTICE\n return\n end\n\n MU.log \"Creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).create_volume(\n availability_zone: cloud_desc.placement.availability_zone,\n size: size,\n volume_type: type\n )\n\n MU.retrier(wait: 3, loop_if: Proc.new {\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(volume_ids: [creation.volume_id]).volumes.first\n if ![\"creating\", \"available\"].include?(creation.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n creation.state != \"available\"\n })\n\n\n if @deploy\n MU::Cloud::AWS.createStandardTags(\n creation.volume_id,\n region: @region,\n credentials: @credentials,\n optional: @config['optional_tags'],\n nametag: @mu_name+\"-\"+dev.upcase,\n othertags: @config['tags']\n )\n end\n\n MU.log \"Attaching #{creation.volume_id} as #{dev} to #{@cloud_id} in #{@region} (credentials #{@credentials})\"\n attachment = nil\n MU.retrier([Aws::EC2::Errors::IncorrectState], wait: 15, max: 4) {\n attachment = MU::Cloud::AWS.ec2(region: @region, credentials: 
@credentials).attach_volume(\n device: dev,\n instance_id: @cloud_id,\n volume_id: creation.volume_id\n )\n }\n\n begin\n att_resp = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(volume_ids: [attachment.volume_id])\n if att_resp and att_resp.volumes and !att_resp.volumes.empty? and\n att_resp.volumes.first.attachments and\n !att_resp.volumes.first.attachments.empty?\n attachment = att_resp.volumes.first.attachments.first\n if !attachment.nil? and ![\"attaching\", \"attached\"].include?(attachment.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end\n end while attachment.nil? or attachment.state != \"attached\"\n\n # Set delete_on_termination, which for some reason is an instance\n # attribute and not on the attachment\n setDeleteOntermination(dev, delete_on_termination)\n end", "def attach(new_server_id, device)\n requires :id\n unless in_use?\n data = service.compute.attach_volume(new_server_id, id, device)\n merge_attributes(:attachments => attachments << data.body['volumeAttachment'])\n true\n else\n false\n end\n end", "def attach_volumes!(server, volumes_count, size)\n #create a new block storage connection obj\n volume_service = Fog::Volume::OpenStack.new(\n :openstack_api_key => @os_password,\n :openstack_username => @os_username,\n :openstack_auth_url => @os_auth_url,\n :openstack_tenant => @os_tenant,\n )\n base = 'sdd'\n volumes_count.times do |i|\n base = base.next!\n #create a new volume\n vol = volume_service.volumes.create(\n :size => size,\n :display_name => \"#{server.name}-#{i}\",\n :description => \"Volume attached to #{server.name} - managed by ankus\"\n )\n vol.reload\n vol.wait_for { status == 'available' }\n server.attach_volume(vol.id, \"/dev/#{base}\")\n vol.wait_for { status == 'in-use' }\n end\n end", "def attach_disk(instance, zone, disk_name, device_name=nil, disk_mode='READ_WRITE', disk_type='PERSISTENT')\n data = false\n conn = @ec2_main.environment.connection\n if conn != nil\n response = conn.attach_disk(instance, zone, disk_name, device_name, disk_mode, disk_type)\n if response.status == 200\n data = response.body\n else\n data = {}\n end\n else\n raise \"Connection Error\"\n end\n return data\n end", "def attach_disk(opts, disk_name, instance_id)\n Djinn.log_debug('Calling attach_disk with parameters ' \\\n \"#{opts.inspect}, with disk name #{disk_name} and instance id \" +\n instance_id.to_s)\n\n # Make a copy (the options are a simple hash so shallow copy does the\n # trick) to not modify the original.\n options = opts.clone\n options['instance_id'] = instance_id\n options['disk_name'] = disk_name\n\n uri = URI(\"http://#{@ip}:#{SERVER_PORT}/instance\")\n headers = {'Content-Type' => 'application/json',\n 'AppScale-Secret' => @secret}\n request = Net::HTTP::Post.new(uri.path, headers)\n\n request.body = JSON.dump(options)\n\n return JSON.parse(make_call(request, uri))['location']\n end", "def add_hdd_to_vbox_vm(client_name,vbox_disk_name)\n message = \"Attaching:\\tStorage to VM \"+client_name\n command = \"VBoxManage storageattach \\\"#{client_name}\\\" --storagectl \\\"#{$vbox_disk_type}\\\" --port 0 --device 0 --type hdd --medium \\\"#{vbox_disk_name}\\\"\"\n execute_command(message,command)\n return\nend", "def attach_volumes(node, disk_sizes)\n if $provider == :virtualbox\n node.vm.provider :virtualbox do |v, override|\n disk_num = 0\n disk_sizes.each do |disk_size|\n disk_num += 1\n diskname = 
File.join(File.dirname(File.expand_path(__FILE__)), \".virtualbox\", \"#{node.vm.hostname}-#{disk_num}.vdi\")\n unless File.exist?(diskname)\n v.customize ['createhd', '--filename', diskname, '--size', disk_size * 1024]\n end\n v.customize ['storageattach', :id, '--storagectl', 'SATA Controller', '--port', disk_num, '--device', 0, '--type', 'hdd', '--medium', diskname]\n end\n end\n end\n\n if $provider == :vmware_fusion\n node.vm.provider :vmware_fusion do |v, override|\n vdiskmanager = '/Applications/VMware\\ Fusion.app/Contents/Library/vmware-vdiskmanager'\n unless File.exist?(vdiskmanager)\n dir = File.join(File.dirname(File.expand_path(__FILE__)), \".vmware\")\n unless File.directory?( dir )\n Dir.mkdir dir\n end\n\n disk_num = 0\n disk_sizes.each do |disk_size|\n disk_num += 1\n diskname = File.join(dir, \"#{node.vm.hostname}-#{disk_num}.vmdk\")\n unless File.exist?(diskname)\n `#{vdiskmanager} -c -s #{disk_size}GB -a lsilogic -t 1 #{diskname}`\n end\n\n v.vmx[\"scsi0:#{disk_num}.filename\"] = diskname\n v.vmx[\"scsi0:#{disk_num}.present\"] = 'TRUE'\n v.vmx[\"scsi0:#{disk_num}.redo\"] = ''\n end\n end\n end\n end\n\n if $provider == :parallels\n node.vm.provider :parallels do |v, override|\n disk_sizes.each do |disk_size|\n v.customize ['set', :id, '--device-add', 'hdd', '--size', disk_size * 1024]\n end\n end\n end\n\nend", "def attach_disk(instance_id, disk_id)\n with_thread_name(\"attach_disk(#{instance_id}, #{disk_id})\") do\n instance = @ec2_resource.instance(instance_id)\n volume = @ec2_resource.volume(disk_id)\n\n device_name = attach_ebs_volume(instance, volume)\n\n update_agent_settings(instance) do |settings|\n settings[\"disks\"] ||= {}\n settings[\"disks\"][\"persistent\"] ||= {}\n settings[\"disks\"][\"persistent\"][disk_id] = device_name\n end\n logger.info(\"Attached `#{disk_id}' to `#{instance_id}'\")\n end\n\n # log registry settings for debugging\n logger.debug(\"updated registry settings: #{registry.read_settings(instance_id)}\")\n end", "def addVolume(dev, size, type: \"gp2\")\n if @cloud_id.nil? or @cloud_id.empty?\n MU.log \"#{self} didn't have a cloud id, couldn't determine 'active?' status\", MU::ERR\n return true\n end\n az = nil\n MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_instances(\n instance_ids: [@cloud_id]\n ).reservations.each { |resp|\n if !resp.nil? 
and !resp.instances.nil?\n resp.instances.each { |instance|\n az = instance.placement.availability_zone\n instance.block_device_mappings.each { |vol|\n if vol.device_name == dev\n MU.log \"A volume #{dev} already attached to #{self}, skipping\", MU::NOTICE\n return\n end\n }\n }\n end\n }\n MU.log \"Creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n creation = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).create_volume(\n availability_zone: az,\n size: size,\n volume_type: type\n )\n begin\n sleep 3\n creation = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_volumes(volume_ids: [creation.volume_id]).volumes.first\n if ![\"creating\", \"available\"].include?(creation.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end while creation.state != \"available\"\n\n if @deploy\n MU::MommaCat.listStandardTags.each_pair { |key, value|\n MU::MommaCat.createTag(creation.volume_id, key, value, region: @config['region'], credentials: @config['credentials'])\n }\n MU::MommaCat.createTag(creation.volume_id, \"Name\", \"#{MU.deploy_id}-#{@config[\"name\"].upcase}-#{dev.upcase}\", region: @config['region'], credentials: @config['credentials'])\n end\n\n attachment = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).attach_volume(\n device: dev,\n instance_id: @cloud_id,\n volume_id: creation.volume_id\n )\n\n begin\n sleep 3\n attachment = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_volumes(volume_ids: [attachment.volume_id]).volumes.first.attachments.first\n if ![\"attaching\", \"attached\"].include?(attachment.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end while attachment.state != \"attached\"\n end", "def attach_planned_volume(mapping)\n # preserve the initial list of disks/volumes before attachment for comparison later.\n vm = RightScale::Platform.volume_manager\n InstanceState.planned_volume_state.disks ||= vm.disks\n InstanceState.planned_volume_state.volumes ||= vm.volumes\n\n # attach.\n payload = {:agent_identity => @agent_identity, :volume_id => mapping[:volume_id], :device_name => mapping[:device_name]}\n Log.info(\"Attaching volume #{mapping[:volume_id]}.\")\n req = RetryableRequest.new(\"/storage_valet/attach_volume\", payload, :retry_delay => VolumeManagement::VOLUME_RETRY_SECONDS)\n \n req.callback do |res|\n # don't set :volume_status here as that should only be queried\n mapping[:management_status] = 'attached'\n mapping[:attempts] = nil\n yield if block_given?\n end\n\n req.errback do |res|\n # volume could already be attaching or have been deleted\n # which we can't see because of latency; go around again\n # and check state of volume later.\n Log.error(\"Failed to attach volume #{mapping[:volume_id]} (#{res})\")\n mapping[:attempts] ||= 0\n mapping[:attempts] += 1\n # retry indefinitely so long as core api instructs us to retry or else fail after max attempts.\n if mapping[:attempts] >= VolumeManagement::MAX_VOLUME_ATTEMPTS\n strand(\"Exceeded maximum of #{VolumeManagement::MAX_VOLUME_ATTEMPTS} attempts attaching volume #{mapping[:volume_id]} with error: #{res}\")\n else\n yield if block_given?\n end\n end\n\n req.run\n end", "def attach_volume(server, volume)\n @logger.info(\"Attaching volume `#{volume.id}' to server `#{server.id}'...\")\n 
volume_attachments = @openstack.with_openstack { server.volume_attachments }\n device = volume_attachments.find { |a| a['volumeId'] == volume.id }\n\n if device.nil?\n device_name = select_device_name(volume_attachments, first_device_name_letter(server))\n cloud_error('Server has too many disks attached') if device_name.nil?\n\n @logger.info(\"Attaching volume `#{volume.id}' to server `#{server.id}', device name is `#{device_name}'\")\n @openstack.with_openstack { server.attach_volume(volume.id, device_name) }\n @openstack.wait_resource(volume, :'in-use')\n else\n device_name = device['device']\n @logger.info(\"Volume `#{volume.id}' is already attached to server `#{server.id}' in `#{device_name}'. Skipping.\")\n end\n\n device_name\n end", "def attach_node_volume (volume_label)\n # XXX should check whether this device name is already allocated,\n # and if so throw an exception\n # Helper method, attach an arbitrary volume using an arbitrary label that must be preconfigured in nodes\n Chef::Log.info(\"In attach_node_volume with volume_label #{volume_label}\")\n mount_device = node.application_attributes[volume_label].mount_device\n volume_id = node.application_attributes[volume_label].volume_id\n\n if mount_device.nil?\n Chef::Log.fatal(\"No mount device for volume label #{volume_label}.\tMust supply a volume label configured in nodes\")\n raise\n end\n\n attach_volume(volume_label, volume_id, mount_device)\n end", "def create_volume(availability_zone, options = {})\n raise ArgumentError.new('You must specify a size if not creating a volume from a snapshot') if options[:snapshot_id].blank? && options[:size].blank?\n\n action = 'CreateVolume'\n params = {\n 'Action' => action,\n 'AvailabilityZone' => availability_zone\n }\n params['Size'] = options[:size] unless options[:size].blank?\n params['SnapshotId'] = options[:snapshot_id] unless options[:snapshot_id].blank?\n\n response = send_query_request(params)\n parser = Awsum::Ec2::VolumeParser.new(self)\n volume = parser.parse(response.body)[0]\n if options[:tags] && options[:tags].size > 0\n create_tags volume.id, options[:tags]\n end\n volume\n end", "def attach_disk(config, prefix, disk_num, size)\n filename = \"#{prefix}#{disk_num}.vdi\"\n config.vm.provider \"virtualbox\" do |vb|\n if !File.exist?(filename) \n vb.customize ['createhd', '--filename', filename, '--size', (size * 1024).floor, '--variant', 'fixed']\n vb.customize ['modifyhd', filename, '--type', 'shareable']\n end\n\n vb.customize ['storageattach', :id, '--storagectl', 'SATAController', '--port', disk_num + 2, '--device', 0, '--type', 'hdd', '--medium', filename]\n end\nend", "def attach_disk_to_vm(vm)\n begin\n start_time = Time.now.to_i\n OOLog.info(\"Attaching Storage disk ....\")\n vm_promise = @compute_client.virtual_machines.create_or_update(@rg_name, @instance_name, vm)\n my_vm = vm_promise.value!\n end_time = Time.now.to_i\n duration = end_time - start_time\n OOLog.info(\"Storage Disk attached #{duration} seconds\")\n OOLog.info(\"VM: #{my_vm.body.name} UPDATED!!!\")\n return true\n rescue MsRestAzure::AzureOperationError =>e\n OOLog.debug( e.body.inspect)\n if e.body.to_s =~ /InvalidParameter/ && e.body.to_s =~ /already exists/\n OOLog.debug(\"The disk is already attached\")\n else\n OOLog.fatal(e.body)\n end\n rescue MsRestAzure::CloudErrorData =>e\n OOLog.fatal(e.body.message)\n rescue Exception => ex\n OOLog.fatal(ex.message)\n end\n end", "def attach_disk(instance_id, disk_id)\n @logger.info(\"attach_disk(#{instance_id}, #{disk_id})\")\n disk_name = 
disk_id.disk_name()\n if instance_id.use_managed_disks?()\n disk_params = {\n :disk_name => disk_name,\n :caching => disk_id.caching(),\n :disk_bosh_id => disk_id.to_s,\n :disk_id => @azure_client2.get_managed_disk_by_name(disk_id.resource_group_name(), disk_name)[:id],\n :managed => true\n }\n else\n disk_params = {\n :disk_name => disk_name,\n :caching => disk_id.caching(),\n :disk_bosh_id => disk_id.to_s,\n :disk_uri => @disk_manager.get_data_disk_uri(disk_id),\n :disk_size => @disk_manager.get_disk_size_in_gb(disk_id),\n :managed => false\n }\n end\n lun = @azure_client2.attach_disk_to_virtual_machine(\n instance_id.resource_group_name(),\n instance_id.vm_name(),\n disk_params\n )\n \"#{lun}\"\n end", "def create_ebs_stripe(nickname, new_volume_size_in_gb, options = {})\n self.execute_terminate_volumes if options[:force]\n devices = @disk.generate_physical_device_names(options[:stripe])\n each_volume_size = (new_volume_size_in_gb / options[:stripe].to_f).ceil\n devices.each do |d| \n vol = self.create_volume(:nickname => \"#{nickname}-#{d}\", \n :description => \"Created by RS tools to initialize new EBS stripe volume\",\n :size => each_volume_size)\n raise vol unless vol['aws_id']\n \"Attaching new EBS volume: #{vol['aws_id']}\"\n att = self.attach_volume(vol['aws_id'], d)\n end\n devices.each {|d| self.wait_for_attachment(d) }\n\n @disk.initialize_stripe(devices)\n end", "def attach_disk(instance_id, disk_id)\n @logger.info(\"attach_disk(#{instance_id}, #{disk_id})\")\n disk_name = disk_id.disk_name\n disk_params = if instance_id.use_managed_disks?\n {\n disk_name: disk_name,\n caching: disk_id.caching,\n disk_bosh_id: disk_id.to_s,\n disk_id: @azure_client.get_managed_disk_by_name(disk_id.resource_group_name, disk_name)[:id],\n managed: true\n }\n else\n {\n disk_name: disk_name,\n caching: disk_id.caching,\n disk_bosh_id: disk_id.to_s,\n disk_uri: @disk_manager.get_data_disk_uri(disk_id),\n disk_size: @disk_manager.get_disk_size_in_gb(disk_id),\n managed: false\n }\n end\n lun = @azure_client.attach_disk_to_virtual_machine(\n instance_id.resource_group_name,\n instance_id.vm_name,\n disk_params\n )\n lun.to_s\n end", "def attach_disk(vm_id, disk_id)\n with_thread_name(\"attach_disk(#{vm_id}, #{disk_id})\") do\n begin\n # Don't go further if the volume group is already attached\n return if volume_group_attached?(vm_id, disk_id)\n @logger.debug(\"Attaching volume group #{disk_id} to VM #{vm_id}\")\n @vol_group_manager.attach_to_vm(disk_id, vm_id)\n update_agent_settings(vm_id) do |settings|\n settings['disks']['persistent'] ||= {}\n disks = settings['disks']\n drive = \"/dev/sd#{get_drive_letter(disks)}\"\n settings['disks']['persistent'][disk_id] = drive\n end\n rescue => e\n @logger.error(e)\n cloud_error(e.message)\n end\n end\n end", "def add_to(vm, io_bus, port, device)\n media_arg = case media\n when :disk\n 'hdd'\n when :dvd\n 'dvddrive'\n end\n VirtualBox.run_command! 
['VBoxManage', '--nologo', 'storageattach',\n vm.uid, '--storagectl', io_bus.name, '--port', port.to_s,\n '--device', device.to_s, '--type', media_arg, '--medium', file]\n self\n end", "def attach_disk(instance_id, disk_id)\n with_thread_name(\"attach_disk(#{instance_id}, #{disk_id})\") do\n _ = @cloud_core.attach_disk(instance_id, disk_id) do |instance, device_name|\n update_agent_settings(instance_id) do |settings|\n settings['disks'] ||= {}\n settings['disks']['persistent'] ||= {}\n settings['disks']['persistent'][disk_id] = BlockDeviceManager.device_path(device_name, instance.instance_type, disk_id)\n end\n end\n end\n end", "def add_cdrom_to_vbox_vm(client_name)\n message = \"Attaching:\\tCDROM to VM \"+client_name\n command = \"VBoxManage storagectl \\\"#{client_name}\\\" --name \\\"cdrom\\\" --add \\\"sata\\\" --controller \\\"IntelAHCI\\\"\"\n execute_command(message,command)\n if File.exist?($vbox_additions_iso)\n message = \"Attaching:\\tISO \"+$vbox_additions_iso+\" to VM \"+client_name\n command = \"VBoxManage storageattach \\\"#{client_name}\\\" --storagectl \\\"cdrom\\\" --port 0 --device 0 --type dvddrive --medium \\\"#{$vbox_additions_iso}\\\"\"\n execute_command(message,command)\n end\n return\nend", "def add_volume(bucket,mount,options=nil)\n s3fs_volumes << { :bucket => bucket, :mount => mount, :options => options }\n end", "def add_disk(server, size)\n host = server.to_s\n\n # Increment disk id\n if !DISKS.key?(host) then\n DISKS[host] = 0\n else\n DISKS[host] += 1\n end\n disk_id = DISKS[host]\n disk_filename = \".vagrant/disks/\" + host + \"_\" + disk_id.to_s + \".vdi\"\n\n server.vm.provider \"virtualbox\" do |v|\n # Create disk if it not exist\n unless File.exist?(disk)\n v.customize [\"createhd\", \"--filename\", disk_filename, \"--size\", size * 1024 * 1024]\n end\n v.customize ['storageattach', :id, '--storagectl', 'SATA Controller', '--port', disk_id, '--device', 0, '--type', 'hdd', '--medium', disk]\n end\nend", "def attach options = {}\n opts = {\n :readonly => false,\n :nomount => false\n }.merge!(options)\n args = [@image_path, '-mount']\n args << (opts[:nomount] ? 'suppressed' : 'required')\n args << '-readonly' if opts[:readonly]\n args << '-mountpoint' << opts[:mountpoint] if opts.has_key?(:mountpoint)\n hdiutil 'attach', *args\n end", "def ebs(device_name, type: 'gp2', size: 8)\n {\n device_name: device_name,\n ebs: {\n delete_on_termination: true,\n volume_size: size,\n volume_type: type\n },\n no_device: ''\n }\nend", "def provision_storage host, vm\n if host['volumes']\n # Lazily create the volume client if needed\n volume_client_create\n host['volumes'].keys.each_with_index do |volume, index|\n @logger.debug \"Creating volume #{volume} for OpenStack host #{host.name}\"\n\n # The node defintion file defines volume sizes in MB (due to precedent\n # with the vagrant virtualbox implementation) however OpenStack requires\n # this translating into GB\n openstack_size = host['volumes'][volume]['size'].to_i / 1000\n\n # Create the volume and wait for it to become available\n vol = @volume_client.volumes.create(\n :size => openstack_size,\n :display_name => volume,\n :description => \"Beaker volume: host=#{host.name} volume=#{volume}\",\n )\n vol.wait_for { ready? }\n\n # Fog needs a device name to attach as, so invent one. 
The guest\n # doesn't pay any attention to this\n device = \"/dev/vd#{('b'.ord + index).chr}\"\n vm.attach_volume(vol.id, device)\n end\n end\n end", "def attach_disk(disk)\n # Adding a new disk in newer vSphere versions\n # automatically cleans all system snapshots\n # https://github.com/OpenNebula/one/issues/5409\n if snapshots? || one_snapshots?\n error_msg = 'Existing sytem snapshots, cannot change disks. '\n error_msg << 'Please remove all snapshots and try again'\n raise error_msg\n end\n\n spec_hash = {}\n device_change = []\n\n # Extract unmanaged_keys\n unmanaged_keys = disk_keys\n vc_disks = vcenter_disks_get\n\n # Check if we're dealing with a StoragePod SYSTEM ds\n storpod = disk['VCENTER_DS_REF'].start_with?('group-')\n\n # Check if disk being attached is already connected to the VM\n raise 'DISK is already connected to VM' if disk_attached_to_vm(\n disk, unmanaged_keys, vc_disks\n )\n\n # Generate vCenter spec and reconfigure VM\n add_spec = calculate_add_disk_spec(disk)\n device_change << add_spec\n raise 'Could not generate DISK spec' if device_change.empty?\n\n extra_key = \"opennebula.mdisk.#{disk['DISK_ID']}\"\n extra_value = add_spec[:device].key.to_s\n\n spec_hash[:deviceChange] = device_change\n spec_hash[:extraConfig] =\n [{ :key => extra_key, :value => extra_value }]\n spec = RbVmomi::VIM.VirtualMachineConfigSpec(spec_hash)\n\n begin\n if storpod\n # Ask for StorageDRS recommendation\n # to reconfigure VM (AddDisk)\n sm = storagemanager\n\n # Disk id is -1 as I don't know\n # what disk id is going to be set\n disk_locator = [RbVmomi::VIM.PodDiskLocator(:diskId => -1)]\n\n # Disk locator is required for AddDisk\n vmpod_hash = {}\n vmpod_hash[:storagePod] = get_ds\n vmpod_hash[:disk] = disk_locator\n vmpod_config =\n RbVmomi::VIM::VmPodConfigForPlacement(\n vmpod_hash\n )\n\n # The storage pod selection requires initialize\n spod_hash = {}\n spod_hash[:initialVmConfig] = [vmpod_config]\n spod_select =\n RbVmomi::VIM::StorageDrsPodSelectionSpec(\n spod_hash\n )\n storage_spec = RbVmomi::VIM.StoragePlacementSpec(\n :type => :reconfigure,\n :podSelectionSpec => spod_select,\n :vm => self['_ref'],\n :configSpec => spec\n )\n\n # Query a storage placement recommendation\n result = sm\n .RecommendDatastores(\n :storageSpec => storage_spec\n ) rescue nil\n\n if result.nil?\n raise 'Could not get placement '\\\n 'specification for StoragePod'\n end\n\n if !result.respond_to?(:recommendations) ||\n result.recommendations.empty?\n raise 'Could not get placement '\\\n 'specification for StoragePod'\n end\n\n # Get recommendation key to be applied\n key = result.recommendations.first.key ||= ''\n\n if key.empty?\n raise 'Missing Datastore recommendation for StoragePod'\n end\n\n # Apply recommendation\n sm.ApplyStorageDrsRecommendation_Task(\n :key => [key]\n ).wait_for_completion\n\n # Add the key for the volatile disk to the\n # unmanaged opennebula.disk.id variables\n unit_number =\n spec_hash[:deviceChange][0][:device]\n .unitNumber\n controller_key =\n spec_hash[:deviceChange][0][:device]\n .controllerKey\n key =\n get_vcenter_disk_key(\n unit_number,\n controller_key\n )\n spec_hash = {}\n reference = {}\n reference[:key] =\n \"opennebula.disk.#{disk['DISK_ID']}\"\n reference[:value] = key.to_s\n spec_hash[:extraConfig] = [reference]\n @item\n .ReconfigVM_Task(\n :spec => spec_hash\n ).wait_for_completion\n else\n @item\n .ReconfigVM_Task(\n :spec => spec\n ).wait_for_completion\n end\n # Modify extraConfig if disks has a bad key\n sync_extraconfig_disk(spec_hash)\n rescue 
StandardError => e\n error = \"Cannot attach DISK to VM: #{e.message}.\"\n\n if VCenterDriver::CONFIG[:debug_information]\n error += \"\\n\\n#{e.backtrace.join(\"\\n\")}\"\n end\n\n raise error\n end\n end", "def attach_file_to_parallels_vm(options)\n message = \"Information:\\tAttaching Image \"+options['file']+\" to \"+options['name']\n command = \"prlctl set \\\"#{options['name']}\\\" --device-set cdrom0 --image \\\"#{options['file']}\\\"\"\n execute_command(options,message,command)\n return\nend", "def restore_from_snap(last_snapshot, options = {})\n options[:device] = \"/dev/sdk\" unless options[:device]\n options[:vol_nickname] = last_snapshot[\"nickname\"] unless options[:vol_nickname]\n \n # 5 - Unmount and detach the current EBS volume (forcing to detach the device we're gonna need later for attching ours...)\n umount_and_detach_device({:device => options[:device]})\n # 6- Create the volume from the latest snapshot, attach it to the instance and then mount it\n STDERR.puts \"Creating new DB volume from snapshot #{last_snapshot['aws_id']}\"\n vol = ( options[:new_size_gb] ? create_volume_from_snap_size_gb(last_snapshot[\"aws_id\"],options[:vol_nickname],options[:new_size_gb] ) : create_volume_from_snap(last_snapshot[\"aws_id\"],options[:vol_nickname] ) )\n unless vol.nil?\n \tSTDERR.puts \"Attaching new DB volume: #{vol['aws_id']}\"\n \tatt = attach_volume(vol['aws_id'], options[:device])\n \twait_for_attachment(options[:device])\n \tFileUtils.mkdir_p self.MountPoint\n \tres = `mount -t xfs -o noatime #{options[:device]} #{self.MountPoint}`\n \traise EBSRemoteExecException.new(nil,$?,\"Error mounting newly created volume (#{vol['aws_id']}) on #{options[:device]}:\\n\"+res) if $? != 0 \n else\n\t raise \"create volume failed from snapshot\"\n end\n end", "def attach_volume_queue(userid, server_ems_ref, device = nil)\n task_opts = {\n :action => \"attaching Cloud Volume for user #{userid}\",\n :userid => userid\n }\n\n queue_opts = {\n :class_name => self.class.name,\n :method_name => 'attach_volume',\n :instance_id => id,\n :role => 'ems_operations',\n :queue_name => ext_management_system.queue_name_for_ems_operations,\n :zone => ext_management_system.my_zone,\n :args => [server_ems_ref, device]\n }\n\n MiqTask.generic_action_with_callback(task_opts, queue_opts)\n end", "def create_volume(snapshot_id, size, availability_zone, timeout, volume_type, piops)\n availability_zone ||= instance_availability_zone\n\n # Sanity checks so we don't shoot ourselves.\n raise \"Invalid volume type: #{volume_type}\" unless ['standard', 'gp2', 'io1'].include?(volume_type)\n\n # PIOPs requested. Must specify an iops param and probably won't be \"low\".\n if volume_type == 'io1'\n raise 'IOPS value not specified.' unless piops >= 100\n end\n\n # Shouldn't see non-zero piops param without appropriate type.\n if piops > 0\n raise 'IOPS param without piops volume type.' unless volume_type == 'io1'\n end\n\n create_volume_opts = { :volume_type => volume_type }\n # TODO: this may have to be casted to a string. rightaws vs aws doc discrepancy.\n create_volume_opts[:iops] = piops if volume_type == 'io1'\n\n nv = ec2.create_volume(snapshot_id, size, availability_zone, create_volume_opts)\n Chef::Log.debug(\"Created new volume #{nv[:aws_id]}#{snapshot_id ? 
\" based on #{snapshot_id}\" : \"\"}\")\n\n # block until created\n begin\n Timeout::timeout(timeout) do\n while true\n vol = volume_by_id(nv[:aws_id])\n if vol && vol[:aws_status] != \"deleting\"\n if [\"in-use\", \"available\"].include?(vol[:aws_status])\n Chef::Log.info(\"Volume #{nv[:aws_id]} is available\")\n break\n else\n Chef::Log.debug(\"Volume is #{vol[:aws_status]}\")\n end\n sleep 3\n else\n raise \"Volume #{nv[:aws_id]} no longer exists\"\n end\n end\n end\n rescue Timeout::Error\n raise \"Timed out waiting for volume creation after #{timeout} seconds\"\n end\n\n nv[:aws_id]\n end", "def attach(options = {}, &block)\n options = { stream: true, stdout: true }.merge(options)\n connection.post(\"/containers/#{id}/attach\", options, response_block: block)\n end", "def attach_disk(server_id, disk_id)\n with_thread_name(\"attach_disk(#{server_id}, #{disk_id})\") do\n server = @openstack.with_openstack { @openstack.compute.servers.get(server_id) }\n cloud_error(\"Server `#{server_id}' not found\") unless server\n\n volume = @openstack.with_openstack { @openstack.volume.volumes.get(disk_id) }\n cloud_error(\"Volume `#{disk_id}' not found\") unless volume\n\n device_name = attach_volume(server, volume)\n\n update_agent_settings(server) do |settings|\n settings['disks'] ||= {}\n settings['disks']['persistent'] ||= {}\n settings['disks']['persistent'][disk_id] = device_name\n end\n end\n end", "def attach_to_elb(instance, elb_name, subnet = nil)\n begin\n @log.info \"\"\n @log.info \"Adding to ELB: #{elb_name}\"\n elb = AWS::ELB.new\n AWS.memoize do\n unless subnet\n # Build list of availability zones for any existing instances\n zones = { }\n zones[instance.availability_zone] = instance.availability_zone\n elb.load_balancers[elb_name].instances.each do |elb_instance|\n zones[elb_instance.availability_zone] = elb_instance.availability_zone\n end\n \n # Build list of existing zones\n existing_zones = { }\n elb.load_balancers[elb_name].availability_zones.each do |zone|\n existing_zones[zone.name] = zone\n end\n \n # Enable zones\n zones.keys.each do |zone_name|\n elb.load_balancers[elb_name].availability_zones.enable(zones[zone_name])\n end\n \n # Disable zones\n existing_zones.keys.each do |zone_name|\n elb.load_balancers[elb_name].availability_zones.disable(existing_zones[zone_name]) unless zones.has_key?(zone_name)\n end\n end\n \n elb.load_balancers[elb_name].instances.register(instance)\n end\n rescue StandardError => bang\n @log.error \"Error adding to load balancers: \" + bang.to_s\n end\n end", "def addVolume(dev, size, type: \"pd-standard\", delete_on_termination: false)\n devname = dev.gsub(/.*?\\/([^\\/]+)$/, '\\1')\n resname = MU::Cloud::Google.nameStr(@mu_name+\"-\"+devname)\n MU.log \"Creating disk #{resname}\"\n\n description = @deploy ? 
@deploy.deploy_id : @mu_name+\"-\"+devname\n\n newdiskobj = MU::Cloud::Google.compute(:Disk).new(\n size_gb: size,\n description: description,\n zone: @config['availability_zone'],\n# type: \"projects/#{config['project']}/zones/#{config['availability_zone']}/diskTypes/pd-ssd\",\n type: \"projects/#{@project_id}/zones/#{@config['availability_zone']}/diskTypes/#{type}\",\n# Other values include pd-ssd and local-ssd\n name: resname\n )\n\n begin\n newdisk = MU::Cloud::Google.compute(credentials: @config['credentials']).insert_disk(\n @project_id,\n @config['availability_zone'],\n newdiskobj\n )\n rescue ::Google::Apis::ClientError => e\n if e.message.match(/^alreadyExists: /)\n MU.log \"Disk #{resname} already exists, ignoring request to create\", MU::WARN\n return\n else\n raise e\n end\n end\n\n attachobj = MU::Cloud::Google.compute(:AttachedDisk).new(\n device_name: devname,\n source: newdisk.self_link,\n type: \"PERSISTENT\",\n auto_delete: delete_on_termination\n )\n\n MU.log \"Attaching disk #{resname} to #{@cloud_id} at #{devname}\"\n MU::Cloud::Google.compute(credentials: @config['credentials']).attach_disk(\n @project_id,\n @config['availability_zone'],\n @cloud_id,\n attachobj\n )\n\n end", "def attached_to\n\t\tregion = resource[:availability_zone].to_s.gsub(/.$/,'') \n\t\tcompute = Fog::Compute.new(:provider => 'aws', :region => \"#{region}\")\n\t\tvolume = volinfo(compute,resource[:volume_name])\n\t\tprint \"attached_to: looking at volume #{resource[:volume_name]}\\n\" if $debug\n\t\tif ( volume['status'] == 'in-use' ) \n\t\t\t# Look for the name of the instance which this volume is attached to.\n\t\t\tif ( volume['attachmentSet'][0]['instanceId'] != nil )\n\t\t\t\tprint \"#{resource[:volume_name]} is attached to #{volume['attachmentSet'][0]['instanceId']}\\n\" if $debug\n\t\t\t\t# If the resource is specified as attached_to => \"me\" then we'd better check that it is attached\n\t\t\t\t# to this machine.\n\t\t\t\tif ( resource[:attached_to] == \"me\")\n\t\t\t\t\tprint \"Am I me?\\n\" if $debug\n\t\t\t\t\tprint \"I am #{myname(compute)}\\n\" if $debug\n\t\t\t\t\tif ( myname(compute) == lookupname(compute,volume['attachmentSet'][0]['instanceId']))\n\t\t\t\t\t\treturn \"me\"\n\t\t\t\t\telse\n\t\t\t\t\t\treturn lookupname(compute,volume['attachmentSet'][0]['instanceId'])\n\t\t\t\t\tend\n\t\t\t\telse\n\t\t\t\t\treturn lookupname(compute,volume['attachmentSet'][0]['instanceId'])\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\treturn ''\n\tend", "def currently_attached_volume(instance_id, device)\n ec2.describe_volumes.find{|v| v[:aws_instance_id] == instance_id && v[:aws_device] == device}\n end", "def create # rubocop:disable Metrics/AbcSize\n inst_details = AttrFinder.new(@instanceparameters)\n inst_details.options = @options\n inst_details.validate = @validate\n inst_details.function = 'server'\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::Models::LaunchInstanceDetails.new\n ssh_public_key = @instanceparameters['server']['ssh-key']\n request.availability_domain = inst_details.ad\n request.compartment_id = inst_details.compartment\n request.display_name = @instanceparameters['server']['display_name']\n request.image_id = inst_details.image\n request.shape = @instanceparameters['server']['shape']\n request.subnet_id = inst_details.subnet\n request.metadata = { 'ssh_authorized_keys' => ssh_public_key }\n api = OracleBMC::Core::ComputeClient.new\n response = api.launch_instance(request)\n @instance_id = response.data.id\n compartment(inst_details.compartment)\n running_instance = 
api.get_instance(@instance_id).wait_until(:lifecycle_state,\n OracleBMC::Core::Models::Instance::LIFECYCLE_STATE_RUNNING,\n max_interval_seconds: 5, max_wait_seconds: 300)\n if @instanceparameters['server']['attachments']\n @instanceparameters['server']['attachments'].each do |vol|\n attach(@instance_id, vol['volume'])\n end\n end\n running_instance\n end", "def attaches_ebs_volumes settings\n has_role settings, \"ebs_volumes_attach\"\nend", "def createvolume\n if not checkRequirements([\"thezone\",\"thevolume\"])\n return false\n end\n checkToken(@thezone)\n req = {}\n req[\"name\"] = \"oe-#{@thevolume.name}\"\n req[\"description\"] = @thevolume.description\n req[\"sizeGb\"] = @thevolume.size\n submit = queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@thevolume.azone.name}/disks', :method => 'post', :options => '', :data => req.to_json, :access_token => @thezone.toekn )\n d = checkQuery(:type => 'zone', :token => @thezone.token, :projectname => @thezone.name, :zonename => @thevolume.azone.name, :operationname => submit[\"name\"])\n data = queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@thevolume.azone.name}/disks/#{req[\"name\"]}', :method => 'get', :options => '', :access_token => @thezone.token) if d\n data ? data[\"name\"] : false\n end", "def create_volume(options)\n # Creating the volume is part of the server creation\n end", "def create_volume(options)\n # Creating the volume is part of the server creation\n end", "def attach_storage(server_uuid, storage_uuid:, type: \"disk\", address: nil)\n data = {\n \"storage_device\" => {\n \"type\" => type,\n \"storage\" => storage_uuid\n }\n }\n data[\"storage_device\"][\"address\"] = address unless address.nil?\n\n json = JSON.generate data\n\n response = post \"server/#{server_uuid}/storage/attach\", json\n\n response\n end", "def prepare_vm_disk_attachment(disk_spec, storage_spec)\n disk_spec = disk_spec.symbolize_keys\n attachment_builder = ManageIQ::Providers::Ovirt::InfraManager::DiskAttachmentBuilder.new(\n :size_in_mb => disk_spec[:disk_size_in_mb],\n :storage => storage_spec,\n :name => disk_spec[:disk_name],\n :thin_provisioned => disk_spec[:thin_provisioned],\n :bootable => disk_spec[:bootable],\n :interface => disk_spec[:interface]\n )\n attachment_builder.disk_attachment\n end", "def terminate_volume\n device = get_device_mount_point(self.MountPoint)\n STDERR.puts \"EBS device detected: #{device}...umounting it...\"\n umount_dir(self.MountPoint)\n #detache the mounted volume\n STDERR.puts \"Detaching current EBS volume:\"\n detached_vol=detach_volume(device)\n STDERR.puts \"detachment started (#{detached_vol.inspect})\"\n# this is necessary, for delete to succeed. 
however it may be too long of a wait time for decom\n wait_for_detachment(device, 60)\n delete_volume(detached_vol['aws_id'])\n detached_vol['aws_id']\n rescue => e\n display_exception(e, \"terminate_volume\")\n end", "def create_aws_instance(config, name, instance_type=\"m3.medium\")\n config.ssh.pty = true\n config.vm.define name do |server|\n server.vm.box = AWS_BOX\n server.vm.provider :aws do |aws, override|\n aws.instance_type = instance_type\n aws.region = AWS_REGION\n aws.ami = AWS_AMI\n aws.keypair_name = AWS_PRIVATE_KEY\n override.ssh.username = AWS_SSH_USERNAME\n override.ssh.private_key_path = AWS_PRIVATE_KEY_PATH\n yield(aws,override,server)\n end\n end\nend", "def mount_kvm_volume(name)\n dev = available_dev\n enable_netblockdev(name, dev)\n vol_grp = lvm_volume_group(\n lvm_partition(dev)\n )\n root = lvm_root(vol_grp)\n lvm_enable(vol_grp) unless lvm_enabled?(root)\n mount(name, root)\n dev\nend", "def detach()\n $ec2.describe_volumes([self.id]).each do |result|\n if result[:aws_attachment_status] == 'attached'\n $ec2.detach_volume(self.id)\n end\n end\n self.attached_instance = nil\n self.save()\n end", "def create_snapshot(options)\n snapshot = ec2.snapshots.new\n snapshot.volume_id = options['volume_id']\n snapshot.description = options['description']\n\n attempts = 0\n\n begin\n snapshot.save\n snapshot.reload\n rescue Fog::Compute::AWS::Error\n sleep 5\n attempts += 1\n if attempts == 5\n log \"Error communicating with API; Unable to save volume `#{options['volume_id']}` (Desc: #{options['description']})\"\n end\n return unless attempts == 5\n end\n\n options['tags'].each do |k,v|\n begin\n ec2.tags.create({resource_id: snapshot.id, key: k, value: v})\n rescue Errno::EINPROGRESS , Errno::EISCONN\n log \"API Connection Error\"\n sleep 1\n retry\n rescue Fog::Compute::AWS::Error\n log \"Failed attaching tag `'#{k}' => #{v}` to #{options['snapshot_type']} snapshot #{snapshot.id}\"\n sleep 1\n retry\n end\n end\n\n end", "def attach_disk(vm_cid, disk_cid)\n @telemetry_manager.monitor('initialize') do\n _init_azure\n end\n with_thread_name(\"attach_disk(#{vm_cid},#{disk_cid})\") do\n @telemetry_manager.monitor('attach_disk', id: vm_cid) do\n instance_id = InstanceId.parse(vm_cid, _azure_config.resource_group_name)\n disk_id = DiskId.parse(disk_cid, _azure_config.resource_group_name)\n vm_name = instance_id.vm_name\n disk_name = disk_id.disk_name\n\n vm = @vm_manager.find(instance_id)\n\n # Workaround for issue #280\n # Issue root cause: Attaching a data disk to a VM whose OS disk is busy might lead to OS hang.\n # If 'use_root_disk' is true in vm_types/vm_extensions, release packages will be copied to OS disk before attaching data disk,\n # it will continuously write the data to OS disk, that is why OS disk is busy.\n # Workaround: Sleep 30 seconds before attaching data disk, to wait for completion of data writing.\n has_ephemeral_disk = false\n vm[:data_disks].each do |disk|\n has_ephemeral_disk = true if is_ephemeral_disk?(disk[:name])\n end\n unless has_ephemeral_disk\n @logger.debug('Sleep 30 seconds before attaching data disk - workaround for issue #280')\n sleep(30)\n end\n\n if @use_managed_disks\n disk = @disk_manager2.get_data_disk(disk_id)\n vm_zone = vm[:zone]\n if instance_id.use_managed_disks?\n if disk.nil?\n if disk_id.disk_name.start_with?(DATA_DISK_PREFIX)\n @logger.info(\"attach_disk - migrate the disk '#{disk_name}' from unmanaged to managed\")\n begin\n storage_account_name = disk_id.storage_account_name\n blob_uri = 
@disk_manager.get_data_disk_uri(disk_id)\n storage_account = @azure_client.get_storage_account_by_name(storage_account_name)\n location = storage_account[:location]\n # Can not use the type of the default storage account because only Standard_LRS and Premium_LRS are supported for managed disk.\n account_type = storage_account[:sku_tier] == SKU_TIER_PREMIUM ? STORAGE_ACCOUNT_TYPE_PREMIUM_LRS : STORAGE_ACCOUNT_TYPE_STANDARD_LRS\n @logger.debug(\"attach_disk - Migrating the unmanaged disk '#{disk_name}' to a managed disk\")\n @disk_manager2.create_disk_from_blob(disk_id, blob_uri, location, account_type, storage_account[:id], vm_zone)\n\n # Set below metadata but not delete it.\n # Users can manually delete all blobs in container 'bosh' whose names start with 'bosh-data' after migration is finished.\n @blob_manager.set_blob_metadata(storage_account_name, DISK_CONTAINER, \"#{disk_name}.vhd\", METADATA_FOR_MIGRATED_BLOB_DISK)\n rescue StandardError => e\n if account_type # There are no other functions between defining account_type and @disk_manager2.create_disk_from_blob\n begin\n @disk_manager2.delete_data_disk(disk_id)\n rescue StandardError => err\n @logger.error(\"attach_disk - Failed to delete the created managed disk #{disk_name}. Error: #{e.inspect}\\n#{e.backtrace.join(\"\\n\")}\")\n end\n end\n cloud_error(\"attach_disk - Failed to create the managed disk for #{disk_name}. Error: #{e.inspect}\\n#{e.backtrace.join(\"\\n\")}\")\n end\n end\n elsif disk[:zone].nil? && !vm_zone.nil?\n @logger.info(\"attach_disk - migrate the managed disk '#{disk_name}' from regional to zonal\")\n begin\n @disk_manager2.migrate_to_zone(disk_id, disk, vm_zone)\n rescue StandardError => e\n cloud_error(\"attach_disk - Failed to migrate disk #{disk_name} to zone #{vm_zone}. 
Error: #{e.inspect}\\n#{e.backtrace.join(\"\\n\")}\")\n end\n end\n else\n cloud_error('Cannot attach a managed disk to a VM with unmanaged disks') unless disk.nil?\n @logger.debug(\"attach_disk - although use_managed_disks is enabled, will still attach the unmanaged disk '#{disk_name}' to the VM '#{vm_name}' with unmanaged disks\")\n end\n end\n\n lun = @vm_manager.attach_disk(instance_id, disk_id)\n\n disk_hints = {\n 'lun' => lun,\n 'host_device_id' => AZURE_SCSI_HOST_DEVICE_ID\n }\n\n if _should_write_to_registry?\n _update_agent_settings(instance_id.to_s) do |settings|\n settings['disks'] ||= {}\n settings['disks']['persistent'] ||= {}\n settings['disks']['persistent'][disk_id.to_s] = disk_hints\n end\n end\n\n @logger.info(\"Attached the disk '#{disk_id}' to the instance '#{instance_id}', lun '#{lun}'\")\n\n _attach_disk_response(disk_hints)\n end\n end\n end", "def create_snapshot_bundle\n # we shouldn't specify -k $EC2_PRIVATE_KEY since we assume private keys are already appended to /root/.ssh/authorized_keys\n # but it's a required parameter -- doh!\n run \"#{ec2_cmd('ec2-bundle-vol')} -v #{volume_to_bundle} -d #{bundling_directory} -k $EC2_PRIVATE_KEY -u #{@ec2_user_id} -s #{volume_size}\"\n end", "def create_volume(options={}) \n raise \"Volume nickname required\" unless options[:nickname]\n params = {:nickname => options[:nickname],:size => options[:size], :api_version => 1.0}\n params[:description] = options[:description] if options[:description]\n #STDERR.puts \"HERE IS THE URL: #{@api_url}/create_ebs_volume.js (PARAMS: #{params.inspect})\"\n body = RestClient.post @api_url+\"/create_ebs_volume.js\",params\n json = JSON.load(body)\n STDERR.puts \"CREATED_VOLUME: #{json}\"\n json\n rescue => e\n display_exception(e, \"create_volume: #{options.inspect}\")\n end", "def configure_instance(aws_node, private_ip_address, node_name, node_config)\n # Spin up EC2 instances\n aws_node.vm.provider :aws do |ec2, override|\n ec2.keypair_name = KEYPAIR_NAME\n ec2.access_key_id = ACCESS_KEY_ID\n ec2.secret_access_key = SECRET_ACCESS_KEY\n ec2.security_groups = SECURITY_GROUPS\n override.ssh.private_key_path = PRIVATE_KEY_PATH\n\n # read region, ami etc from json.\n ec2.region = AWS_CFG['region']\n ec2.subnet_id = AWS_CFG['subnet_id']\n ec2.availability_zone = AWS_CFG['region'] + AWS_CFG['availability_zone']\n ec2.ami = node_config['ami_id']\n ec2.instance_type = node_config['instance_type']\n ec2.private_ip_address = private_ip_address\n ec2.associate_public_ip = true\n\n if node_config.key?('volume_size')\n # Size in GB\n # (untested)\n ec2.block_device_mapping = [{ 'DeviceName' => '/dev/sda1', 'Ebs.VolumeSize' => node_config['volume_size'] }]\n end\n\n override.ssh.username = AWS_CFG['ssh_username']\n\n # Collect tags (can't be longer than 250 chars)\n ec2.tags = ({})\n ec2.tags['Name'] = node_name[0..245]\n ec2.tags['Type'] = 'Hyperledger'\n ec2.tags['Version'] = VERSION\n ec2.tags['Fabric'] = node_config['fabric'].map { |f| f['role'] }.join(',')[0..245]\n end\nend", "def create_volume( options = {} )\n options = { :availability_zone => '' }.merge(options)\n raise ArgumentError, \"No :availability_zone provided\" if options[:availability_zone].nil? 
|| options[:availability_zone].empty?\n options = { :size => '' }.merge(options)\n options = { :snapshot_id => '' }.merge(options)\n params = {\n \"AvailabilityZone\" => options[:availability_zone],\n \"Size\" => options[:size],\n \"SnapshotId\" => options[:snapshot_id]\n }\n return response_generator(:action => \"CreateVolume\", :params => params)\n end", "def create_vbd(vm_ref, vdi_ref, position, boot = true)\n vbd_record = {\n 'VM' => vm_ref,\n 'VDI' => vdi_ref,\n 'empty' => false,\n 'other_config' => { 'owner' => '' },\n 'userdevice' => position.to_s,\n 'bootable' => boot,\n 'mode' => 'RW',\n 'qos_algorithm_type' => '',\n 'qos_algorithm_params' => {},\n 'qos_supported_algorithms' => [],\n 'type' => 'Disk'\n }\n\n task = xapi.Async.VBD.create(vbd_record)\n ui.msg 'Waiting for VBD create'\n vbd_ref = get_task_ref(task)\n vbd_ref\n end", "def volume_create(name)\n @log.info \"Creating volume #{name} from offering id #{DISK_OFFERING}...\"\n ret = @cloud_stack.create_volume(name, ZONE, DISK_OFFERING)\n id = ret[\"createvolumeresponse\"][\"jobid\"]\n wait_for_job id\n vol_id = ret[\"createvolumeresponse\"][\"id\"]\n @log.info \"Created volume id: #{vol_id}\"\n vol_id\n end", "def launch\n puts \"==> Creating EC2 instance...\"\n\n @instance = @aws_ec2.instances.create( @config.env.merge( { \"key_name\" => key_name, \"security_groups\" => [security_group_name] } ) )\n @instance.tag(\"environment\", {value: @config.environment})\n\n while @instance.status == :pending\n print \".\"\n sleep 2\n end\n\n # Sleep for 30 more seconds\n 15.times do\n print \".\"\n sleep 2\n end\n puts \".\" # new line\n\n puts \"==> Successfully created EC2 instance '#{@instance.id}'\"\n end", "def enable_netblockdev(name, dev)\n res = system \"kvm-nbd -c #{dev} #{File.join(KVM_HOME, 'storage', \"#{name}.qcow2\")}\"\n raise \"Failed to attach image to network block device server\" unless res\n puts \"Attached net block device: #{dev}\"\nend", "def store_image instance, tags\n begin\n \n puts \"waiting 2 minutes before starting to take the image...\"\n sleep 120\n puts \"creating image...\"\n \n image = @ec2.images.create( \n :instance_id => instance.id,\n :no_reboot => true,\n :description => \"automaticaly created #{tags[ 'image_type' ]} image\",\n :name => \"#{tags[ 'image_type' ]} #{Digest::SHA1.hexdigest tags.inspect}\" )\n \n wait_for_image image\n \n tags.each do | key, value |\n image.add_tag( key, :value => value )\n end \n ensure\n stop_instance instance\n end\n end", "def createEBSSnapshot(client=nil,description='',volume_id=nil)\n return false if volume_id.nil? || client.nil?\n # Fetch the Volume Name. 
This will be used in the description of the snapshot\n resp = client.describe_volumes({dry_run: false, volume_ids: [volume_id] })\n resp.volumes[0].tags.each do |t|\n if t.key=='Name'\n description = t.value unless t.value.empty?\n break\n end\n end\n # puts \"Taking snapshot of volume #{volume_id}...\"\n return client.create_snapshot({\n dry_run: false,\n volume_id: volume_id,\n description: description\n })\nend", "def attach(options = {})\n options = { :stream => true, :stdout => true }.merge(options)\n connection.post(\"/containers/#{id}/attach\", options)\n end", "def create_disk(size, cloud_properties, instance_id = nil)\n raise ArgumentError, 'disk size needs to be an integer' unless size.kind_of?(Integer)\n with_thread_name(\"create_disk(#{size}, #{instance_id})\") do\n volume_properties = VolumeProperties.new(\n size: size,\n type: cloud_properties['type'],\n iops: cloud_properties['iops'],\n az: @az_selector.select_availability_zone(instance_id),\n encrypted: cloud_properties['encrypted'],\n kms_key_arn: cloud_properties['kms_key_arn']\n )\n\n resp = @ec2_client.client.create_volume(volume_properties.persistent_disk_config)\n volume = AWS::EC2::Volume.new_from(:create_volume, resp, resp.volume_id, config: @ec2_client.config)\n\n logger.info(\"Creating volume '#{volume.id}'\")\n ResourceWait.for_volume(volume: volume, state: :available)\n\n volume.id\n end\n end", "def add_volume(container_name: nil, volume_name: nil, volume_config:, mount_path: nil,\n mount_config: {}, block: false, timeout: 60, polling: 5)\n\n create_volume(volume_name, config: volume_config)\n mount_volume(container_name,\n volume_name: volume_name,\n mount_path: mount_path,\n **mount_config)\n\n update\n sleep polling\n wait_for_deployments(timeout: timeout, polling: polling) if block\n reload(true)\n end", "def create_disk(size, cloud_properties, instance_id = nil)\n raise ArgumentError, 'disk size needs to be an integer' unless size.kind_of?(Integer)\n with_thread_name(\"create_disk(#{size}, #{instance_id})\") do\n volume_properties = VolumeProperties.new(\n size: size,\n type: cloud_properties['type'],\n iops: cloud_properties['iops'],\n az: @az_selector.select_availability_zone(instance_id),\n encrypted: cloud_properties['encrypted'],\n kms_key_arn: cloud_properties['kms_key_arn']\n )\n\n volume_resp = @ec2_client.create_volume(volume_properties.persistent_disk_config)\n volume = Aws::EC2::Volume.new(\n id: volume_resp.volume_id,\n client: @ec2_client,\n )\n\n logger.info(\"Creating volume '#{volume.id}'\")\n ResourceWait.for_volume(volume: volume, state: 'available')\n\n volume.id\n end\n end", "def createEc2Instance\n\n instance_descriptor = {\n :image_id => @config[\"image_id\"],\n :key_name => @deploy.ssh_key_name,\n :instance_type => @config[\"size\"],\n :disable_api_termination => true,\n :min_count => 1,\n :max_count => 1\n }\n\n instance_descriptor[:iam_instance_profile] = getIAMProfile\n\n security_groups = myFirewallRules.map { |fw| fw.cloud_id }\n if security_groups.size > 0\n instance_descriptor[:security_group_ids] = security_groups\n else\n raise MuError, \"Didn't get any security groups assigned to be in #{@mu_name}, that shouldn't happen\"\n end\n\n if @config['private_ip']\n instance_descriptor[:private_ip_address] = @config['private_ip']\n end\n\n if [email protected]? 
and @config.has_key?(\"vpc\")\n subnet = mySubnets.sample\n if subnet.nil?\n raise MuError, \"Got null subnet id out of #{@config['vpc']}\"\n end\n MU.log \"Deploying #{@mu_name} into VPC #{@vpc.cloud_id} Subnet #{subnet.cloud_id}\"\n allowBastionAccess\n instance_descriptor[:subnet_id] = subnet.cloud_id\n end\n\n if [email protected]? and [email protected]?\n instance_descriptor[:user_data] = Base64.encode64(@userdata)\n end\n\n MU::Cloud::AWS::Server.waitForAMI(@config[\"image_id\"], region: @region, credentials: @credentials)\n\n instance_descriptor[:block_device_mappings] = MU::Cloud::AWS::Server.configureBlockDevices(image_id: @config[\"image_id\"], storage: @config['storage'], region: @region, credentials: @credentials)\n\n instance_descriptor[:monitoring] = {enabled: @config['monitoring']}\n\n if @tags and @tags.size > 0\n instance_descriptor[:tag_specifications] = [{\n :resource_type => \"instance\",\n :tags => @tags.keys.map { |k|\n { :key => k, :value => @tags[k] }\n }\n }]\n end\n\n MU.log \"Creating EC2 instance #{@mu_name}\", details: instance_descriptor\n\n instance = resp = nil\n loop_if = Proc.new {\n instance = resp.instances.first if resp and resp.instances\n resp.nil? or resp.instances.nil? or instance.nil?\n }\n\n bad_subnets = []\n mysubnet_ids = if mySubnets\n mySubnets.map { |s| s.cloud_id }\n end\n begin\n MU.retrier([Aws::EC2::Errors::InvalidGroupNotFound, Aws::EC2::Errors::InvalidSubnetIDNotFound, Aws::EC2::Errors::InvalidParameterValue], loop_if: loop_if, loop_msg: \"Waiting for run_instances to return #{@mu_name}\") {\n resp = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).run_instances(instance_descriptor)\n }\n rescue Aws::EC2::Errors::Unsupported => e\n bad_subnets << instance_descriptor[:subnet_id]\n better_subnet = (mysubnet_ids - bad_subnets).sample\n if e.message !~ /is not supported in your requested Availability Zone/ and\n (mysubnet_ids.nil? or mysubnet_ids.empty? or\n mysubnet_ids.size == bad_subnets.size or\n better_subnet.nil? or better_subnet == \"\")\n raise MuError.new e.message, details: mysubnet_ids\n end\n instance_descriptor[:subnet_id] = (mysubnet_ids - bad_subnets).sample\n if instance_descriptor[:subnet_id].nil?\n raise MuError.new \"Specified subnet#{bad_subnets.size > 1 ? 
\"s do\" : \" does\"} not support instance type #{instance_descriptor[:instance_type]}\", details: bad_subnets\n end\n MU.log \"One or more subnets does not support instance type #{instance_descriptor[:instance_type]}, attempting with #{instance_descriptor[:subnet_id]} instead\", MU::WARN, details: bad_subnets\n retry\n rescue Aws::EC2::Errors::InvalidRequest => e\n MU.log e.message, MU::ERR, details: instance_descriptor\n raise e\n end\n\n MU.log \"#{@mu_name} (#{instance.instance_id}) coming online\"\n\n instance\n end", "def clone_volume(source, target)\n debug(\"Creating Libvirt volume #{target}\")\n debug(\"Cloning volume from #{source}\")\n\n # Attempt to locate the target or source volume\n source_image = client.volumes.get(source)\n if source_image.name =~ /^fog-\\d+/\n error(\"Could not find target image: #{source}.\")\n end\n\n # Clone the source volume\n source_image.clone_volume(target)\n client.volumes.all.find { |vol| vol.name == target }\n end", "def attach_disk request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_attach_disk_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def handle_attach_request(obj, original_message)\n if obj.include? :disk\n params = obj[:disk]\n disk_number = params[:disk_number]\n vds_uid = params[:vds_uid]\n\n vds = VpsConfiguration.find_by_uid(vds_uid)\n if vds.nil?\n msg = Cirrocumulus::Message.new(nil, 'refuse', [original_message.content, [:vds_not_found]])\n msg.ontology = self.name\n self.agent.reply_to_message(msg, original_message)\n return\n end\n\n disk = VdsDisk.find_by_number(disk_number)\n if disk.nil?\n msg = Cirrocumulus::Message.new(nil, 'refuse', [original_message.content, [:disk_not_found]])\n msg.ontology = self.name\n self.agent.reply_to_message(msg, original_message)\n return\n end\n\n if vds.disks.find {|d| d.number == disk.number}\n msg = Cirrocumulus::Message.new(nil, 'refuse', [original_message.content, [:already_attached]])\n msg.ontology = self.name\n self.agent.reply_to_message(msg, original_message)\n return\n end\n\n vds.attach_disk(disk)\n create_saga(AttachVirtualDiskSaga).start(disk_number, vds_uid, disk.block_device, original_message)\n end\n end", "def addDisk(backingFile, sizeInMB, label = nil, summary = nil, options = {})\n # Remove nil keys if any, since the next line may not work\n options.reject! { |_k, v| v.nil? 
}\n # Merge default values:\n # - persistent is set to true to be backward compatible\n # - thin_provisioned is set to false explicitly since we call to_s on it further, so nil will not work for us\n options = {:persistent => true, :thin_provisioned => false}.merge(options)\n ck, un = available_scsi_units.first\n raise \"addDisk: no SCSI controller found\" unless ck\n\n vmConfigSpec = VimHash.new(\"VirtualMachineConfigSpec\") do |vmcs|\n vmcs.deviceChange = VimArray.new(\"ArrayOfVirtualDeviceConfigSpec\") do |vmcs_vca|\n vmcs_vca << VimHash.new(\"VirtualDeviceConfigSpec\") do |vdcs|\n vdcs.operation = VirtualDeviceConfigSpecOperation::Add\n if sizeInMB < 0\n sizeInMB = -sizeInMB\n else\n vdcs.fileOperation = VirtualDeviceConfigSpecFileOperation::Create\n end\n vdcs.device = VimHash.new(\"VirtualDisk\") do |vDev|\n vDev.key = -100 # temp key for creation\n vDev.capacityInKB = sizeInMB * 1024\n vDev.controllerKey = ck\n vDev.unitNumber = un\n # The following doesn't seem to work.\n vDev.deviceInfo = VimHash.new(\"Description\") do |desc|\n desc.label = label\n desc.summary = summary\n end if label || summary\n vDev.connectable = VimHash.new(\"VirtualDeviceConnectInfo\") do |con|\n con.allowGuestControl = \"false\"\n con.startConnected = \"true\"\n con.connected = \"true\"\n end\n if options[:dependent]\n mode = (options[:persistent] ? VirtualDiskMode::Persistent : VirtualDiskMode::Nonpersistent)\n else\n mode = (options[:persistent] ? VirtualDiskMode::Independent_persistent : VirtualDiskMode::Independent_nonpersistent)\n end\n vDev.backing = VimHash.new(\"VirtualDiskFlatVer2BackingInfo\") do |bck|\n bck.diskMode = mode\n bck.split = \"false\"\n bck.thinProvisioned = options[:thin_provisioned].to_s\n bck.writeThrough = \"false\"\n bck.fileName = backingFile\n begin\n dsn = @invObj.path2dsName(@dsPath)\n bck.datastore = @invObj.dsName2mo_local(dsn)\n rescue\n bck.datastore = nil\n end\n end\n end\n end\n end\n end\n\n logger.info \"MiqVimVm(#{@invObj.server}, #{@invObj.username}).addDisk: calling reconfigVM_Task\"\n taskMor = @invObj.reconfigVM_Task(@vmMor, vmConfigSpec)\n logger.info \"MiqVimVm(#{@invObj.server}, #{@invObj.username}).addDisk: returned from reconfigVM_Task\"\n waitForTask(taskMor)\n end", "def ec2_instance(instance_id)\n EC2Instance.new(instance_id)\n end", "def ec2_instance(instance_id)\n EC2Instance.new(instance_id)\n end", "def create_vbd(vm_ref, vdi_ref, position)\n vbd_record = {\n \"VM\" => vm_ref,\n \"VDI\" => vdi_ref,\n \"empty\" => false,\n \"other_config\" => {\"owner\"=>\"\"},\n \"userdevice\" => position.to_s,\n \"bootable\" => true,\n \"mode\" => \"RW\",\n \"qos_algorithm_type\" => \"\",\n \"qos_algorithm_params\" => {},\n \"qos_supported_algorithms\" => [],\n \"type\" => \"Disk\"\n }\n\n task = xapi.Async.VBD.create(vbd_record)\n ui.msg \"Waiting for VBD create\"\n vbd_ref = get_task_ref(task) \n end", "def adopt_existing_volumes!\n Volume.all.each do |ec2_vol|\n next if ec2_vol.deleted? || ec2_vol.deleting?\n instance = Instance.find(ec2_vol.attached_instance_id) ; p instance ; next unless instance\n cluster_node_id = instance.get_cluster_node_id(self.name) ; next unless cluster_node_id\n cluster_vol_id = cluster_node_id + '-' + ec2_vol.device\n volume_in_cluster = @all_volumes[cluster_vol_id] ; next unless volume_in_cluster\n ec2_vol.update! 
volume_in_cluster.logical_attributes\n @all_volumes[cluster_vol_id] = ec2_vol\n end\n end", "def aws_instance_create(opts)\n AWS::EC2::InstanceCollection.new.create(\n image_id: Rails.configuration.x.aws[Rails.configuration.x.aws['region']][\"ami_#{self.os}\"], \n private_ip_address: self.ip_address,\n key_name: Rails.configuration.x.aws['ec2_key_pair_name'],\n user_data: self.generate_init,\n instance_type: \"t2.small\",\n subnet: self.subnet.driver_id\n )\n end", "def create_ec2_instance(attrs)\n instance = ec2.instances.create(attrs)\n perform_instance_checks(instance)\n instance\n end", "def deploy(open_vz_data, container)\n OpenNebula.log_debug(\"Deploying vm #{open_vz_data.vmid} using ctid:#{container.ctid}\")\n\n # create symlink to enable ovz to find image\n template_name = container.ctid\n template_cache = create_template template_name, open_vz_data.disk\n\n # options to be passed to vzctl create\n options = process_options open_vz_data.raw, {:ostemplate => template_name}\n\n # create and run container\n container.create( options )\n container.start\n\n # set up networking\n apply_network_settings container, open_vz_data.networking\n\n # and contextualise it\n contextualise container, open_vz_data.context_disk, open_vz_data.context\n \n container.ctid\n rescue RuntimeError => e\n raise OpenVzDriverError, \"Container #{container.ctid} can't be deployed. Details: #{e.message}\"\n ensure\n # cleanup template cache - we don't need it anymore\n File.delete template_cache if template_cache and File.exists? template_cache\n end", "def mountiso(vid, iso)\n perform_request(:action => 'vserver-mountiso', :vserverid => vid, :iso => iso)\n end", "def create_image(params)\n instance_id = params['InstanceId']\n instance_id = instance_id.split('-')[1]\n\n vm = VirtualMachine.new(\n VirtualMachine.build_xml(instance_id),\n @client)\n\n rc = vm.info\n if OpenNebula::is_error?(rc)\n rc.ec2_code = \"InvalidInstanceID.NotFound\"\n return rc\n end\n\n image_id = vm.disk_saveas(1,\n params[\"Name\"],\n OpenNebula::Image::IMAGE_TYPES[0])\n\n # TODO Add AMI Tags\n # TODO A new persistent image should be created for each instance\n\n if OpenNebula::is_error?(image_id)\n return image_id\n end\n\n erb_version = params['Version']\n\n response = ERB.new(File.read(@config[:views]+\"/create_image.erb\"))\n return response.result(binding), 200\n end", "def create_snapshot(volume_id, options = {})\n action = 'CreateSnapshot'\n params = {\n 'Action' => action,\n 'VolumeId' => volume_id\n }\n params['Description'] = options[:description] unless options[:description].blank?\n\n response = send_query_request(params)\n parser = Awsum::Ec2::SnapshotParser.new(self)\n snapshot = parser.parse(response.body)[0]\n if options[:tags] && options[:tags].size > 0\n create_tags snapshot.id, options[:tags]\n end\n snapshot\n end", "def create_volume(options = {})\n options[:capacity] = options[:capacity] * GB if options[:capacity] < 100000\n vol = pool.create_volume_xml(Volume.to_xml(options))\n Volume.new vol, self\n end", "def add_instance_disk(array, instance, disk)\n if instance.managed_disk?\n disk_type = 'managed'\n disk_location = disk.managed_disk.id\n managed_disk = @managed_disks.find { |d| d.id.casecmp(disk_location).zero? 
}\n\n if managed_disk\n disk_size = managed_disk.properties.disk_size_gb.gigabytes\n mode = managed_disk.try(:sku).try(:name)\n else\n _log.warn(\"Unable to find disk information for #{instance.name}/#{instance.resource_group}\")\n disk_size = nil\n mode = nil\n end\n else\n disk_type = 'unmanaged'\n disk_location = disk.try(:vhd).try(:uri)\n disk_size = disk.try(:disk_size_gb).try(:gigabytes)\n\n if disk_location\n uri = Addressable::URI.parse(disk_location)\n storage_name = uri.host.split('.').first\n container_name = File.dirname(uri.path)\n blob_name = uri.basename\n\n storage_acct = @storage_accounts.find { |s| s.name.casecmp(storage_name).zero? }\n mode = storage_acct.try(:sku).try(:name)\n\n if @options.get_unmanaged_disk_space && disk_size.nil?\n storage_keys = @sas.list_account_keys(storage_acct.name, storage_acct.resource_group)\n storage_key = storage_keys['key1'] || storage_keys['key2']\n blob_props = storage_acct.blob_properties(container_name, blob_name, storage_key)\n disk_size = blob_props.content_length.to_i\n end\n end\n end\n\n disk_record = {\n :device_type => 'disk',\n :controller_type => 'azure',\n :device_name => disk.name,\n :location => disk_location,\n :size => disk_size,\n :disk_type => disk_type,\n :mode => mode\n }\n\n array << disk_record\n end", "def mount_volume(container_name = nil, volume_name:, mount_path:, **kwargs)\n object = { name: volume_name, mountPath: mount_path }.merge(kwargs)\n log.info(\"Mounting volume: #{object}\")\n volume_mounts(container_name) << object\n end", "def configure_disks(vb, server, hostname, name)\n vminfo = vm_info(name)\n disks = server['disks'] || {}\n unless vminfo =~ /Storage Controller Name \\(1\\): *SATA Controller/\n # puts \"Attaching SATA Controller\"\n vb.customize [\n 'storagectl', :id,\n '--name', 'SATA Controller',\n '--add', 'sata',\n '--portcount', disks.size\n ]\n # else\n # puts 'SATA Controller already attached'\n end\n\n disks.each_with_index do |disk, i|\n disk_name = disk.first\n disk_size = disk.last['size']\n disk_uuid = disk.last['uuid']\n real_uuid = \"00000000-0000-0000-0000-#{disk_uuid.rjust(12,'0')}\"\n if server['cluster']\n disk_filename = File.join(VAGRANT_ROOT, \"#{disk_name}_#{server['cluster']}.vdi\")\n else\n disk_filename = File.join(VAGRANT_ROOT, \"#{disk_name}.vdi\")\n end\n\n if File.file?(disk_filename)\n # puts \"Disk #{disk_filename} already created\"\n disk_hash = `VBoxManage showmediuminfo \"#{disk_filename}\"`.scan(/(.*): *(.*)/).to_h\n current_uuid = disk_hash['UUID']\n else\n # puts \"Creating disk #{disk_filename}\"\n current_uuid = '0'\n if server['cluster']\n vb.customize [\n 'createhd',\n '--filename', disk_filename,\n '--size', disk_size.to_s,\n '--variant', 'Fixed'\n ]\n vb.customize [\n 'modifyhd', disk_filename,\n '--type', 'shareable'\n ]\n else\n vb.customize [\n 'createhd',\n '--filename', disk_filename,\n '--size', disk_size.to_s,\n '--variant', 'Standard'\n ]\n end\n end\n\n # Conditional for adding disk_uuid\n if server['cluster'] && current_uuid == real_uuid\n # puts \"Attaching shareable disk #{disk_filename}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--mtype', 'shareable'\n ]\n elsif server['cluster']\n # puts \"Attaching shareable disk #{disk_filename}, adding UUID #{real_uuid}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', 
disk_filename,\n '--mtype', 'shareable',\n '--setuuid', real_uuid\n ]\n elsif current_uuid == real_uuid\n # puts \"Attaching normal disk #{disk_filename}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename\n ]\n else\n # puts \"Attaching normal disk #{disk_filename}, adding UUID #{real_uuid}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--setuuid', real_uuid\n ]\n end\n end\nend" ]
[ "0.7741415", "0.77355874", "0.7572211", "0.75179416", "0.7482979", "0.7384399", "0.7287279", "0.7228706", "0.7121573", "0.7054218", "0.6932078", "0.69124585", "0.6901333", "0.67454475", "0.6722063", "0.6716593", "0.6690154", "0.6649605", "0.6640001", "0.6605321", "0.63948745", "0.63780355", "0.63007253", "0.627916", "0.6275866", "0.62661856", "0.6254984", "0.6245867", "0.6189199", "0.61697364", "0.6081454", "0.6058432", "0.6047725", "0.59678876", "0.5966085", "0.59361935", "0.5917842", "0.5875886", "0.5869419", "0.58362836", "0.5829304", "0.5817827", "0.58036035", "0.5802105", "0.57920825", "0.5755146", "0.5750988", "0.57417434", "0.57343954", "0.570518", "0.56729954", "0.56554466", "0.56469977", "0.56179565", "0.56023264", "0.5580143", "0.5569868", "0.55651516", "0.55651516", "0.55534565", "0.5549331", "0.553177", "0.553114", "0.5509533", "0.5489498", "0.5438825", "0.5426955", "0.5422154", "0.54218435", "0.54153585", "0.5351563", "0.5344455", "0.5344163", "0.53387624", "0.5335965", "0.5335612", "0.5331469", "0.5298181", "0.5274765", "0.5256424", "0.52451384", "0.52355105", "0.5235325", "0.5230506", "0.5218693", "0.52033544", "0.52004635", "0.52004635", "0.5186997", "0.51745135", "0.51711494", "0.5170481", "0.5151854", "0.51288635", "0.51286465", "0.5127542", "0.511582", "0.51152", "0.51040053", "0.50968885" ]
0.6268131
25
Detach an EBS volume from an EC2 instance
def detach_disk(instance_id, disk_id) with_thread_name("detach_disk(#{instance_id}, #{disk_id})") do instance = @ec2_client.instances[instance_id] volume = @ec2_client.volumes[disk_id] if volume.exists? detach_ebs_volume(instance, volume) else @logger.info("Disk `#{disk_id}' not found while trying to detach it from vm `#{instance_id}'...") end update_agent_settings(instance) do |settings| settings["disks"] ||= {} settings["disks"]["persistent"] ||= {} settings["disks"]["persistent"].delete(disk_id) end logger.info("Detached `#{disk_id}' from `#{instance_id}'") end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def detach()\n $ec2.describe_volumes([self.id]).each do |result|\n if result[:aws_attachment_status] == 'attached'\n $ec2.detach_volume(self.id)\n end\n end\n self.attached_instance = nil\n self.save()\n end", "def terminate_volume\n device = get_device_mount_point(self.MountPoint)\n STDERR.puts \"EBS device detected: #{device}...umounting it...\"\n umount_dir(self.MountPoint)\n #detache the mounted volume\n STDERR.puts \"Detaching current EBS volume:\"\n detached_vol=detach_volume(device)\n STDERR.puts \"detachment started (#{detached_vol.inspect})\"\n# this is necessary, for delete to succeed. however it may be too long of a wait time for decom\n wait_for_detachment(device, 60)\n delete_volume(detached_vol['aws_id'])\n detached_vol['aws_id']\n rescue => e\n display_exception(e, \"terminate_volume\")\n end", "def detach!(volume_or_id)\n @@ec2.detach_volume(volume_or_id, id, device)\n @volumes = nil\n end", "def detachvol(compute,volume)\n\t\tprint \"ebsvol[aws]->destroy: detaching #{volume['volumeId']} from #{volume['attachmentSet'][0]['instanceId']}\\n\" if $debug\n\t\tresponse = compute.detach_volume(volume['volumeId'], \n\t\t\t\t\t\t\t\t\t\t\t\t\t{ 'Device' => volume['attachmentSet'][0]['device'], \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t'Force' => true, \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t'InstanceId' => volume['attachmentSet'][0]['instanceId'] })\n\t\tif (response.status == 200)\n\t\t\t# now wait for it to detach!\n\t\t\tcheck = volinfo(compute,volume['tagSet']['Name'])\n\t\t\twhile ( check['status'] != 'available' ) do\n\t\t\t\tprint \"ebsvol[aws]->detachvol: status is #{check['status']}\\n\" if $debug\n\t\t\t\tsleep 5\n\t\t\t\tcheck = volinfo(compute,volume['tagSet']['Name'])\n\t\t\tend\n\t\t\tsleep 5 # allow aws to propigate the fact\n\t\t\tprint \"ebsvol[aws]->detachvol: volume is now detached\\n\" if $debug\n\t\telse\n\t\t\traise \"ebsvol[aws]->detachvol: Sorry, I could not detach #{volume['volumeId']} from #{volume['attachmentSet'][0]['instanceId']}\"\n\t\tend\n\tend", "def detach_volume( options = {} )\n options = { :volume_id => '' }.merge(options)\n raise ArgumentError, \"No :volume_id provided\" if options[:volume_id].nil? || options[:volume_id].empty?\n options = { :instance_id => '' }.merge(options)\n options = { :device => '' }.merge(options)\n options = { :force => '' }.merge(options)\n params = {\n \"VolumeId\" => options[:volume_id],\n \"InstanceId\" => options[:instance_id],\n \"Device\" => options[:device],\n \"Force\" => options[:force].to_s\n }\n return response_generator(:action => \"DetachVolume\", :params => params)\n end", "def detachvolume\n if not checkRequirements([\"thezone\",\"theserver\",\"thevolume\"]) \n return false\n end\n checkToken(@thezone)\n data = queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@theserver.azone.name}/instances/#{@theserver.serial}/detachDisk', :method => 'post', :options => '', :access_token => @thezone.token )\n data ? 
data[\"floating_ip\"][\"ip\"] : false\n end", "def detach_volume(volume_id, options = {})\n action = 'DetachVolume'\n params = {\n 'Action' => action,\n 'VolumeId' => volume_id\n }\n params['InstanceId'] = options[:instance_id] unless options[:instance_id].blank?\n params['Device'] = options[:device] unless options[:device].blank?\n params['Force'] = options[:force] unless options[:force].blank?\n\n response = send_query_request(params)\n response.is_a?(Net::HTTPSuccess)\n end", "def detach_volume(id, attachment)\n return delete_request(address(\"/servers/\"+id+\"/os-volume_attachments/\"+volume), @token)\n end", "def detach_disk(instance, zone, device_name)\n data = false\n conn = @ec2_main.environment.connection\n if conn != nil\n response = conn.detach_disk(instance, zone, device_name)\n if response.status == 200\n data = response.body\n else\n data = {}\n end\n else\n raise \"Connection Error\"\n end\n return data\n end", "def detach_volume(server, volume)\n @logger.info(\"Detaching volume `#{volume.id}' from `#{server.id}'...\")\n volume_attachments = @openstack.with_openstack { server.volume_attachments }\n attachment = volume_attachments.find { |a| a['volumeId'] == volume.id }\n if attachment\n @openstack.with_openstack { server.detach_volume(volume.id) }\n @openstack.wait_resource(volume, :available)\n else\n @logger.info(\"Disk `#{volume.id}' is not attached to server `#{server.id}'. Skipping.\")\n end\n end", "def umount_and_detach_device(options={})\n detached_vol=nil\n device = get_device_mount_point(self.MountPoint)\n if(options[:device])\n STDERR.puts \"WARNING! the previously mounted device (#{device}) is different from the device we're asking to detach (#{options[:device]})\"\n device = options[:device]\n end\n begin\n umount_dir(self.MountPoint)\n rescue Exception => e\n STDERR.puts \"#{e}\\n ...continuing without unmounting\"\n end\n #detache the mounted volume\n STDERR.puts \"Detaching volume in device #{device}:\"\n begin\n detached_vol=detach_volume(device)\n\n raise EBSRemoteExecException.new(nil,$?,\"Timeout while waiting for the device to attach\") unless wait_for_detachment(device)\n rescue Exception => e\n display_exception(e, \"unmount_and_detach_device\")\n STDERR.puts \"...was the previously mounted DB directory not an EBS volume??\\n continuing without the detachment...\"\n end\n detached_vol\n end", "def detach_volume(volume_id, timeout)\n Chef::Log.debug(\"Detaching #{volume_id}\")\n vol = volume_by_id(volume_id)\n orig_instance_id = vol[:aws_instance_id]\n ec2.detach_volume(volume_id)\n\n # block until detached\n begin\n Timeout::timeout(timeout) do\n while true\n vol = volume_by_id(volume_id)\n if vol && vol[:aws_status] != \"deleting\"\n if vol[:aws_instance_id] != orig_instance_id\n Chef::Log.info(\"Volume detached from #{orig_instance_id}\")\n break\n else\n Chef::Log.debug(\"Volume: #{vol.inspect}\")\n end\n else\n Chef::Log.debug(\"Volume #{volume_id} no longer exists\")\n break\n end\n sleep 3\n end\n end\n rescue Timeout::Error\n raise \"Timed out waiting for volume detachment after #{timeout} seconds\"\n end\n end", "def detach_disk(instance_id, disk_id)\n with_thread_name(\"detach_disk(#{instance_id}, #{disk_id})\") do\n instance = @ec2_resource.instance(instance_id)\n volume = @ec2_resource.volume(disk_id)\n\n if has_disk?(disk_id)\n detach_ebs_volume(instance, volume)\n else\n @logger.info(\"Disk `#{disk_id}' not found while trying to detach it from vm `#{instance_id}'...\")\n end\n\n update_agent_settings(instance) do |settings|\n 
settings[\"disks\"] ||= {}\n settings[\"disks\"][\"persistent\"] ||= {}\n settings[\"disks\"][\"persistent\"].delete(disk_id)\n end\n\n logger.info(\"Detached `#{disk_id}' from `#{instance_id}'\")\n end\n end", "def terminate_device(dev, keep_volume=true)\n vol_info = detach_volume(dev) \n wait_for_detachment(dev)\n delete_volume(vol_info['aws_id']) unless vol_info.nil? || keep_volume\n #depricating this rescue block -> see wait_for_detachment\n rescue Exception => e\n display_exception(e, \"terminate_device(#{dev}, #{keep_volume})\")\n STDERR.puts \"...was the previously mounted DB directory not an EBS volume??\\n continuing without the detachment...\"\n end", "def detach_storage_volume(volume_id)\n must_support! :storage_volumes\n result = connection.post(api_uri(\"/storage_volumes/#{volume_id}/detach\"))\n if result.status.is_ok?\n from_resource(:storage_volume, result)\n end\n end", "def detach_planned_volume(mapping)\n payload = {:agent_identity => @agent_identity, :device_name => mapping[:device_name]}\n Log.info(\"Detaching volume #{mapping[:volume_id]} for management purposes.\")\n req = RetryableRequest.new(\"/storage_valet/detach_volume\", payload, :retry_delay => VolumeManagement::VOLUME_RETRY_SECONDS)\n\n req.callback do |res|\n # don't set :volume_status here as that should only be queried\n mapping[:management_status] = 'detached'\n mapping[:attempts] = nil\n yield if block_given?\n end\n\n req.errback do |res|\n unless InstanceState.value == 'stranded'\n # volume could already be detaching or have been deleted\n # which we can't see because of latency; go around again\n # and check state of volume later.\n Log.error(\"Failed to detach volume #{mapping[:volume_id]} (#{res})\")\n mapping[:attempts] ||= 0\n mapping[:attempts] += 1\n # retry indefinitely so long as core api instructs us to retry or else fail after max attempts.\n if mapping[:attempts] >= VolumeManagement::MAX_VOLUME_ATTEMPTS\n strand(\"Exceeded maximum of #{VolumeManagement::MAX_VOLUME_ATTEMPTS} attempts detaching volume #{mapping[:volume_id]} with error: #{res}\")\n else\n yield if block_given?\n end\n end\n end\n\n req.run\n end", "def detach\n begin\n run_baby_run 'hdiutil', ['detach', '-force', self.dev_node], :err => '/dev/null'\n rescue Exception => ex\n debug \"The disk image at #{@dev_node} could not be detached: #{ex}\"\n end\n end", "def detachvolume\n false\n end", "def aws_terminate_instance_and_ebs_volumes( iprops )\n ec2 = AWS::EC2.new.regions[ iprops[ :region ] ]\n inst = ec2.instances[ iprops[ :id ] ]\n unless inst.exists?\n raise \"Instance #{iprops[:id]} does not exist in #{iprops[:region]}\"\n end\n\n ebs_volumes = inst.block_devices.map do |dev|\n ebs = dev[ :ebs ]\n if ebs && dev[:device_name] =~ /dh\\d+$/ && !ebs[:delete_on_termination]\n ebs[ :volume_id ]\n end\n end.compact\n\n inst.terminate\n wait_until( \"termination of #{inst.id}\", 2.0 ) { inst.status == :terminated }\n\n ebs_volumes = ebs_volumes.map do |vid|\n volume = ec2.volumes[ vid ]\n if volume.exists?\n volume\n else\n puts \"WARN: #{volume} doesn't exist\"\n nil\n end\n end.compact\n\n ebs_volumes.each do |vol|\n wait_until( \"deletion of vol #{vol.id}\" ) do\n vol.status == :available || vol.status == :deleted\n end\n vol.delete if vol.status == :available\n end\n\n found = aws_find_instance( iprops )\n if found\n aws_instance_removed( found )\n aws_write_instances\n end\n end", "def detach_from_elbs(instance:)\n unless instance.is_a?(Aws::OpsWorks::Types::Instance)\n fail(ArgumentError, \"instance must be a 
Aws::OpsWorks::Types::Instance struct\")\n end\n\n all_load_balancers = @elb_client.describe_load_balancers\n .load_balancer_descriptions\n\n load_balancers = detach_from(all_load_balancers, instance)\n\n @lb_wait_params = []\n\n load_balancers.each do |lb|\n params = {\n load_balancer_name: lb.load_balancer_name,\n instances: [{ instance_id: instance.ec2_instance_id }]\n }\n\n remaining_instances = @elb_client\n .deregister_instances_from_load_balancer(params)\n .instances\n\n log(<<-MSG.split.join(\" \"))\n Will detach instance #{instance.hostname} from\n #{lb.load_balancer_name} (remaining attached instances:\n #{remaining_instances.count.to_s})\n MSG\n\n @lb_wait_params << params\n end\n \n unless @regional_deploy\n if @lb_wait_params.any?\n wait_for_detach(@lb_wait_params)\n else\n log(\"No load balancers found for instance #{instance.hostname}\")\n end\n end\n\n load_balancers\n end", "def detach_volume(name=nil, volname=nil)\n if name\n @mgr.setparam(\"name\", name)\n end\n if volname\n @mgr.setparam(\"volname\", volname)\n end\n @mgr.normalize_name_parameters()\n name, volname = @mgr.getparams(\"name\", \"volname\")\n instance, err = resolve_instance()\n unless instance\n yield \"#{@mgr.timestamp()} detach_volume called on non-existing instance #{name}: #{err}\"\n return nil\n end\n return detach_instance_volume(instance, volname) { |s| yield s }\n end", "def detach_disk(server_id, disk_id)\n with_thread_name(\"detach_disk(#{server_id}, #{disk_id})\") do\n server = @openstack.with_openstack { @openstack.compute.servers.get(server_id) }\n cloud_error(\"Server `#{server_id}' not found\") unless server\n\n volume = @openstack.with_openstack { @openstack.volume.volumes.get(disk_id) }\n if volume.nil?\n @logger.info(\"Disk `#{disk_id}' not found while trying to detach it from vm `#{server_id}'...\")\n else\n detach_volume(server, volume)\n end\n\n update_agent_settings(server) do |settings|\n settings['disks'] ||= {}\n settings['disks']['persistent'] ||= {}\n settings['disks']['persistent'].delete(disk_id)\n end\n end\n end", "def detach_disk(instance_id, disk_id)\n with_thread_name(\"detach_disk(#{instance_id}, #{disk_id}):v2\") do\n @cloud_core.detach_disk(instance_id, disk_id) do |disk_id|\n if @stemcell_api_version < 2\n update_agent_settings(instance_id) do |settings|\n settings['disks'] ||= {}\n settings['disks']['persistent'] ||= {}\n settings['disks']['persistent'].delete(disk_id)\n end\n end\n end\n end\n end", "def detach_disk(vm_id, disk_id)\n with_thread_name(\"detach_disk(#{vm_id}, #{disk_id})\") do\n begin\n return unless volume_group_attached?(vm_id, disk_id)\n @logger.debug(\"Detaching volume group #{disk_id} to VM #{vm_id}\")\n @vol_group_manager.detach_from_vm(disk_id, vm_id)\n update_agent_settings(vm_id) do |settings|\n settings['disks']['persistent'] ||= {}\n settings['disks']['persistent'].delete(disk_id)\n end\n rescue => e\n @logger.error(e)\n cloud_error(e.message)\n end\n end\n end", "def detach(id, cidr)\n self.execute(['--local', 'detach', cidr, id])\n end", "def detach_file_from_parallels_vm(options)\n message = \"Information:\\tAttaching Image \"+options['file']+\" to \"+options['name']\n command = \"prlctl set \\\"#{options['name']}\\\" --device-set cdrom0 --disable\\\"\"\n execute_command(options,message,command)\n return\nend", "def detach_storage(server_uuid, address:)\n data = {\n \"storage_device\" => {\n \"address\" => address\n }\n }\n\n json = JSON.generate data\n\n response = post \"server/#{server_uuid}/storage/detach\", json\n\n response\n end", 
"def detach_disk(instance_id, disk_id)\n with_thread_name(\"detach_disk(#{instance_id}, #{disk_id})\") do\n @cloud_core.detach_disk(instance_id, disk_id) do |disk_id|\n update_agent_settings(instance_id) do |settings|\n settings['disks'] ||= {}\n settings['disks']['persistent'] ||= {}\n settings['disks']['persistent'].delete(disk_id)\n end\n end\n end\n end", "def handle_detach_request(obj, original_message)\n if obj.include? :disk\n params = obj[:disk]\n disk_number = params[:disk_number]\n\n disk = VdsDisk.find_by_number(disk_number)\n if disk.nil?\n msg = Cirrocumulus::Message.new(nil, 'refuse', [original_message.content, [:disk_not_found]])\n msg.ontology = self.name\n self.agent.reply_to_message(msg, original_message)\n return\n end\n\n vdses = @engine.match [:virtual_disk, disk_number, :attached_to, :VDS, :as, :BLOCK_DEVICE]\n if vdses.empty?\n msg = Cirrocumulus::Message.new(nil, 'refuse', [original_message.content, [:already_detached]])\n msg.ontology = self.name\n self.agent.reply_to_message(msg, original_message)\n return\n end\n\n vds_uid = vdses.first[:VDS]\n\n vds = VpsConfiguration.find_by_uid(vds_uid)\n if vds.nil?\n msg = Cirrocumulus::Message.new(nil, 'refuse', [original_message.content, [:vds_not_found]])\n msg.ontology = self.name\n self.agent.reply_to_message(msg, original_message)\n return\n end\n\n vds.detach_disk(disk)\n create_saga(DetachVirtualDiskSaga).start(disk_number, original_message)\n end\n end", "def unmount_kvm_volume(name, dev)\n unmount(name)\n disable_netblockdev(dev)\nend", "def attachvol(compute,volume,instance,device)\n\t\tprint \"Running attachvol\\n\" if $debug\n\t\traise ArgumentError \"ebsvol[aws]->attachvol: Sorry, you must specify a valid device matching /dev/sd[a-m].\" if (device !~ /^\\/dev\\/sd[a-m]/)\n\t\tif (volume['status'] != \"in-use\" )\n\t\t\t# check instance is in the same availability zone\n\t\t\tif ( volume['availabilityZone'] != instance['placement']['availabilityZone'])\n\t\t\t\traise \"ebsvol[aws]->attachvol: Sorry, volumes must be in the same availability zone as the instance to be attached to.\\nThe volume #{volume['tagSet']['Name']} is in availability zone #{volume['availabilityZone']} and the instance is in #{instance['placement']['availabilityZone']}\" \n\t\t\telse\n\t\t\t\t# check that the device is available\n\t\t\t\tinuse = false\n\t\t\t\tinstance['blockDeviceMapping'].each { |x| inuse=true if x['deviceName'] == device }\n\t\t\t\tif ( inuse )\n\t\t\t\t\traise \"ebsvol[aws]->attachvol: Sorry, the device #{device} is already in use on #{instance['tagSet']['Name']}\" \n\t\t\t\telse\n\t\t\t\t\tresp = compute.attach_volume(instance['instanceId'],volume['volumeId'],device)\n\t\t\t\t\tif (resp.status == 200)\n\t\t\t\t\t\t# now wait for it to attach!\n\t\t\t\t\t\tcheck = volinfo(compute,volume['tagSet']['Name'])\n\t\t\t\t\t\twhile ( check['status'] !~ /(attached|in-use)/ ) do\n\t\t\t\t\t\t\tprint \"ebsvol[aws]->attachvol: status is #{check['status']}\\n\" if $debug\n\t\t\t\t\t\t\tsleep 5\n\t\t\t\t\t\t\tcheck = volinfo(compute,volume['tagSet']['Name'])\n\t\t\t\t\t\tend\n\t\t\t\t\t\tsleep 5 # allow aws to propigate the fact\n\t\t\t\t\t\tprint \"ebsvol[aws]->attachvol: volume is now attached\\n\" if $debug\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\telse\n\t\t\traise \"ebsvol[aws]->attachvol: Sorry, I could not attach #{volume['volumeId']} because it is in use!\"\n\t\tend\n\tend", "def detach_volume_queue(userid, server_ems_ref)\n task_opts = {\n :action => \"detaching Cloud Volume for user #{userid}\",\n :userid => userid\n }\n\n 
queue_opts = {\n :class_name => self.class.name,\n :method_name => 'detach_volume',\n :instance_id => id,\n :role => 'ems_operations',\n :queue_name => ext_management_system.queue_name_for_ems_operations,\n :zone => ext_management_system.my_zone,\n :args => [server_ems_ref]\n }\n\n MiqTask.generic_action_with_callback(task_opts, queue_opts)\n end", "def delete_server!(conn, server_name, delete_volumes = false)\n server = conn.servers.find{ |i| i.name == server_name }\n if server\n @log.info \"Deleting instance with name: #{server_name}\"\n # check and delete any floating ip addresses associated with instance\n server.all_addresses.each do |address|\n if address['ip']\n begin\n @log.info \"Disassociating floating ip address associated with instance: #{server.name}\"\n conn.disassociate_address(server.id, address['ip'])\n @log.info \"Releasing floating ip address: #{address['ip']}\"\n conn.release_address(conn.addresses.find {|a| a.ip == address['ip']}.id)\n rescue Exception => ex\n @log.debug \"Error encountered releasing floating ip, reason: #{ex}\"\n # continue\n end\n end\n end\n server.destroy\n if delete_volumes\n volume_service = Fog::Volume::OpenStack.new(\n :openstack_api_key => @os_password,\n :openstack_username => @os_username,\n :openstack_auth_url => @os_auth_url,\n :openstack_tenant => @os_tenant,\n )\n vols_to_del = volume_service.volumes.find{|v| v.display_name =~ /#{server_name}/}\n if vols_to_del\n vols_to_del.each do |vol|\n @log.info \"Waiting for volume to detach from instance: #{server_name}\"\n wait_for_vol(volume_service, vol.id)\n vol.destroy\n end\n else\n @log.info \"No volumes attached for the instance #{server_name}\"\n end\n end\n end\n end", "def restore_from_snap(last_snapshot, options = {})\n options[:device] = \"/dev/sdk\" unless options[:device]\n options[:vol_nickname] = last_snapshot[\"nickname\"] unless options[:vol_nickname]\n \n # 5 - Unmount and detach the current EBS volume (forcing to detach the device we're gonna need later for attching ours...)\n umount_and_detach_device({:device => options[:device]})\n # 6- Create the volume from the latest snapshot, attach it to the instance and then mount it\n STDERR.puts \"Creating new DB volume from snapshot #{last_snapshot['aws_id']}\"\n vol = ( options[:new_size_gb] ? create_volume_from_snap_size_gb(last_snapshot[\"aws_id\"],options[:vol_nickname],options[:new_size_gb] ) : create_volume_from_snap(last_snapshot[\"aws_id\"],options[:vol_nickname] ) )\n unless vol.nil?\n \tSTDERR.puts \"Attaching new DB volume: #{vol['aws_id']}\"\n \tatt = attach_volume(vol['aws_id'], options[:device])\n \twait_for_attachment(options[:device])\n \tFileUtils.mkdir_p self.MountPoint\n \tres = `mount -t xfs -o noatime #{options[:device]} #{self.MountPoint}`\n \traise EBSRemoteExecException.new(nil,$?,\"Error mounting newly created volume (#{vol['aws_id']}) on #{options[:device]}:\\n\"+res) if $? != 0 \n else\n\t raise \"create volume failed from snapshot\"\n end\n end", "def detach_disk(device_name, async = true)\n requires :identity, :zone\n\n data = service.detach_disk(identity, zone, device_name)\n operation = Fog::Compute::Google::Operations\n .new(:service => service)\n .get(data.name, data.zone)\n operation.wait_for { ready? 
} unless async\n reload\n end", "def attach!(volume_or_id, device)\n @@ec2.attach_volume(volume_or_id, id, device)\n @volumes = nil\n end", "def volumes\n volumes = @ec2.volumes\n volumes.delete_if {|v| v.instance_id != id}\n end", "def detach_disk request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_detach_disk_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def aws_instance_volumes_delete_on_termination_set(instance)\n log \"AWS: setting Instance '#{self.driver_id}' volumes deleteOnTermination\"\n aws_call('aws_instance_block_devices_get', instance: instance).each do |block_device|\n aws_call('aws_instance_block_device_ebs_delete_on_termination_set', instance: instance, block_device: block_device)\n end\n end", "def detach_disk(vm_cid, disk_cid)\n @telemetry_manager.monitor('initialize') do\n _init_azure\n end\n with_thread_name(\"detach_disk(#{vm_cid},#{disk_cid})\") do\n @telemetry_manager.monitor('detach_disk', id: vm_cid) do\n if _should_write_to_registry?\n _update_agent_settings(vm_cid) do |settings|\n settings['disks'] ||= {}\n settings['disks']['persistent'] ||= {}\n settings['disks']['persistent'].delete(disk_cid)\n end\n end\n\n @vm_manager.detach_disk(\n InstanceId.parse(vm_cid, _azure_config.resource_group_name),\n DiskId.parse(disk_cid, _azure_config.resource_group_name)\n )\n\n @logger.info(\"Detached '#{disk_cid}' from '#{vm_cid}'\")\n end\n end\n end", "def terminate(instance, decrement=false)\n Log.log \"Detaching #{instance.instance_id.light_yellow} from ASG\"\n client.detach_instances(\n instance_ids: [ instance.instance_id ],\n auto_scaling_group_name: asg_name,\n should_decrement_desired_capacity: false)\n\n # need to describe the instance status in the asg here so that we wait till connections have drained.\n count = 0\n Log.log \"Awaiting connection draining... \", newline: false\n while present?(instance.instance_id) && count < 120\n sleep 1\n count += 1\n end\n Log.log \"done\", timestamp: false\n\n Log.log \"Terminating #{instance.instance_id.light_red}... 
\", newline: false\n ec2_client.terminate_instances(instance_ids: [ instance.instance_id ])\n Log.log \"done\", timestamp: false\n end", "def create_ec2_ebs_volume opts\n Ec2EbsVolume.create opts.merge :account => self\n end", "def detach options = {}\n if attachment = self.attachment\n attachment.detach(options)\n else\n raise 'unable to detach network interface, no attachment present'\n end\n end", "def ebs(device_name, type: 'gp2', size: 8)\n {\n device_name: device_name,\n ebs: {\n delete_on_termination: true,\n volume_size: size,\n volume_type: type\n },\n no_device: ''\n }\nend", "def detach_persistent_disks(vm)\n spec_hash = {}\n spec_hash[:deviceChange] = []\n ipool = VCenterDriver::VIHelper.one_pool(OpenNebula::ImagePool)\n if ipool.respond_to?(:message)\n raise \"Could not get OpenNebula ImagePool: #{ipool.message}\"\n end\n\n vm.config.hardware.device.each do |disk|\n next unless disk_or_cdrom?(disk)\n\n # Let's try to find if disks is persistent\n source_unescaped = disk.backing.fileName.sub(\n /^\\[(.*?)\\] /, ''\n ) rescue next\n source = VCenterDriver::FileHelper.escape_path(source_unescaped)\n\n persistent = VCenterDriver::VIHelper\n .find_persistent_image_by_source(\n source, ipool\n )\n\n next unless persistent\n\n spec_hash[:deviceChange] << {\n :operation => :remove,\n :device => disk\n }\n end\n\n return if spec_hash[:deviceChange].empty?\n\n begin\n vm.ReconfigVM_Task(:spec => spec_hash).wait_for_completion\n rescue StandardError => e\n error = \"Cannot detach all DISKs from VM: #{e.message}.\"\n\n if VCenterDriver::CONFIG[:debug_information]\n error += \"\\n\\n#{e.backtrace}\"\n end\n\n raise error\n end\n end", "def detach(filename); end", "def detach(node)\n Result.new(call(CMD_DETACH % node))\n end", "def currently_attached_volume(instance_id, device)\n ec2.describe_volumes.find{|v| v[:aws_instance_id] == instance_id && v[:aws_device] == device}\n end", "def detach_file_from_fusion_vm(options)\n if options['verbose'] == true\n handle_output(options,\"Information:\\tDetaching CDROM from #{options['name']}\")\n end\n if options['host-os-name'].to_s.match(/Linux/)\n fusion_vm_dir = options['fusiondir']+\"/\"+options['name']\n else\n fusion_vm_dir = options['fusiondir']+\"/\"+options['name']+\".vmwarevm\"\n end\n fusion_vmx_file = fusion_vm_dir+\"/\"+options['name']+\".vmx\"\n copy=[]\n file=IO.readlines(fusion_vmx_file)\n file.each do |line|\n (item,value) = line.split(/\\=/)\n item = item.gsub(/\\s+/,\"\")\n case item\n when \"ide0:0.deviceType\"\n copy.push(\"ide0:0.startConnected = TRUE\\n\")\n when \"ide0:0.filename\"\n copy.push(\"\\n\")\n else\n copy.push(line)\n end\n end\n File.open(fusion_vmx_file,\"w\") {|file_data| file_data.puts copy}\n return\nend", "def create\n\t\tregion = resource[:availability_zone].to_s.gsub(/.$/,'') \n\t\tcompute = Fog::Compute.new(:provider => 'aws', :region => \"#{region}\")\n\t\tprint \"ebsvol[aws]->create: Region is #{region}\\n\" if $debug\n\t\tprint \"ebsvol[aws]->create: Availability_zone is #{resource[:availability_zone]}\\n\" if $debug\n\t\t# create the requested volume\n\t\tresponse = compute.create_volume(resource[:availability_zone],resource[:size],resource[:snapshot])\t\n\t\tif (response.status == 200)\n\t\t\tvolumeid = response.body['volumeId']\n\t\t\tprint \"ebsvol[aws]->create: I created volume #{volumeid}.\\n\" if $debug\n\t\t\t# now tag the volume with volumename so we can identify it by name\n\t\t\t# and not the volumeid\n\t\t\tresponse = compute.create_tags(volumeid,{ :Name => resource[:volume_name] })\n\t\t\tif 
(response.status == 200)\n\t\t\t\tprint \"ebsvol[aws]->create: I tagged #{volumeid} with Name = #{resource[:volume_name]}\\n\" if $debug\n\t\t\tend\n\t\t\t# Check if I need to attach it to an ec2 instance.\n\t\t\tattachto = resource[:attached_to].to_s\n\t\t\tprint \"attachto is #{attachto}\\n\" if $debug\n\t\t\tif ( attachto != '' )\n\t\t\t\tif ( attachto == 'me')\n\t\t\t\t\tinstance = instanceinfo(compute,myname(compute))\n\t\t\t\telse\n\t\t\t\t\tinstance = instanceinfo(compute,attachto)\n\t\t\t\tend\n\t\t\t\tif ( resource[:device] != nil )\n\t\t\t\t\t# try to attach the volume to requested instance\n\t\t\t\t\tprint \"attach the volume\\n\" if $debug\n\t\t\t\t\tvolume = volinfo(compute,resource[:volume_name])\n\t\t\t\t\tattachvol(compute,volume,instance,resource[:device])\n\t\t\t\telse\n\t\t\t\t\traise \"ebsvol[aws]->create: Sorry, I can't attach a volume with out a device to attach to!\"\n\t\t\t\tend\n\t\t\tend\n\t\telse\n\t\t\traise \"ebsvol[aws]->create: I couldn't create the ebs volume, sorry!\"\n\t\tend\n\tend", "def detach(opts={})\n send_cmd 'D'\n read_response if opts.fetch(:read, true)\n end", "def down\n updown_command :detach\n end", "def cleanup_storage vm\n vm.volumes.each do |vol|\n @logger.debug \"Deleting volume #{vol.name} for OpenStack host #{vm.name}\"\n vm.detach_volume(vol.id)\n vol.wait_for { ready? }\n vol.destroy\n end\n end", "def ebs_volumes\n @ebs_volumes ||= init_ebs_volumes.reject do |vol|\n vol.attachments.any? do |att|\n attached_instance = id_instances[att.instance_id]\n attached_instance.root_device_name == att.device\n end\n end\n end", "def detach_from(load_balancers, instance)\n check_arguments(instance: instance, load_balancers: load_balancers)\n\n load_balancers.select do |lb|\n matched_instance = lb.instances.any? 
do |lb_instance|\n instance.ec2_instance_id == lb_instance.instance_id\n end\n\n if matched_instance && lb.instances.count > 1\n # We can detach this instance safely because there is at least one other\n # instance to handle traffic\n true\n elsif matched_instance && lb.instances.count == 1\n # We can't detach this instance because it's the only one\n log(<<-MSG.split.join(\" \"))\n Will not detach #{instance.hostname} from load balancer\n #{lb.load_balancer_name} because it is the only instance connected\n MSG\n\n false\n else\n # This load balancer isn't attached to this instance\n false\n end\n end\n end", "def stop()\n self.destroy()\n\n # Stop the EC2 instance\n $ec2.terminate_instances([self.id])\n end", "def deletevolume\n if not checkRequirements([\"thezone\",\"thevolume\"])\n return false\n end\n checkToken(@thezone)\n submit = queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@thevolume.azone.name}/disks/#{@thevolume.serial}', :method => 'delete', :options => '', :access_token => @thezone.token )\n checkQuery(:type => 'zone', :token => @thezone.token, :projectname => @thezone.name, :zonename => @thevolume.azone.name, :operationname => submit[\"name\"])\n end", "def aws_instance_block_device_ebs_delete_on_termination_set(opts)\n AWS::EC2.new.client.modify_instance_attribute(\n instance_id: opts[:instance].id,\n attribute: \"blockDeviceMapping\",\n block_device_mappings: [device_name: opts[:block_device][:device_name], ebs:{ delete_on_termination: true}]\n )\n end", "def remove_gdom_disk(options)\n vds_disk = options['name']+\"_vdisk0\"\n message = \"Information:\\tRemoving disk \"+vds_disk+\" from Virtual Disk Server\"\n command = \"ldm remove-vdisk #{vds_disk} #{options['name']}\"\n execute_command(options,message,command)\n return\nend", "def detach\n\t\tEventMachine::detach_fd @signature\n\tend", "def attach(instance)\n # Attach to the instance\n $ec2.attach_volume(self.id, instance.id, '/dev/sdh')\n\n # Wait for it to be attached\n while true\n done = false\n $ec2.describe_volumes([self.id]).each do |result|\n if result[:aws_attachment_status] == 'attached'\n done = true\n end\n end\n if done\n break\n end\n sleep(5)\n end\n\n # Update the database\n self.attached_instance = instance.id\n self.save()\n end", "def unmountiso(vid)\n perform_request(:action => 'vserver-unmountiso', :vserverid => vid)\n end", "def addVolume(dev, size, type: \"gp2\", delete_on_termination: false)\n\n if setDeleteOntermination(dev, delete_on_termination)\n MU.log \"A volume #{dev} already attached to #{self}, skipping\", MU::NOTICE\n return\n end\n\n MU.log \"Creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).create_volume(\n availability_zone: cloud_desc.placement.availability_zone,\n size: size,\n volume_type: type\n )\n\n MU.retrier(wait: 3, loop_if: Proc.new {\n creation = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(volume_ids: [creation.volume_id]).volumes.first\n if ![\"creating\", \"available\"].include?(creation.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n creation.state != \"available\"\n })\n\n\n if @deploy\n MU::Cloud::AWS.createStandardTags(\n creation.volume_id,\n region: @region,\n credentials: @credentials,\n optional: @config['optional_tags'],\n nametag: @mu_name+\"-\"+dev.upcase,\n othertags: @config['tags']\n )\n end\n\n MU.log \"Attaching 
#{creation.volume_id} as #{dev} to #{@cloud_id} in #{@region} (credentials #{@credentials})\"\n attachment = nil\n MU.retrier([Aws::EC2::Errors::IncorrectState], wait: 15, max: 4) {\n attachment = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).attach_volume(\n device: dev,\n instance_id: @cloud_id,\n volume_id: creation.volume_id\n )\n }\n\n begin\n att_resp = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(volume_ids: [attachment.volume_id])\n if att_resp and att_resp.volumes and !att_resp.volumes.empty? and\n att_resp.volumes.first.attachments and\n !att_resp.volumes.first.attachments.empty?\n attachment = att_resp.volumes.first.attachments.first\n if !attachment.nil? and ![\"attaching\", \"attached\"].include?(attachment.state)\n raise MuError, \"Saw state '#{creation.state}' while creating #{size}GB #{type} volume on #{dev} for #{@cloud_id}\"\n end\n end\n end while attachment.nil? or attachment.state != \"attached\"\n\n # Set delete_on_termination, which for some reason is an instance\n # attribute and not on the attachment\n setDeleteOntermination(dev, delete_on_termination)\n end", "def terminate_instance!(instance_id=nil)\n ec2.terminate_instances(:instance_id => instance_id)\n end", "def destroy\n ret = qmgmt(['volume', 'delete', resource[:name]])\n out = Array.new\n ret.each_line { |l|\n out.push(' ' + l)\n }\n if ( ret.exitstatus != 0 )\n fail(\"quobyte volume delete #{resource[:name]} failed with status #{ret.exitstatus.to_s}. Output follows.\" + out.join(\"\\n\"))\n end\n end", "def terminate(server_name, access_key, secret, snapshot_removal = true, force = false)\n ##############################\n # Initialize AWS and create EC2 connection\n ##############################\n initialize_aws(access_key, secret)\n ec2 = AWS::EC2.new\n\n ##############################\n # Find instance\n ##############################\n instance = nil\n AWS.memoize do\n instances = ec2.instances.filter(\"tag:Name\", server_name)\n instances.each do |i|\n unless i.status == :shutting_down || i.status == :terminated\n instance = i\n break\n end # unless status\n end # instance loop\n end # memoize\n\n if instance\n environment_name = nil\n AWS.memoize do\n environment_name = instance.tags[\"environment\"].strip if instance.tags[\"environment\"]\n end\n\n ##############################\n # ENVIRONMENT\n ##############################\n if environment_name.nil? && ! force\n @log.fatal \"No environment tag found for host. Use the --force option to override and terminate.\"\n exit 3\n end\n\n if (! @environments.has_key?(environment_name)) && (! 
force)\n @log.fatal \"Environment not found: '#{environment_name}'\"\n exit 2\n end\n @environment = @environments[environment_name] if environment_name\n\n ##############################\n # Create Route53 connection\n ##############################\n aws_route53 = nil\n if @environment && @environment.route53_zone_id\n aws_route53 = AWS::Route53.new\n route53 = EC2Launcher::Route53.new(aws_route53, @environment.route53_zone_id, @log)\n end\n\n ##############################\n # EBS Volumes\n ##############################\n # Find EBS volumes\n attachments = nil\n AWS.memoize do\n attachments = instance.block_device_mappings.values\n\n # Remove snapshots\n remove_snapshots(ec2, attachments) if snapshot_removal\n\n # Remove volumes, if necessary\n remove_volumes(ec2, attachments)\n end\n\n private_ip_address = instance.private_ip_address\n \n run_with_backoff(30, 1, \"terminating instance: #{server_name} [#{instance.instance_id}]\") do\n instance.terminate\n end\n\n if route53\n @log.info(\"Deleting A record from Route53: #{server_name} => #{private_ip_address}\")\n route53.delete_record_by_name(server_name, 'A')\n end\n\n @log.info(\"Deleting node/client from Chef: #{server_name}\")\n node_result = `echo \"Y\" |knife node delete #{server_name}`\n client_result = `echo \"Y\" |knife client delete #{server_name}`\n @log.debug(\"Deleted Chef node: #{node_result}\")\n @log.debug(\"Deleted Chef client: #{client_result}\")\n else\n @log.error(\"Unable to find instance: #{server_name}\")\n end\n end", "def attach_volume(volume_id, instance_id, device, timeout)\n Chef::Log.debug(\"Attaching #{volume_id} as #{device}\")\n ec2.attach_volume(volume_id, instance_id, device)\n\n # block until attached\n begin\n Timeout::timeout(timeout) do\n while true\n vol = volume_by_id(volume_id)\n if vol && vol[:aws_status] != \"deleting\"\n if vol[:aws_attachment_status] == \"attached\"\n if vol[:aws_instance_id] == instance_id\n Chef::Log.info(\"Volume #{volume_id} is attached to #{instance_id}\")\n break\n else\n raise \"Volume is attached to instance #{vol[:aws_instance_id]} instead of #{instance_id}\"\n end\n else\n Chef::Log.debug(\"Volume is #{vol[:aws_status]}\")\n end\n sleep 3\n else\n raise \"Volume #{volume_id} no longer exists\"\n end\n end\n end\n rescue Timeout::Error\n raise \"Timed out waiting for volume attachment after #{timeout} seconds\"\n end\n end", "def destroy \n ec2 = self.class.new_ec2(@resource.value(:user), @resource.value(:password))\n ec2.terminate_instances({:instance_id => @property_hash[:instance_id]})\n ec2.delete_security_group({:group_name => @resource.value(:name)})\n end", "def remove_instance(instance)\n deregister_response = client.deregister_instances_from_load_balancer(load_balancer_name: name,\n instances: [{instance_id: instance.ec2_instance_id}])\n remaining_instance_count = deregister_response.instances.size\n puts \"Removed #{instance.hostname} from ELB #{name}. Remaining instances: #{remaining_instance_count}\".light_blue\n _wait_for_connection_draining\n end", "def down(remove_volumes: false)\n run!('down', opts(v: [!!remove_volumes, false]))\n end", "def delete_volume(volume_id)\n data = false\n conn = @ec2_main.environment.volume_connection\n if conn != nil\n if @ec2_main.settings.openstack\n response = conn.delete_volume(volume_id)\n if response.status == 202\n data = response.body\n else\n data = {}\n end\n elsif ((conn.class).to_s).start_with? 
\"Fog::Compute::AWS\"\n response = conn.delete_volume(volume_id)\n if response.status == 200\n data = true\n else\n data = false\n end\n else\n data = conn.delete_volume(volume_id)\n end\n else\n raise \"Connection Error\"\n end\n return data\n end", "def destroy\n @vdisk = Vdisk.find(params[:id])\n @vdisk.destroy\n\n respond_to do |format|\n format.html { redirect_to vdisks_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @ec2_instance.destroy\n respond_to do |format|\n format.html { redirect_to ec2_instances_url, notice: 'Ec2 instance was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def is_detaching_volume?(mapping)\n case mapping[:volume_status]\n when 'detaching'\n true\n when 'attached', 'attaching'\n # also detaching if we have successfully requested detachment but volume\n # status does not yet reflect this change.\n 'detached' == mapping[:management_status]\n else\n false\n end\n end", "def detached\n command = args.join(\" \")\n error(\"Usage: heroku run COMMAND\")if command.empty?\n opts = { :attach => false, :command => command }\n release = options[:version]\n process_data = action(\"Running `#{command}` detached\", :success => \"up\") do\n process_data = api.post_ps(app, command, { :attach => false, :release => release }).body\n status(process_data['process'])\n process_data\n end\n display(\"Use `heroku logs -p #{process_data['process']}` to view the output.\")\n end", "def stop_pvm_instance(instance_id)\n post(\n \"cloud-instances/#{guid}/pvm-instances/#{instance_id}/action\",\n {\"action\" => \"stop\"}.to_json\n )\n end", "def unmountiso(vid)\n perform_request(action: 'vserver-unmountiso', vserverid: vid)\n end", "def attach_blank_volume opts\n device = opts.delete :device\n opts = {:ec2_availability_zone => ec2_availability_zone }.merge opts\n volume = account.create_ec2_ebs_volume opts\n attach_volume volume, device\n end", "def delete_volume(vol_id,wait=true)\n params = {:aws_id => vol_id,:api_version => 1.0}\n success=false\n 20.times do |i| \n begin\n STDERR.puts \"Making RightScale API call to delete EBS volume\"\n #STDERR.puts \"HERE IS THE URL: #{@api_url}/delete_ebs_volume.js (PARAMS: #{requestify(params)})\"\n body = RestClient.delete @api_url+\"/delete_ebs_volume.js\"+\"?\"+requestify(params)\n success=true\n #json = JSON.load(body)\n STDERR.puts \"Deleted VOLUME: #{vol_id}\"\n break\n rescue Exception => e\n display_exception(e, \"delete_volume(#{vol_id}, #{wait})\")\n sleep 5\n end\n end\n raise \"Couldn't delete volume #{vol_id}...aborting.\" unless success\n end", "def remove_from(vm)\n VirtualBox.run_command! ['VBoxManage', '--nologo', 'storagectl', vm.uuid,\n '--name', name, '--remove']\n self\n end", "def detach\n if attached?\n attachment.destroy\n write_attachment nil\n end\n end", "def execute_restore_stripe(options={})\n new_vol_name = \"#{options[:lineage]}-#{ENV['EC2_INSTANCE_ID']}\"\n json_result = self.find_latest_ebs_backup(options[:lineage], options[:from_master], options[:timestamp])\n\n if json_result.nil? \n STDERR.puts \"No existing snapshot found for the specified nickname lineage. Aborting...\"\n exit(-1)\n end\n STDERR.puts \"Restoring.. 
#{json_result.inspect}\"\n options[:new_size_gb] = (options[:new_volume_size_in_gb] / json_result.size.to_f).ceil if options[:new_volume_size_in_gb]\n\n @disk.sync\n @disk.umount\n @disk.disable_volume\n\n self.execute_terminate_volumes if options[:force]\n\n json_result.each do |snapshot|\n \n# create volume from snap\n create_result = ( options[:new_size_gb] ? create_volume_from_snap_size_gb(snapshot[\"aws_id\"],new_vol_name,options[:new_size_gb] ) : create_volume_from_snap(snapshot[\"aws_id\"], new_vol_name ) )\n raise \"FATAL: error occured in create_volume_from_snap(#{snapshot['aws_id']}, #{new_vol_name})\" if create_result.nil?\n\n# attach volume to instance\n retry_seconds = 0\n while retry_seconds < 200\n begin \n attach_result = attach_volume(create_result['aws_id'], snapshot['device'])\n raise \"FATAL: error occured in attach_volume(#{create_result['aws_id']}, #{snapshot['device']}\" if attach_result.nil?\n break if attach_result\n rescue => e\n puts \"CAUGHT EXCEPTION in execute_restore_stripe. Device attachment. #{e}, Retrying #{retry_seconds} of 200 seconds\"\n retry_seconds += 30\n sleep 30 \n end\n end\n raise \"FATAL: error occured in attach_volume(#{create_result['aws_id']}, #{snapshot['device']}\" if attach_result.nil?\n end\n\n# wait for devices to attach, after completing ALL the api calls\n json_result.each { |s| wait_for_attachment(s['device']) }\n @disk.enable_volume\n raise \"FATAL: mount failed!\" unless @disk.mount\n @disk.write_fstab\n# TODO - grow the filesystem if a new size was given\n end", "def action_detach_tag(list_id, detach_tag_request, opts = {})\n data, _status_code, _headers = action_detach_tag_with_http_info(list_id, detach_tag_request, opts)\n data\n end", "def disable_netblockdev(dev)\n res = system \"nbd-client -d #{dev}\"\n raise \"Failed to detach image from network block device server\" unless res\n puts \"Detached net block device: #{dev}\"\nend", "def destroy\n @admissive_volume = AdmissiveVolume.find(params[:id])\n @admissive_volume.destroy\n\n respond_to do |format|\n format.html { redirect_to admissive_volumes_url }\n format.json { head :no_content }\n end\n end", "def delete_pvm_instance(instance_id)\n delete(\"cloud-instances/#{guid}/pvm-instances/#{instance_id}\")\n end", "def detach(host_id, source=nil)\n extra_params = {}\n if source\n extra_params['source'] = source\n end\n\n request(Net::HTTP::Delete, '/api/' + API_VERSION + '/tags/hosts/' + host_id.to_s, extra_params, nil, false)\n end", "def attached_to\n\t\tregion = resource[:availability_zone].to_s.gsub(/.$/,'') \n\t\tcompute = Fog::Compute.new(:provider => 'aws', :region => \"#{region}\")\n\t\tvolume = volinfo(compute,resource[:volume_name])\n\t\tprint \"attached_to: looking at volume #{resource[:volume_name]}\\n\" if $debug\n\t\tif ( volume['status'] == 'in-use' ) \n\t\t\t# Look for the name of the instance which this volume is attached to.\n\t\t\tif ( volume['attachmentSet'][0]['instanceId'] != nil )\n\t\t\t\tprint \"#{resource[:volume_name]} is attached to #{volume['attachmentSet'][0]['instanceId']}\\n\" if $debug\n\t\t\t\t# If the resource is specified as attached_to => \"me\" then we'd better check that it is attached\n\t\t\t\t# to this machine.\n\t\t\t\tif ( resource[:attached_to] == \"me\")\n\t\t\t\t\tprint \"Am I me?\\n\" if $debug\n\t\t\t\t\tprint \"I am #{myname(compute)}\\n\" if $debug\n\t\t\t\t\tif ( myname(compute) == lookupname(compute,volume['attachmentSet'][0]['instanceId']))\n\t\t\t\t\t\treturn \"me\"\n\t\t\t\t\telse\n\t\t\t\t\t\treturn 
lookupname(compute,volume['attachmentSet'][0]['instanceId'])\n\t\t\t\t\tend\n\t\t\t\telse\n\t\t\t\t\treturn lookupname(compute,volume['attachmentSet'][0]['instanceId'])\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\treturn ''\n\tend", "def remove_vm_disks(vm_service, disk_specs)\n attachments_service = vm_service.disk_attachments_service\n disk_specs.each do |disk_spec|\n disk_spec = disk_spec.with_indifferent_access\n attachment_service = attachments_service.attachment_service(disk_spec['disk_name'])\n attachment_service.remove(:detach_only => !disk_spec['delete_backing'])\n rescue OvirtSDK4::NotFoundError\n raise \"no disk with the id #{disk_spec['disk_name']} is attached to the vm: #{vm_service.get.name}\"\n rescue OvirtSDK4::Error\n raise \"Failed to detach disk with the id #{disk_spec['disk_name']} from the vm: #{vm_service.get.name}, check that it exists\"\n end\n end", "def deregister_container_instance(params={})\n request({\n 'Action' => 'DeregisterContainerInstance',\n :parser => Fog::Parsers::AWS::ECS::DeregisterContainerInstance.new\n }.merge(params))\n end", "def attach(volume, device = '/dev/sdh')\n @ec2.attach_volume volume.id, id, device\n end", "def detach_disks_specs\n detach_disk_array = []\n extra_config = []\n keys = disk_keys.invert\n\n ipool = VCenterDriver::VIHelper.one_pool(OpenNebula::ImagePool)\n disks_each(:detached?) do |d|\n key = d.key.to_s\n source = VCenterDriver::FileHelper.escape_path(d.path)\n persistent =\n VCenterDriver::VIHelper\n .find_persistent_image_by_source(\n source, ipool\n )\n\n op = { :operation => :remove, :device => d.device }\n if !persistent && d.type != 'CDROM'\n op[:fileOperation] = :destroy\n end\n detach_disk_array << op\n\n # Remove reference opennebula.disk if exist from vmx and cache\n extra_config << d.config(:delete) if keys[key]\n end\n\n [detach_disk_array, extra_config]\n end", "def volumes\n client = Aws::EC2::Client.new\n client.describe_volumes({\n filters: [{\n name: \"tag:backup\",\n values: [\"true\"]\n }]\n }).volumes\nend", "def attach(instance_id, volume)# rubocop:disable Metrics/AbcSize\n inst_details = AttrFinder.new(@instanceparameters)\n @options[:inst] = volume\n inst_details.options = @options\n inst_details.validate = @validate\n inst_details.function = 'server'\n opts = {}\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::Models::AttachVolumeDetails.new\n request.instance_id = instance_id\n request.type = 'iscsi'\n request.volume_id = inst_details.volume\n api = OracleBMC::Core::ComputeClient.new\n response = api.attach_volume(request, opts)\n end", "def eject\n @eject and HDB.vsystem(\"Ejecting #{@underlying_device}\", \"eject #{@underlying_device}\", nil)\n end", "def destroy\n @sm_volume = SmVolume.find(params[:id])\n @sm_volume.destroy\n\n respond_to do |format|\n format.html { redirect_to sm_volumes_url }\n format.json { head :no_content }\n end\n end", "def off\n attachment = hpg_resolve(shift_argument)\n return unless confirm_command(attachment.config_var, 'Deactiving will destroy all backups')\n action(\"Dectivating #{attachment.config_var} (#{attachment.resource_name})\") do\n RestClient.delete( authed_pgsnapshot_url(\"/client/resource/#{attachment.resource_name}\"))\n end\n end", "def volume_down!\n raise InvalidActionError, \"Cannot volume down an Output that is already voluming.\" unless not_voluming?\n volume_down_action.tap do |action|\n write_action action\n end\n end", "def destroy\n @volume = Volume.find(params[:id])\n @volume.destroy\n\n respond_to do |format|\n format.html { 
redirect_to(volumes_url) }\n format.xml { head :ok }\n end\n end", "def check()\n # check if teh volume still exists\n begin\n volumes = $ec2.describe_volumes([self.id])\n rescue RightAws::AwsError\n if $!.errors[0][0] == \"InvalidVolume.NotFound\"\n puts \"WARN: Volume #{self.id} is not running\"\n delete()\n return\n else\n p $!.code\n end\n end\n\n # check that it is attached\n if volumes[0][:aws_attachment_status] == 'attached'\n if self.attached_instance != volumes[0][:aws_instance_id]\n self.attached_instance = volumes[0][:aws_instance_id]\n self.save()\n puts \"WARN: volume #{self.id} is now attached to #{self.attached_instance}\"\n end\n elsif self.attached_instance.nil?\n puts \"WARN: volume #{self.id} is no longer attached\"\n self.attached_instance = nil\n self.save()\n end\n end" ]
[ "0.80094284", "0.7860202", "0.7805397", "0.75235146", "0.74850506", "0.7335772", "0.7262752", "0.72382694", "0.715239", "0.7067783", "0.70534164", "0.70353943", "0.69209504", "0.67825323", "0.66788", "0.66427445", "0.64922065", "0.6444169", "0.64404655", "0.6419261", "0.63709265", "0.63137466", "0.6300343", "0.61915183", "0.6169896", "0.61656296", "0.6149712", "0.61476284", "0.6131869", "0.6074886", "0.59694004", "0.591343", "0.5845232", "0.5845082", "0.58236885", "0.5792543", "0.5747639", "0.5736162", "0.5670552", "0.56507075", "0.56016946", "0.5601041", "0.5599398", "0.5570662", "0.5568215", "0.55533576", "0.5540131", "0.5526808", "0.5511902", "0.5501778", "0.545652", "0.5452019", "0.5435713", "0.538614", "0.5353987", "0.53265876", "0.53080165", "0.5303727", "0.5273717", "0.5269261", "0.52594787", "0.52562517", "0.52152634", "0.5205219", "0.52034485", "0.5203264", "0.5201908", "0.5200984", "0.518831", "0.5188038", "0.5164175", "0.5160648", "0.5159933", "0.51501596", "0.51477504", "0.5141268", "0.5135048", "0.5129852", "0.5112773", "0.51109785", "0.51100194", "0.5101168", "0.51002187", "0.50994384", "0.5089369", "0.50864047", "0.50656974", "0.50575054", "0.5051105", "0.5035711", "0.5031897", "0.50290465", "0.5024252", "0.50241053", "0.50220937", "0.50207585", "0.50154716", "0.501296", "0.5002019", "0.5000971" ]
0.697606
12
Take snapshot of disk
def snapshot_disk(disk_id, metadata)
  metadata = Hash[metadata.map { |key, value| [key.to_s, value] }]

  with_thread_name("snapshot_disk(#{disk_id})") do
    volume = @ec2_client.volumes[disk_id]
    devices = []
    volume.attachments.each { |attachment| devices << attachment.device }

    name = ['deployment', 'job', 'index'].collect { |key| metadata[key] }
    name << devices.first.split('/').last unless devices.empty?

    snapshot = volume.create_snapshot(name.join('/'))
    logger.info("snapshot '#{snapshot.id}' of volume '#{disk_id}' created")

    ['agent_id', 'instance_id', 'director_name', 'director_uuid'].each do |key|
      TagManager.tag(snapshot, key, metadata[key])
    end
    TagManager.tag(snapshot, 'device', devices.first) unless devices.empty?
    TagManager.tag(snapshot, 'Name', name.join('/'))

    ResourceWait.for_snapshot(snapshot: snapshot, state: :completed)

    snapshot.id
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def snapshot_disk(disk_id, metadata={})\n # TODO: Get vhd from 'vhd' container in vm storage account and use blob client to snapshot it\n end", "def snapshot_disk(disk_id, metadata)\n metadata = Hash[metadata.map { |key, value| [key.to_s, value] }]\n\n with_thread_name(\"snapshot_disk(#{disk_id})\") do\n volume = @ec2_resource.volume(disk_id)\n devices = []\n volume.attachments.each { |attachment| devices << attachment.device }\n\n name = ['deployment', 'job', 'index'].collect { |key| metadata[key] }\n name << devices.first.split('/').last unless devices.empty?\n\n snapshot = volume.create_snapshot(name.join('/'))\n logger.info(\"snapshot '#{snapshot.id}' of volume '#{disk_id}' created\")\n\n\n tags = {}\n ['agent_id', 'instance_id', 'director_name', 'director_uuid'].each do |key|\n tags[key] = metadata[key]\n end\n tags['device'] = devices.first unless devices.empty?\n tags['Name'] = name.join('/')\n TagManager.tags(snapshot, tags)\n\n ResourceWait.for_snapshot(snapshot: snapshot, state: 'completed')\n snapshot.id\n end\n end", "def snapshot_disk(disk_id, metadata)\n raise Bosh::Clouds::NotSupported.new(false),\n 'snapshot_disk is not supported.'\n end", "def snapshot_disk(disk_id, metadata)\n metadata = Hash[metadata.map { |key, value| [key.to_s, value] }]\n\n with_thread_name(\"snapshot_disk(#{disk_id})\") do\n volume = @ec2_resource.volume(disk_id)\n devices = []\n volume.attachments.each { |attachment| devices << attachment.device }\n\n name = ['deployment', 'job', 'index'].collect { |key| metadata[key] }\n\n unless devices.empty?\n name << devices.first.split('/').last\n metadata['device'] = devices.first\n end\n\n snapshot = volume.create_snapshot(description: name.join('/'))\n logger.info(\"snapshot '#{snapshot.id}' of volume '#{disk_id}' created\")\n\n metadata.merge!(\n 'director' => metadata['director_name'],\n 'instance_index' => metadata['index'].to_s,\n 'instance_name' => metadata['job'] + '/' + metadata['instance_id'],\n 'Name' => name.join('/')\n )\n\n %w[director_name index job].each do |tag|\n metadata.delete(tag)\n end\n\n TagManager.create_tags(snapshot, **metadata)\n ResourceWait.for_snapshot(snapshot: snapshot, state: 'completed')\n snapshot.id\n end\n end", "def take_snapshot\n select\n end", "def snapshot()\n puts \"TODO\"\n end", "def take_a_snapshot\n raise NotImplementedError\n end", "def snapshot_disk(disk_cid, metadata = {})\n @telemetry_manager.monitor('initialize') do\n _init_azure\n end\n with_thread_name(\"snapshot_disk(#{disk_cid},#{metadata})\") do\n @telemetry_manager.monitor('snapshot_disk', id: disk_cid) do\n disk_id = DiskId.parse(disk_cid, _azure_config.resource_group_name)\n resource_group_name = disk_id.resource_group_name\n disk_name = disk_id.disk_name\n caching = disk_id.caching\n if disk_name.start_with?(MANAGED_DATA_DISK_PREFIX)\n snapshot_id = DiskId.create(caching, true, resource_group_name: resource_group_name)\n @disk_manager2.snapshot_disk(snapshot_id, disk_name, encode_metadata(metadata))\n else\n disk = @disk_manager2.get_data_disk(disk_id)\n if disk.nil?\n storage_account_name = disk_id.storage_account_name\n snapshot_name = @disk_manager.snapshot_disk(storage_account_name, disk_name, encode_metadata(metadata))\n snapshot_id = DiskId.create(caching, false, disk_name: snapshot_name, storage_account_name: storage_account_name)\n else\n snapshot_id = DiskId.create(caching, true, resource_group_name: resource_group_name)\n @disk_manager2.snapshot_disk(snapshot_id, disk_name, encode_metadata(metadata))\n end\n end\n\n @logger.info(\"Take 
a snapshot '#{snapshot_id}' for the disk '#{disk_id}'\")\n snapshot_id.to_s\n end\n end\n end", "def snapshot\r\n date = self.class.service_instance.snapshot_blob(self.path)\r\n properties = self.class.service_instance.get_blob_properties(self.path)\r\n return BlobObject.new(:name => self.name, \r\n :url => self.class.service_instance.generate_request_uri(self.path) + \"?snapshot=#{date}\",\r\n :content_type => properties[:content_type],\r\n :snapshot_date => date)\r\n end", "def create_snapshot(vm, name)\n task = vm.CreateSnapshot_Task(\n name: name,\n memory: false,\n quiesce: false)\n\n if block_given?\n task.wait_for_progress do |progress|\n yield progress unless progress.nil?\n end\n else\n task.wait_for_completion\n end\n end", "def make_backup\n @backup = editor.create_snapshot\n end", "def snapshot()\n\t\tif(@wait != -1 && @count < @wait)\n\t\t\t@count += 1\n\t\t\treturn\n\t\tend\n\t\t\n\t\tif (! @quiet)\n\t\t\tputs \"Snapshot ##{@count}\"\n\t\tend\n\t\t\n\t\tnewName = File.basename(@file.path, \".svg\") + \"#{@count}\"\n\t\t@count += 1\n\t\t\n\t\t## Save a new SVG file\n\t\tsave(newName + \".svg\")\n\t\t\n\t\t## Convert to EPS\n\t\tif (@eps)\n\t\t\tsystem(\"#{@inkscape} -E=#{newName}.eps #{newName}.svg -z -d=90 -C --export-ignore-filters 2>/dev/null\")\n\t\tend\n\t\t\n\t\t## Delete the SVG file\n\t\tif (@cleanup)\n\t\t\tFile.delete(newName + \".svg\")\n\t\tend\n\tend", "def take_snapshots\n snapped_volumes = []\n logger.debug \"Issuing sync command\"\n system 'sync'\n\n logger.debug \"Walking attached volumes\"\n attached_volumes.each do |vol|\n dir = device_to_directory device_name vol\n logger.debug \"Found #{vol.id} mounted on #{dir}\"\n unless should_snap vol\n logger.debug \"Skipping #{vol.id}\"\n next\n end\n\n fs_freeze dir if options[:fs_freeze]\n take_snapshot vol\n snapped_volumes.push vol\n fs_unfreeze dir if options[:fs_freeze]\n end\n snapped_volumes\n end", "def wipe_snapshots_data; end", "def wipe_snapshots_data; end", "def snap(cmd, snap)\n nodes = getnodes.map{|x| x.name}\n out = `vagrant snapshot list`\n snaps = out.include?(\"No snapshots\") ? [] : out.split.uniq.sort\n curr_snap = snaps.last\n next_snap = curr_snap ? 
curr_snap[0..-2] + (curr_snap[-1].to_i + 1).to_s : \"snap1\"\n\n if cmd == \"save\"\n puts(\":: Creating snapshot for VMs [#{nodes * ','}]...\".colorize(:light_yellow))\n snap ||= next_snap\n Sys.exec(\"vagrant snapshot save #{snap}\")\n elsif cmd == \"restore\"\n puts(\":: Restoring previous snapshot for VMs [#{nodes * ','}]...\".colorize(:light_yellow))\n snap ||= curr_snap\n Sys.exec(\"vagrant snapshot restore #{snap}\")\n elsif cmd == \"delete\"\n puts(\":: Deleting previous snapshot for VMs [#{nodes * ','}]...\".colorize(:light_yellow))\n snap ||= curr_snap\n Sys.exec(\"vagrant snapshot delete #{snap}\")\n elsif cmd == \"list\"\n puts(\":: Listing snapshots for VMs [#{nodes * ','}]...\".colorize(:light_yellow))\n puts(snaps)\n end\n end", "def _snapshot\n raise NotImplementedError\n end", "def restore_from_snap(last_snapshot, options = {})\n options[:device] = \"/dev/sdk\" unless options[:device]\n options[:vol_nickname] = last_snapshot[\"nickname\"] unless options[:vol_nickname]\n \n # 5 - Unmount and detach the current EBS volume (forcing to detach the device we're gonna need later for attching ours...)\n umount_and_detach_device({:device => options[:device]})\n # 6- Create the volume from the latest snapshot, attach it to the instance and then mount it\n STDERR.puts \"Creating new DB volume from snapshot #{last_snapshot['aws_id']}\"\n vol = ( options[:new_size_gb] ? create_volume_from_snap_size_gb(last_snapshot[\"aws_id\"],options[:vol_nickname],options[:new_size_gb] ) : create_volume_from_snap(last_snapshot[\"aws_id\"],options[:vol_nickname] ) )\n unless vol.nil?\n \tSTDERR.puts \"Attaching new DB volume: #{vol['aws_id']}\"\n \tatt = attach_volume(vol['aws_id'], options[:device])\n \twait_for_attachment(options[:device])\n \tFileUtils.mkdir_p self.MountPoint\n \tres = `mount -t xfs -o noatime #{options[:device]} #{self.MountPoint}`\n \traise EBSRemoteExecException.new(nil,$?,\"Error mounting newly created volume (#{vol['aws_id']}) on #{options[:device]}:\\n\"+res) if $? != 0 \n else\n\t raise \"create volume failed from snapshot\"\n end\n end", "def snapshot\n if disk_usage_exceeds?(90)\n $stderr.puts \"WARNING: The application's disk usage is very close to the quota limit. The snapshot may fail unexpectedly\"\n $stderr.puts \"depending on the amount of data present and the snapshot procedure used by your application's cartridges.\"\n end\n\n pre_snapshot_state = @state.value\n stop_gear\n\n scalable_snapshot = !!@cartridge_model.web_proxy\n\n if scalable_snapshot\n begin\n handle_scalable_snapshot\n rescue => e\n $stderr.puts \"We were unable to snapshot this application due to communication issues with the OpenShift broker. 
Please try again later.\"\n $stderr.puts \"#{e.message}\"\n $stderr.puts \"#{e.backtrace}\"\n return false\n end\n end\n\n @cartridge_model.each_cartridge do |cartridge|\n @cartridge_model.do_control('pre-snapshot',\n cartridge,\n err: $stderr,\n pre_action_hooks_enabled: false,\n post_action_hooks_enabled: false,\n prefix_action_hooks: false,)\n end\n\n exclusions = []\n\n @cartridge_model.each_cartridge do |cartridge|\n exclusions |= snapshot_exclusions(cartridge)\n end\n\n write_snapshot_archive(exclusions)\n\n result = @cartridge_model.each_cartridge do |cartridge|\n @cartridge_model.do_control('post-snapshot',\n cartridge,\n err: $stderr,\n pre_action_hooks_enabled: false,\n post_action_hooks_enabled: false)\n end\n\n # Revert to the pre-snapshot state of the currently snapshotted gear\n #\n if @state.value != pre_snapshot_state\n (@state.value != State::STARTED) && start_gear\n end\n\n result\n end", "def current_snapshot\n `zfs list -r -H -t snapshot -o name -S creation #{@dataset}/share | head -1`.chomp\n end", "def new_snapshot!(time = Time.now)\n snapshot_path = time.strftime(dir.path + '/%Y-%m-%dT%H:%M%z')\n Snapshot.new(self, snapshot_path).create!(current.path)\n end", "def take_snapshot\n json_string = self.snapshot_model.to_json\n Crypto::Bytes.from_string(json_string)\n end", "def store_screenshot(path)\n screenshot = screenshots.first\n if (screenshot)\n begin \n variant = screenshot.variant(resize_to_limit: [425, nil], resize_to_fill: [425, 250, { crop: :low }]).processed\n path = variant.blob.service.send(:path_for, variant.key)\n FileUtils.cp(path, \"/Users/jan.prill/Documents/workspace/msp/inviadorepo/web/js/gridsome/inviado/src/assets/images/inviado/#{id}.png\")\n rescue\n p \"There is a problem on #{variant}\"\n end\n end\n end", "def create_snapshot(snap_id, snap_name)\n memory_dumps = true\n memory_dumps = CONFIG[:memory_dumps] \\\n unless CONFIG[:memory_dumps].nil?\n\n snapshot_hash = {\n :name => snap_id,\n :description => \"OpenNebula Snapshot: #{snap_name}\",\n :memory => memory_dumps,\n :quiesce => true\n }\n\n begin\n @item.CreateSnapshot_Task(snapshot_hash).wait_for_completion\n rescue StandardError => e\n error = \"Cannot create snapshot for VM: #{e.message}.\"\n\n if VCenterDriver::CONFIG[:debug_information]\n error += \"\\n\\n#{e.backtrace.join(\"\\n\")}\"\n end\n\n raise error\n end\n\n snap_id\n end", "def snapshot\n begin\n response = resource[\"/snapshot/#{app}\"].post(nil)\n rescue RestClient::InternalServerError\n display \"An error has occurred.\"\n end\n display response.to_s\n end", "def take_raw(snapshot)\n _buffer(snapshot)\n end", "def snapshot(cart_name)\n raise NotImplementedError(\"snapshot\")\n end", "def snap(descriptor = \"\")\n name = clean_url(page.current_url)\n\n # Descriptor\n name = name + (descriptor.empty? ? 
\"\" : \"-state-#{descriptor}\")\n p \"#snap\", \"name\", name unless name.empty?\n\n set_window_size\n\n # Ensure @folder exists\n FileUtils.mkdir_p(@folder) unless File.exists?(@folder)\n Capybara.current_session.driver.browser.save_screenshot(\"#{@folder}/#{name}.png\")\n end", "def create_snapshot(name)\n unless exists?\n return Response.new :code => 1, :message => 'VM does not exist'\n end\n\n running_response = running?\n return running_response unless running_response.successful?\n\n unless running_response.data\n message = 'The VM must be running in order to take a snapshot.'\n return Response.new :code => 1, :message => message\n end\n\n conf_file_response = conf_file\n return conf_file_response unless conf_file_response.successful?\n\n snapshots_response = snapshots\n return snapshots_response unless snapshots_response.successful?\n\n if snapshots_response.data.include? name\n message = \"There is already a snapshot named '#{name}'.\"\n return Response.new :code => 1, :message => message\n end\n\n command = \"#{vmrun_cmd} snapshot \"\n command << \"#{conf_file_response.data} \\\"#{name}\\\" 2>&1\"\n\n Response.from_command(`#{command}`)\n end", "def snap_delete(volume,snapshot)\n output = @filer.invoke(\"snapshot-create\",\"volume\",volume,\"snapshot\",snapshot)\n if (output.results_status() == \"failed\")\n raise \"Error #{output.results_errno} creating snapshot #{snapshot} on #{volume}: #{output.results_reason()}\\n\"\n end\n end", "def create_snapshot\n @current_snapshot = @bitmap.to_json if @bitmap\n end", "def createEBSSnapshot(client=nil,description='',volume_id=nil)\n return false if volume_id.nil? || client.nil?\n # Fetch the Volume Name. This will be used in the description of the snapshot\n resp = client.describe_volumes({dry_run: false, volume_ids: [volume_id] })\n resp.volumes[0].tags.each do |t|\n if t.key=='Name'\n description = t.value unless t.value.empty?\n break\n end\n end\n # puts \"Taking snapshot of volume #{volume_id}...\"\n return client.create_snapshot({\n dry_run: false,\n volume_id: volume_id,\n description: description\n })\nend", "def create_snapshot(metadata={})\n Azure::Blobs.create_snapshot(self, metadata)\n end", "def backup_image\n case provider\n when :libvirt\n system \"sudo virt-clone -o #{IMAGE_NAME} -n #{IMAGE_NAME}_sav --file /var/lib/libvirt/images/#{IMAGE_NAME}_sav.qcow2\"\n when :virtualbox\n # Shutdown the system\n system \"VBoxManage controlvm #{IMAGE_NAME} acpipowerbutton\"\n sleep SLEEP_TIME_AFTER_SHUTDOWN\n system \"VBoxManage controlvm #{IMAGE_NAME} poweroff\"\n\n # Copy the virtual machine (this is the only way of having an identical system)\n vm_config = `VBoxManage showvminfo #{IMAGE_NAME} | grep \"Config file\" | cut -f2 -d:`.strip\n vm_dir = File.dirname(vm_config)\n system \"VBoxManage unregistervm #{IMAGE_NAME}\"\n FileUtils.mv vm_dir, \"#{vm_dir}.sav\"\n system \"sync\"\n end\n end", "def execute_backup_stripe(options={})\n @disk.sync\n\n# Ensure filesystem is frozen and uses retry\n raise \"FATAL: unable to freeze lvm filesystem!\" unless @disk.freeze\n\n create_opts = {\n :lineage => options[:lineage],\n :prefix_override => options[:lineage], #Make sure we use the volume_nickname \"lineage\" as the prefix for our snaps (even if the volume has a different, perhaps more descriptive name)\n :description => \"Snapshot created by RightScale DB tools on instance #{ENV['EC2_INSTANCE_ID']}.\",\n :max_snaps => options[:max_snapshots],\n :keep_dailies => options[:keep_dailies],\n :keep_weeklies => options[:keep_weeklies],\n 
:keep_monthlies => options[:keep_monthlies],\n :keep_yearlies => options[:keep_yearlies],\n :devices => options[:devices].join(\",\")\n }\n create_opts[:suffix] = options[:suffix] if options[:suffix]\n\n # TODO: retry on the http requests? sounds like a good idea to me\n result = self.create_ebs_backup(create_opts)\n raise \"FATAL: unable to create snapshots!\" if result.nil?\n aws_ids = result['aws_ids']\n raise \"FATAL: result not recognized #{result}\" unless aws_ids.is_a?(Array)\n\n# Ensure filesystem is unfrozen and uses retry\n raise \"FATAL: could not unfreeze filesystem!\" unless @disk.unfreeze\n\n aws_ids.each do |snap|\n # TODO: does this http call need to be retried also? \n self.update_snapshot(snap, \"committed\")\n end\n\n # e) - Perform snapshot cleanup\n lst = self.cleanup_snapshots_stripe(options[:cleanup_prefix],{:keep_last => options[:max_snapshots], :dailies => options[:keep_dailies], :weeklies => options[:keep_weeklies], :monthlies => options[:keep_monthlies], :yearlies => options[:keep_yearlies]})\n puts \"Cleanup resulted in deleting #{lst.length} snapshots : #{lst.inspect}\"\n \n end", "def create_snapshot(droplet) # rubocop:disable MethodLength,Metrics/AbcSize\n fail_if_shutdown(droplet)\n\n logger.info \"Start creating snapshot for droplet id: #{droplet.id} name: #{droplet.name}.\"\n\n today = DateTime.now\n name = \"#{droplet.name}_#{today.strftime('%Y_%m_%d')}\"\n # noinspection RubyResolve\n snapshot_size = api.snapshots(droplet).size\n\n logger.debug 'Wait until snapshot will be created.'\n\n api.create_snapshot droplet.id, name\n\n snapshot_size += 1\n\n logger.info \"Snapshot name: #{name} created successfully.\"\n logger.info \"Droplet id: #{droplet.id} name: #{droplet.name} snapshots: #{snapshot_size}.\"\n\n # Cleanup snapshots.\n cleanup_snapshots droplet, snapshot_size if clean\n rescue => e\n case e.class.to_s\n when 'DoSnapshot::SnapshotCleanupError'\n raise e.class, e.message, e.backtrace\n when 'DoSnapshot::DropletPowerError'\n return\n else\n raise SnapshotCreateError.new(droplet.id), e.message, e.backtrace\n end\n end", "def snapshot(table, snapshot_name, *args)\n # Table name should be a string\n raise(ArgumentError, 'Table name must be of type String') unless table.is_a?(String)\n\n # Snapshot name should be a string\n raise(ArgumentError, 'Snapshot name must be of type String') unless\n snapshot_name.is_a?(String)\n\n table_name = TableName.valueOf(table)\n if args.empty?\n @admin.snapshot(snapshot_name, table_name)\n else\n args.each do |arg|\n ttl = arg[TTL]\n ttl = ttl ? ttl.to_java(:long) : -1\n snapshot_props = java.util.HashMap.new\n snapshot_props.put(\"TTL\", ttl)\n max_filesize = arg[MAX_FILESIZE]\n max_filesize = max_filesize ? 
max_filesize.to_java(:long) : -1\n snapshot_props.put(\"MAX_FILESIZE\", max_filesize)\n if arg[SKIP_FLUSH] == true\n @admin.snapshot(snapshot_name, table_name,\n org.apache.hadoop.hbase.client.SnapshotType::SKIPFLUSH, snapshot_props)\n else\n @admin.snapshot(snapshot_name, table_name, snapshot_props)\n end\n end\n end\n end", "def saos\n save_and_open_screenshot\n end", "def snapshot(url)\n `phantomjs #{File.dirname(__FILE__)}/take_snapshot.js '#{url}'`\n end", "def save_snapshot\n return unless @current_snapshot\n current_state = @bitmap.to_json\n return if snapshots_match?(@current_snapshot, current_state)\n\n @snapshots << @current_snapshot\n end", "def screen_capture(fileName)\n return $marathon.saveScreenShot(fileName)\nend", "def create_snapshot(device, options = {})\n # TODO - add in new param commit=explicit once the API is availible\n params = {:device => device, \n :suffix => options[:suffix], \n :description => options[:description], \n :tag => options[:tag],\n :max_snaps => options[:max_snaps],\n :prefix_override => options[:prefix_override],\n :commit => \"explicit\",\n :api_version => 1.0}\n # Perform API call to snapshot the volume attached to the device on the instance\n STDERR.puts \"Performing RightScale API call to create a new snapshot\"\n #STDERR.puts \"HERE IS THE URL: #{@api_url}/create_ebs_snapshot.js (PARAMS: #{params.inspect})\"\n json=nil\n SystemTimer.timeout_after(@api_snap_timeout) do\n body = RestClient.post @api_url+\"/create_ebs_snapshot.js\",params\n json = body.nil? ? nil: JSON.load(body)\n STDERR.puts \"CREATED_SNAP: #{json}\"\n end\n json \n rescue Exception => e\n display_exception(e, \"create_snapshot(#{device}, #{options.inspect})\")\n end", "def screen_capture(fileName)\n return $marathon.saveScreenShot(fileName)\nend", "def download_prepared_dump id\n name = \"trunk-#{id}.dump\"\n target_path = File.expand_path(\"../../fixtures/#{name}\", __FILE__)\n \n puts \"Accessing prepared DB test snapshot #{id} from S3.\"\n \n require 's3'\n service = S3::Service.new(:access_key_id => access_key_id, :secret_access_key => secret_access_key)\n bucket = service.buckets.find(\"cocoapods-org-testing-dumps\")\n \n # Due to a bug in the s3 gem we are searching for the object via iterating.\n bucket.objects.each do |obj|\n if obj.key == name\n puts \"Downloading prepared DB test snapshot #{id} from S3.\"\n File.open(target_path, 'w') do |file|\n file.write(obj.content)\n end\n break\n end\n end\n \n puts \"Prepared DB test snapshot #{id} downloaded to #{target_path}\"\n end", "def snapshot name\n Aptly::create_mirror_snapshot name, @name\n end", "def snapshot_key\n # no-op\n end", "def snap\n @cleaner.photo = params[:photo][:file]\n content_type = content_type(@cleaner.photo_file_name)\n @cleaner.photo_content_type = content_type if content_type\n @cleaner.save!\n render :text => @cleaner.photo.url(:large)\n end", "def create_blob_snapshot(container, blob, options={})\n query = { 'comp' => 'snapshot'}\n StorageService.with_query query, 'timeout', options[:timeout].to_s if options[:timeout]\n\n uri = blob_uri(container, blob, query)\n\n headers = StorageService.common_headers\n unless options.empty?\n StorageService.add_metadata_to_headers(options[:metadata], headers)\n add_blob_conditional_headers(options, headers)\n end\n\n response = call(:put, uri, nil, headers, options)\n\n response.headers['x-ms-snapshot']\n end", "def snap()\n \n end", "def screenshot\n dname = Document.name.get[0][0..-5]\n a = File.open(\"/tmp/skim-#{dname}-tmp\",\"a\")\n page = 
Document.get[0].current_page.get.index.get\n\n curfile = File.last_added(\"#{Home_path}/Desktop/Screen*.png\")\n a << \"#{curfile},#{page}\\n\"\n growl(\"One picture added to wiki notes cache\")\nend", "def oldest_snapshot\n `zfs list -r -H -t snapshot -o name -S creation #{@dataset}/share | tail -1`.chomp\n end", "def testsnapshot(onwindow: false)\n baseImageName = \"AfterDrawSnapshot.jpg\"\n begin\n # 1. Create window or bitmap context. width: 800, height: 600\n bitmapObject = if onwindow\n SmigHelpers.create_window(width: 800, height: 600)\n else\n SmigHelpers.create_bitmapcontext(width: 800, height: 600)\n end\n\n # 2. Try and draw a snapshot\n didThrow = false\n begin\n Smig.perform_command(CommandModule.make_snapshot(bitmapObject,\n snapshottype: :drawsnapshot))\n rescue RuntimeError => e\n # 3. Take note that drawing the snapshot failed.\n didThrow = true\n end\n\n # 3. Continued. Raise an exception if snapshot failed & exception not thrown\n unless didThrow\n raise \"Draw shapshot should have failed as no snapshot has been taken\"\n end\n\n if onwindow\n # Need to initialize the window to having a white background.\n windowRect = MIShapes.make_rectangle(size:{ :width => 800,\n :height => 600 },\n origin: { :x => 0, :y => 0 } )\n whiteColor = MIColor.make_rgbacolor(1.0, 1.0, 1.0)\n drawBackgroundElement = MIDrawElement.new(:fillrectangle)\n drawBackgroundElement.rectangle = windowRect\n drawBackgroundElement.fillcolor = whiteColor\n drawBackgroundCommand = CommandModule.make_drawelement(bitmapObject, \n drawinstructions: drawBackgroundElement)\n Smig.perform_command(drawBackgroundCommand)\n end\n\n # 4. Draw some stuff into the bitmap context.\n points = []\n points.push(MIShapes.make_point(100, 50))\n points.push(MIShapes.make_point(700, 50))\n points.push(MIShapes.make_point(400, 550))\n newPath = MIPath.new\n newPath.add_triangle(points: points)\n drawElement = MIDrawElement.new(:fillpath)\n drawElement.fillcolor = MIColor.make_rgbacolor(0.8, 0.2, 0.1)\n drawElement.arrayofpathelements = newPath\n drawElement.startpoint = points[0]\n drawElementCommand = CommandModule.make_drawelement(bitmapObject, \n drawinstructions: drawElement)\n Smig.perform_command(drawElementCommand)\n # 5. Take a snapshot\n Smig.perform_command(CommandModule.make_snapshot(bitmapObject,\n snapshottype: :takesnapshot))\n # 6. Draw the snapshot into the bitmap context.\n Smig.perform_command(CommandModule.make_snapshot(bitmapObject,\n snapshottype: :drawsnapshot))\n # 7. Draw some more stuff into it\n points = []\n points.push(MIShapes.make_point(100, 550))\n points.push(MIShapes.make_point(700, 550))\n points.push(MIShapes.make_point(400, 50))\n newPath = MIPath.new\n newPath.add_triangle(points: points)\n drawElement = MIDrawElement.new(:fillpath)\n drawElement.fillcolor = MIColor.make_rgbacolor(0.2, 0.7, 0.1)\n drawElement.arrayofpathelements = newPath\n drawElement.startpoint = points[0]\n drawElementCommand = CommandModule.make_drawelement(bitmapObject,\n drawinstructions: drawElement)\n Smig.perform_command(drawElementCommand)\n # 8. Export an image from the bitmap\n tempFile2 = File.join(Dir.tmpdir(), \"BeforeSnapshot.jpg\")\n SmigHelpers.save_image(imagesource: bitmapObject, pathtofile: tempFile2)\n # 9. Draw the snap shot into the bitmap context\n Smig.perform_command(CommandModule.make_snapshot(bitmapObject,\n snapshottype: :drawsnapshot))\n # 10. 
Save the bitmap context to an image file.\n tempFile = File.join(Dir.tmpdir(), baseImageName)\n SmigHelpers.save_image(imagesource: bitmapObject, pathtofile: tempFile)\n origFile = File.join($compareImageDir, baseImageName)\n\n unless AreImageFilesSame(origFile, tempFile)\n # 11. Report if different.\n raise \"Different image files: \" + origFile + \" and \" + tempFile\n end\n\n rescue RuntimeError => e\n $errorcode = Smig.exitvalue\n unless $errorcode.zero?\n puts \"Exit string: \" + Smig.exitstring\n puts \"Exit status: \" + $errorcode.to_s\n end\n puts e.message\n puts e.backtrace.to_s\n# exit 240\n ensure\n Smig.close_object_nothrow(bitmapObject)\n# `open #{tempFile}`\n# `open #{tempFile2}`\n# `open #{origFile}`\n FileUtils.rm_f(tempFile)\n FileUtils.rm_f(tempFile2)\n end\nend", "def vm_disk_write\n return unless @options[:mode] == 'vm_disk_write'\n rrddata_helper(value: 'diskwrite', output_msg: 'Disk write')\n end", "def screenshot_small\n screenshot(:small)\n end", "def create_snapshot_bundle\n # we shouldn't specify -k $EC2_PRIVATE_KEY since we assume private keys are already appended to /root/.ssh/authorized_keys\n # but it's a required parameter -- doh!\n run \"#{ec2_cmd('ec2-bundle-vol')} -v #{volume_to_bundle} -d #{bundling_directory} -k $EC2_PRIVATE_KEY -u #{@ec2_user_id} -s #{volume_size}\"\n end", "def snapshot esurl, snapurl, options\n elastic esurl\n set_opts(options)\n set_repository snapurl, options\n initiate_snapshot snapurl\n end", "def snap_delete(volume,snapshot)\n output = @filer.invoke(\"snapshot-delete\",\"volume\",volume,\"snapshot\",snapshot)\n if (output.results_status() == \"failed\")\n raise \"Error #{output.results_errno} deleting snapshot #{snapshot} on #{volume}: #{output.results_reason()}\\n\"\n end\n end", "def restore_snapshot(uuid, snapshot_id)\n # Sometimes this command fails with 'Data synchronization is currently\n # in progress'. Just wait and retry.\n retryable(on: VagrantPlugins::Parallels::Errors::ExecutionError, tries: 2, sleep: 2) do\n execute_prlctl('snapshot-switch', uuid, '-i', snapshot_id)\n end\n end", "def create_blob_snapshot(container, blob, options={})\n query = { 'comp' => 'snapshot'}\n query['timeout'] = options[:timeout].to_s if options[:timeout]\n\n uri = blob_uri(container, blob, query)\n\n headers = service_properties_headers\n unless options.empty?\n add_metadata_to_headers(options[:metadata], headers) if options[:metadata]\n\n headers['If-Modified-Since'] = options[:if_modified_since] if options[:if_modified_since]\n headers['If-Unmodified-Since'] = options[:if_unmodified_since] if options[:if_unmodified_since]\n headers['If-Match'] = options[:if_match] if options[:if_match]\n headers['If-None-Match'] = options[:if_none_match] if options[:if_none_match]\n end\n\n response = call(:put, uri, nil, headers)\n\n response.headers['x-ms-snapshot']\n end", "def takeSnapshot(description= nil)\n\n #@return : the Snapshot instance for this snapshot\t\n\t\t response = RestClient.post(\"https://#{Cbthelper.username}:#{Cbthelper.authkey}@crossbrowsertesting.com/api/v3/selenium/#{@testId}/snapshots\",\n\t\t \t\"selenium_test_id=#{@testId}\")\n\t\t hash = /(?<=\"hash\": \")((\\w|\\d)*)/.match(response)[0]\n\t\t snap = Snapshot.new(hash, @testId)\n\t\t if description != nil\n\t\t \tsnap.setDescription(description)\n\t\t end\n\t\t return snap\n\n\tend", "def move_to_storage\n process_emulation 5\n clear_progress_bar\n self.finished_at = Time.now.utc\n save! 
&& ready!\n end", "def snapshot_devices(devices, prefix, limit = 0, name = \"#{instance_id}\")\n log \"Snapshot limit set to #{limit} (0 means never purge)\"\n ts = DateTime.now.strftime(\"%Y-%m-%d-%H-%M\").to_s\n name = \"#{prefix} \" + name\n volumes = {}\n devices.each do |device|\n volumes[device] = find_volume_for_device(device)\n end\n sn = []\n volumes.each do |device, volume|\n log \"Creating volume snapshot for #{device} on instance #{instance_id}\"\n snapshot = volume.snapshots.new\n snapshot.description = name+\": #{device}\"\n snapshot.save\n sn << snapshot\n snapshot.reload\n\n @compute.tags.create(:resource_id => snapshot.id, :key =>\"device\", :value => device)\n @compute.tags.create(:resource_id => snapshot.id, :key =>\"instance_id\", :value =>instance_id)\n @compute.tags.create(:resource_id => snapshot.id, :key =>\"date\", :value => ts)\n end\n\n # DO NOT need to wait for creating EBS snapshot\n #log \"Waiting for snapshots to complete.\"\n #sn.each do |s|\n # begin\n # sleep(3)\n # s.reload\n # end while s.state == 'nil' || s.state == 'pending'\n #end\n\n if limit != 0\n # populate data structure with updated information\n snapshots = list_snapshots(devices)\n nsnaps = snapshots.keys.length\n if nsnaps-limit > 0\n dates = snapshots.keys.sort\n puts dates.inspect\n extra_snapshots = dates[0..-(limit+1)]\n remaining_snapshots = dates[-limit..-1]\n extra_snapshots.each do |date|\n snapshots[date].each do |snap|\n log \"Destroying #{snap.description} #{snap.id}\"\n snap.destroy\n end\n end\n end\n end\n end", "def perform_backup\n \n add_memories_to_dropbox! if has_dropbox?\n\n # Clone the repo incase something is writing to it while we are backing up\n run \"cd #{@home} && git clone --bare #{@repo} #{@repo}.mirror\"\n output=run \"backup perform --trigger=daily_backup --log-path #{@dirs[:logs ]}\"\n run \"cd #{@home} && rm -fr #{@repo}.mirror\"\n \n get_timestamp(output)\n end", "def restore_snapshot(vm, name)\n snapshot = enumerate_snapshots(vm).find { |s| s.name == name }\n\n # No snapshot matching \"name\"\n return nil if snapshot.nil?\n\n task = snapshot.snapshot.RevertToSnapshot_Task(suppressPowerOn: true)\n\n if block_given?\n task.wait_for_progress do |progress|\n yield progress unless progress.nil?\n end\n else\n task.wait_for_completion\n end\n end", "def get_backup\n tar_file = get_tempfile\n safe_run \"tar -cf #{tar_file} #{tar_dir}\"\n tar_file\n end", "def build_snapshot\n raise \"build_snapshot not implemented in #{self}\"\n end", "def execute_restore_stripe(options={})\n new_vol_name = \"#{options[:lineage]}-#{ENV['EC2_INSTANCE_ID']}\"\n json_result = self.find_latest_ebs_backup(options[:lineage], options[:from_master], options[:timestamp])\n\n if json_result.nil? \n STDERR.puts \"No existing snapshot found for the specified nickname lineage. Aborting...\"\n exit(-1)\n end\n STDERR.puts \"Restoring.. #{json_result.inspect}\"\n options[:new_size_gb] = (options[:new_volume_size_in_gb] / json_result.size.to_f).ceil if options[:new_volume_size_in_gb]\n\n @disk.sync\n @disk.umount\n @disk.disable_volume\n\n self.execute_terminate_volumes if options[:force]\n\n json_result.each do |snapshot|\n \n# create volume from snap\n create_result = ( options[:new_size_gb] ? 
create_volume_from_snap_size_gb(snapshot[\"aws_id\"],new_vol_name,options[:new_size_gb] ) : create_volume_from_snap(snapshot[\"aws_id\"], new_vol_name ) )\n raise \"FATAL: error occured in create_volume_from_snap(#{snapshot['aws_id']}, #{new_vol_name})\" if create_result.nil?\n\n# attach volume to instance\n retry_seconds = 0\n while retry_seconds < 200\n begin \n attach_result = attach_volume(create_result['aws_id'], snapshot['device'])\n raise \"FATAL: error occured in attach_volume(#{create_result['aws_id']}, #{snapshot['device']}\" if attach_result.nil?\n break if attach_result\n rescue => e\n puts \"CAUGHT EXCEPTION in execute_restore_stripe. Device attachment. #{e}, Retrying #{retry_seconds} of 200 seconds\"\n retry_seconds += 30\n sleep 30 \n end\n end\n raise \"FATAL: error occured in attach_volume(#{create_result['aws_id']}, #{snapshot['device']}\" if attach_result.nil?\n end\n\n# wait for devices to attach, after completing ALL the api calls\n json_result.each { |s| wait_for_attachment(s['device']) }\n @disk.enable_volume\n raise \"FATAL: mount failed!\" unless @disk.mount\n @disk.write_fstab\n# TODO - grow the filesystem if a new size was given\n end", "def create_snapshot(name)\n Fission::Action::Snapshot::Creator.new(self).create_snapshot(name)\n end", "def create_snapshot request_pb, options:, &block\n uri = \"/compute/v1/projects/#{request_pb.project}/zones/#{request_pb.zone}/disks/#{request_pb.disk}/createSnapshot\"\n body = request_pb.snapshot_resource.to_json\n\n response = @client_stub.make_post_request(\n uri: uri,\n body: body,\n options: options,\n )\n\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n\n result\n end", "def create_snapshot(options)\n snapshot = ec2.snapshots.new\n snapshot.volume_id = options['volume_id']\n snapshot.description = options['description']\n\n attempts = 0\n\n begin\n snapshot.save\n snapshot.reload\n rescue Fog::Compute::AWS::Error\n sleep 5\n attempts += 1\n if attempts == 5\n log \"Error communicating with API; Unable to save volume `#{options['volume_id']}` (Desc: #{options['description']})\"\n end\n return unless attempts == 5\n end\n\n options['tags'].each do |k,v|\n begin\n ec2.tags.create({resource_id: snapshot.id, key: k, value: v})\n rescue Errno::EINPROGRESS , Errno::EISCONN\n log \"API Connection Error\"\n sleep 1\n retry\n rescue Fog::Compute::AWS::Error\n log \"Failed attaching tag `'#{k}' => #{v}` to #{options['snapshot_type']} snapshot #{snapshot.id}\"\n sleep 1\n retry\n end\n end\n\n end", "def restore_and_grow(server,new_size,force)\n options = { \"EBS_MOUNT_POINT\" => \"text:#{@mount_point}\",\n \"EBS_TOTAL_VOLUME_GROUP_SIZE\" => \"text:#{new_size}\",\n \"OPT_DB_FORCE_RESTORE\" => \"text:#{force}\",\n \"EBS_LINEAGE\" => \"text:#{@lineage}\" }\n audit = server.run_executable(@scripts_to_run['grow_volume'], options)\n audit.wait_for_completed\n end", "def take\n check\n `adb pull /dev/graphics/fb0 fb0`\n `dd bs=1920 count=800 if=fb0 of=fb0b`\n `ffmpeg -vframes 1 -vcodec rawvideo -f rawvideo -pix_fmt rgb565 -s 480x800 -i fb0 -f image2 -vcodec png #{@destination_folder}/#{@prefix}#{@no}.png`\n `rm fb0b`\n end", "def snapshotInfo(_refresh = false)\n sni = nil\n @cacheLock.synchronize(:SH) do\n sni = @invObj.dupObj(snapshotInfo_locked)\n end\n (sni)\n end", "def thumbnail(command)\n path = '/' + clean_up(command[1])\n dst = command[2]\n out, metadata = @client.files.thumbnail(path)\n pp metadata\n open(dest, 'w') { |f| f.puts 
out }\n puts \"wrote thumbnail #{ dst }.\"\n end", "def system_backup\n\n\n end", "def backup2Drive(src,conf)\n dest = conf[:backupDrive]\n dest = dest + \"/\" unless dest [-1] =~ /[\\/\\\\]/\n dest = dest + src\n puts src\n puts dest\n FileUtils.mkdir_p(File.dirname(dest))\n FileUtils.cp(src, dest)\n puts aktTime()+\" archive copied\"\n cleanUp(conf) if conf[:generations]\n \nend", "def xRestoreDump()\n puts \"Back up commencing...\"\n Dir.chdir('/Users/jeydurai')\n system('start_mongorestore.bat')\n end", "def rsync_storage_files_to(instance=nil)\n hide_output {Kernel.system \"#{rsync_storage_files_to_command(instance)}\" if instance}\n end", "def screenshot_large\n screenshot()\n end", "def create_snapshot(snapshot_name)\n execute(:create_snapshot, VmId: vm_id, SnapName: snapshot_name)\n end", "def revert_to_snapshot(name)\n raise Fission::Error,\"VM #{@name} does not exist\" unless self.exists?\n\n command = \"#{vmrun_cmd} revertToSnapshot #{vmx_path.shellescape} \\\"#{name}\\\" 2>&1\"\n output = `#{command}`\n\n response = Fission::Response.new :code => $?.exitstatus\n response.output = output unless response.successful?\n\n response\n end", "def blob\n generate\n storage.get(path).body\n end", "def volume_create_from_snap(source, name, snapshot_id)\n retries = 3\n begin \n @log.info \"Creating volume #{name} from snapshot id #{snapshot_id}...\"\n ret = @cloud_stack.create_volume(name, ZONE, nil, snapshot_id)\n id = ret[\"createvolumeresponse\"][\"jobid\"]\n wait_for_job id\n rescue Exception => e\n retries -= 1\n if retries > 0\n @log.error \"Failed. #{e.message}. Retrying...\"\n retry\n end\n raise e\n end\n vol_id = ret[\"createvolumeresponse\"][\"id\"]\n @log.info \"Created volume id: #{vol_id}\"\n vol_id\n end", "def wRestoreDump()\n puts \"Back up commencing...\"\n Dir.chdir('/Users/jeydurai')\n system('start_mongorestore.bat')\n end", "def set_snapshot\n @snapshot = Snapshot.find(params[:id])\n end", "def write_snapshots\n FileUtils.mkdir_p(snapshots_path)\n File.write(snapshot_path, Snapshot.dump(snapshots))\n end", "def snap()\n @snap_id += 1\n @snap[@snap_id] = @current.clone\n @snap_id\n end", "def apply_snapshot(snapshot)\n raise \"apply_snapshot not implemented in #{self}\"\n end", "def snapshot_path instance_id, cluster_id, snapshot_id\n Admin::V2::BigtableTableAdminClient.snapshot_path(\n project_id,\n instance_id,\n cluster_id,\n snapshot_id\n )\n end", "def create_snapshots(isCopy)\n ec2 = AWS::EC2.new.client\n\n #get all volumes tagged as \"backup\"\n volumes = ec2.describe_volumes(:filters => [:name => 'tag-key', :values => ['backup']])\n snapshots = []\n\n #loop thru and create snapshots for all these volumes\n if volumes \n volumes.data[:volume_set].each do |v|\n name = get_tagvalue(v, \"Name\")\n snap = ec2.create_snapshot(:volume_id => v[:volume_id], :description => \"Backup for \" + Time.now.to_s + \" - created by SDK\")\n if snap\n snapshots << snap.data\n #add name tag\n ec2.create_tags(:resources => [snap.data[:snapshot_id]], :tags => [{:key => \"Name\", :value => name + \" backup\"}])\n\n #now copy snapshots to another region\n if isCopy\n copy_snapshot(snap.data[:snapshot_id], AWSCONFIG[:default_region], AWSCONFIG[:backup_region], \n name + \" backup\", \"Backup for \" + Time.now.to_s + \" - created by SDK\")\n end\n end\n end\n end\n\n return snapshots\n end", "def delete_from_disk; end", "def restore_snapshot(snapshot_name)\n execute(:restore_snapshot, VmId: vm_id, SnapName: snapshot_name)\n end", "def run\n super\n\n uri = _get_entity_attribute 
\"name\"\n filename = \"screenshot_#{rand(100000000000000)}.png\"\n full_path = \"#{Dir.pwd}/public/screenshots/#{filename}\"\n\n begin\n @task_log.log \"Saving to... #{full_path}\"\n\n f = Screencap::Fetcher.new(uri)\n screenshot = f.fetch(\n :output => full_path, # don't forget the extension!\n # optional:\n #:div => '.header', # selector for a specific element to take screenshot of\n #:width => 1024,\n #:height => 768,\n #:top => 0, :left => 0, :width => 100, :height => 100 # dimensions for a specific area\n )\n\n @task_log.good \"Saved to #{full_path}\"\n _create_entity \"Screenshot\", :name => \"#{uri}_screenshot\", :uri => \"#{$intrigue_server_uri}/screenshots/#{filename}\"\n\n rescue Screencap::Error => e\n @task_log.error \"Unable to capture screenshot: #{e}\"\n end\n\n end", "def create_snapshot\n @attributes[:create_snapshot]\n end", "def write_disk_cache\n\t\t\t\tif File.exists?(\"#{$cache_path}/#{@host}_disks.yaml\")\n\t\t\t\t\t\t$log.info \"Doing backup of cachefile to #{@host}_disks.yaml\"\n\t\t\t\t\t\tFileUtils.mv(\"#{$cache_path}/#{@host}_disks.yaml\",\"#{$cache_path}/#{@host}_disks_backup.yaml\")\n\t\t\t\t\t\t@olddisks = YAML::load(File.read(\"#{$cache_path}/#{@host}_disks_backup.yaml\"))\n\t\t\t\tend\n\t\t\t\tFile.open( \"#{$cache_path}/#{@host}_disks.yaml\", 'w' ) do |out|\n\t\t\t\t\t\tYAML.dump(@disks,out)\n\t\t\t\tend\n\t\t\t\t$log.info \"Disk cache written for #{@host}\"\n\t\tend", "def provision_and_mount_volume(server, disk_size, device)\n unless provider.find_server_device(server, device)\n say \"Provisioning #{disk_size}Gb persistent disk for inception VM...\"\n provider.create_and_attach_volume(\"Inception Disk\", disk_size, server, device)\n end\n\n # Format and mount the volume\n if aws?\n say \"Skipping volume mounting on AWS 12.10 inception VM until its fixed\", [:yellow, :bold]\n run_ssh_command_until_successful server, \"sudo mkdir -p /var/vcap/store\"\n else\n say \"Mounting persistent disk as volume on inception VM...\"\n run_ssh_command_until_successful server, \"sudo mkfs.ext4 #{device} -F\"\n run_ssh_command_until_successful server, \"sudo mkdir -p /var/vcap/store\"\n run_ssh_command_until_successful server, \"sudo mount #{device} /var/vcap/store\"\n end\n end", "def storage_adapter\n Valkyrie::StorageAdapter.find(:disk_via_copy)\n end", "def copy_snapshots! 
snaps\n # account_snapshots = account.snapshots\n # backed_up_snapshots = self.snapshots\n # backed_up_snapshot_descriptions = backed_up_snapshots.collect { |bus| bus.description }\n if snaps.count > 0\n VfSnapshots.verbose \"\\n#{snaps.count} to copy, here we go.\"\n else\n VfSnapshots.verbose \"\\nNothing to copy.\"\n end\n begin\n tags = { tags: [\n {\n key: \"Account\",\n value: account.name,\n },\n ]\n }\n\n snaps.each_with_index do |snapshot,idx|\n VfSnapshots.verbose \"\\n[#{idx+1} of #{snaps.length}] Copying #{account.name} #{snapshot.description}\"\n\n # modify the source snapshot to share with the backup account\n snapshot.modify_attribute(attribute:'createVolumePermission', operation_type: 'add', user_ids: [ account.account_id ], create_volume_permission: { add: [{ user_id: account_id }] } )\n # get it in the backup account\n shared_snapshot = ec2.snapshot(snapshot.id)\n region = 'us-east-1'\n new_desc = \"#{account.name} #{snapshot.description}\"\n copy_response = shared_snapshot.copy(\n description: new_desc,\n source_region: region,\n destination_region: region,\n )\n ec2.snapshot(copy_response.snapshot_id).create_tags tags\n # puts \"Sleeping...\"\n # sleep 1\n # puts \"Woke!\"\n end\n rescue Aws::EC2::Errors::ResourceLimitExceeded\n VfSnapshots.verbose \"\\nThrottled!\"\n exit\n end\n end", "def copy_tags_to_snapshot\n data[:copy_tags_to_snapshot]\n end", "def copy_tags_to_snapshot\n data[:copy_tags_to_snapshot]\n end" ]
[ "0.713922", "0.67944145", "0.6793815", "0.6761247", "0.6709919", "0.6596206", "0.6394527", "0.634374", "0.62239885", "0.6195495", "0.61737245", "0.6168773", "0.614181", "0.61157143", "0.61157143", "0.6018739", "0.60147166", "0.6005463", "0.59679425", "0.59664416", "0.5944345", "0.58778983", "0.58474797", "0.58466554", "0.5843366", "0.5830781", "0.5754781", "0.5746013", "0.57404774", "0.5725299", "0.5720744", "0.5713066", "0.5707389", "0.57038295", "0.5694739", "0.5643876", "0.56193143", "0.5618518", "0.5615019", "0.5601459", "0.5597227", "0.5575758", "0.5553705", "0.55289185", "0.5528368", "0.55226004", "0.5515165", "0.5488127", "0.5477087", "0.5421907", "0.5398291", "0.5382536", "0.53812903", "0.5371179", "0.5370347", "0.53524446", "0.53505725", "0.5348031", "0.5340201", "0.53262335", "0.53172755", "0.53050804", "0.5302945", "0.52958363", "0.5295044", "0.52902347", "0.52900505", "0.5286883", "0.5283581", "0.5272056", "0.52702904", "0.52613884", "0.5253963", "0.5238301", "0.5234505", "0.5233893", "0.52323127", "0.5230256", "0.52250576", "0.5220695", "0.52157575", "0.52028984", "0.5202647", "0.5192448", "0.5190667", "0.518902", "0.51865256", "0.51771146", "0.5174676", "0.51742023", "0.5173635", "0.51596177", "0.5158646", "0.5132295", "0.51316017", "0.51311594", "0.51001585", "0.5093925", "0.50904185", "0.50904185" ]
0.68221915
1
Delete a disk snapshot
def delete_snapshot(snapshot_id) with_thread_name("delete_snapshot(#{snapshot_id})") do snapshot = @ec2_client.snapshots[snapshot_id] if snapshot.status == :in_use raise Bosh::Clouds::CloudError, "snapshot '#{snapshot.id}' can not be deleted as it is in use" end snapshot.delete logger.info("snapshot '#{snapshot_id}' deleted") end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def snap_delete(volume,snapshot)\n output = @filer.invoke(\"snapshot-create\",\"volume\",volume,\"snapshot\",snapshot)\n if (output.results_status() == \"failed\")\n raise \"Error #{output.results_errno} creating snapshot #{snapshot} on #{volume}: #{output.results_reason()}\\n\"\n end\n end", "def snap_delete(volume,snapshot)\n output = @filer.invoke(\"snapshot-delete\",\"volume\",volume,\"snapshot\",snapshot)\n if (output.results_status() == \"failed\")\n raise \"Error #{output.results_errno} deleting snapshot #{snapshot} on #{volume}: #{output.results_reason()}\\n\"\n end\n end", "def delete_snapshot(snapshot_id)\n end", "def delete_snapshot snapshot\n subscriber.delete_snapshot snapshot: snapshot_path(snapshot)\n end", "def delete_snapshot(snapshot_name)\n execute(:delete_snapshot, VmID: vm_id, SnapName: snapshot_name)\n end", "def delete\n @service.delete_snapshot(self)\n end", "def delete_snapshot(snapshot_cid)\n @telemetry_manager.monitor('initialize') do\n _init_azure\n end\n with_thread_name(\"delete_snapshot(#{snapshot_cid})\") do\n @telemetry_manager.monitor('delete_snapshot', id: snapshot_cid) do\n snapshot_id = DiskId.parse(snapshot_cid, _azure_config.resource_group_name)\n snapshot_name = snapshot_id.disk_name\n if snapshot_name.start_with?(MANAGED_DATA_DISK_PREFIX)\n @disk_manager2.delete_snapshot(snapshot_id)\n else\n @disk_manager.delete_snapshot(snapshot_id)\n end\n @logger.info(\"The snapshot '#{snapshot_id}' is deleted\")\n end\n end\n end", "def delete_snapshot(name)\n Fission::Action::Snapshot::Deleter.new(self).delete_snapshot(name)\n end", "def delete(params = {})\n response = client.delete \"/_snapshot/{repository}/{snapshot}\", update_params(params, action: \"snapshot.delete\", rest_api: \"snapshot.delete\")\n response.body\n end", "def delete_snapshot(snapshot_id)\n raise Bosh::Clouds::NotSupported.new(false),\n 'delete_snapshot is not supported.'\n end", "def delete_snapshot(uuid, snapshot_id)\n # Sometimes this command fails with 'Data synchronization is currently\n # in progress'. 
Just wait and retry.\n retryable(on: VagrantPlugins::Parallels::Errors::ExecutionError, tries: 2, sleep: 2) do\n execute_prlctl('snapshot-delete', uuid, '--id', snapshot_id)\n end\n end", "def delete_snapshot(snapshot_id)\n with_thread_name(\"delete_snapshot(#{snapshot_id})\") do\n snapshot = @ec2_resource.snapshot(snapshot_id)\n snapshot.delete\n logger.info(\"snapshot '#{snapshot_id}' deleted\")\n end\n end", "def delete\n ensure_service!\n service.delete_snapshot name\n true\n end", "def destroy!\n destroy_storage_snapshot(_id)\n end", "def delete_snapshot(snapshot_id)\n with_thread_name(\"delete_snapshot(#{snapshot_id})\") do\n snapshot = @ec2_resource.snapshot(snapshot_id)\n begin\n snapshot.delete\n rescue Aws::EC2::Errors::InvalidSnapshotNotFound => e\n logger.info(\"snapshot '#{snapshot_id}' not found\")\n end\n logger.info(\"snapshot '#{snapshot_id}' deleted\")\n end\n end", "def delete_snapshot(vm, name)\n snapshot = enumerate_snapshots(vm).find { |s| s.name == name }\n\n # No snapshot matching \"name\"\n return nil if snapshot.nil?\n\n task = snapshot.snapshot.RemoveSnapshot_Task(removeChildren: false)\n\n if block_given?\n task.wait_for_progress do |progress|\n yield progress unless progress.nil?\n end\n else\n task.wait_for_completion\n end\n end", "def delete_snapshot(snapshot_id)\n log \"\\e[0;31m:: Deleting snapshot:\\e[0m #{snapshot_id}\"\n\n begin\n ec2.delete_snapshot(snapshot_id)\n sleep 0.2\n rescue Fog::Compute::AWS::NotFound\n log \"Failed to delete snapshot: #{snapshot_id}; setting { 'protected' => true }\"\n ec2.tags.create({resource_id: snapshot_id, key: 'protected', value: 'true'})\n rescue Fog::Compute::AWS::Error\n log \"API Error\"\n end\n\n end", "def delete_from_disk; end", "def delete_snapshot(key)\n\n raise ArgumentError.new('key must be an String') unless( key.is_a?(String) )\n\n endpoint = format( '/api/snapshots-delete/%s', key)\n @logger.debug(\"Deleting snapshot id #{key} (GET #{endpoint})\") if @debug\n\n delete(endpoint)\n end", "def delete_snapshot(name)\n result = get_snapshot(name)\n response = @client.rest_delete(result['uri'], { 'If-Match' => result['eTag'] }, @api_version)\n @client.response_handler(response)\n true\n end", "def delete_snapshot(snapshot_id)\n link = generate_request(\"DeleteSnapshot\",\n \"SnapshotId\" => snapshot_id.to_s)\n request_info(link, RightBoolResponseParser.new(:logger => @logger))\n rescue Exception\n on_exception\n end", "def delete_from_disk!\n if disk_filename.present?\n diskfile_s3 = diskfile\n Rails.logger.debug(\"Deleting #{diskfile_s3}\")\n RedmicaS3::Connection.delete(diskfile_s3)\n end\n\n Redmine::Thumbnail.batch_delete!(\n thumbnail_path('*').sub(/\\*\\.thumb$/, '')\n )\n end", "def delete! 
snaps\n if snaps.count > 0\n VfSnapshots.verbose \"\\n#{snaps.count} to delete, here we go.\"\n else\n VfSnapshots.verbose \"\\nNothing to delete.\"\n end\n begin\n snaps.each_with_index do |snapshot,idx|\n VfSnapshots.verbose \"[#{idx+1} of #{snaps.length}] Deleting #{account.name} #{snapshot.description}\"\n snapshot.delete\n # puts \"Sleeping...\"\n # sleep 1\n # puts \"Woke!\"\n end\n rescue Aws::EC2::Errors::ResourceLimitExceeded\n VfSnapshots.verbose \"\\nThrottled!\"\n exit\n end\n end", "def delete_fusion_vm_snapshot(options)\n clone_list = []\n if options['clone'].to_s.match(/\\*/) or options['clone'].to_s.match(/all/)\n clone_list = get_fusion_vm_snapshots(options)\n clone_list = clone_list.split(\"\\n\")[1..-1]\n else\n clone_list[0] = options['clone']\n end\n clone_list.each do |clone|\n fusion_vmx_file = get_fusion_vm_vmx_file(options)\n message = \"Information:\\tDeleting snapshot \"+clone+\" for #{options['vmapp']} VM \"+options['name']\n command = \"'#{options['vmrun']}' -T fusion deleteSnapshot '#{fusion_vmx_file}' '#{clone}'\"\n execute_command(options,message,command)\n end\n return\nend", "def deleteEBSSnapshot(client=nil,snapshots_to_delete=[],dry_run=true)\n return false if client.nil?\n unless snapshots_to_delete.instance_of? Array\n snapshots_to_delete = [snapshots_to_delete]\n end\n snapshots_to_delete.each do |snapshot|\n if dry_run\n printf \"\\e[33m\\\"Delete snapshot #{snapshot}?\\\" (y/n)? \\e[0m\"\n prompt = STDIN.gets.chomp\n next unless prompt == \"y\"\n end\n print \"Deleting ec2 snapshot #{snapshot}...\"\n begin\n # delete_snapshot API has no response\n client.delete_snapshot({\n dry_run: dry_run,\n snapshot_id: snapshot\n })\n puts \"\\e[32msuccess\\e[0m\"\n rescue Exception => e\n puts \"\\e[31mfailed - #{e.message}\\e[0m\"\n end\n end\n return true\nend", "def delete_db_snapshot(group_name)\n \n request({\n 'Action' => 'DeleteDBSnapshot',\n 'DBSnapshotIdentifier' => group_name,\n \n :parser => Fog::Parsers::AWS::RDS::DeleteDBSnapshot.new\n })\n end", "def destroy\n @snapshot.destroy\n\n respond_to do |format|\n format.html { redirect_to(snapshots_url) }\n format.xml { head :ok }\n end\n end", "def delete_blob(container, blob, options={})\n query = { }\n query['snapshot'] = options[:snapshot] if options[:snapshot]\n query['timeout'] = options[:timeout].to_s if options[:timeout]\n\n uri = blob_uri(container, blob, query)\n\n options[:delete_snapshots] = :include unless options[:delete_snapshots]\n\n headers = service_properties_headers\n headers['x-ms-delete-snapshots'] = options[:delete_snapshots].to_s if options[:delete_snapshots] && options[:snapshot] == nil\n\n call(:delete, uri, nil, headers)\n nil\n end", "def delete_virtual_machine_disk(disk_name)\n Loggerx.info \"Deleting Disk \\\"#{disk_name}\\\". 
\"\n path = \"/services/disks/#{disk_name}\"\n request = ManagementHttpRequest.new(:delete, path)\n request.call\n end", "def wipe_snapshots_data; end", "def wipe_snapshots_data; end", "def purgeOldSnapshots(profile,region)\n json = `aws --profile #{profile} --region #{region} ec2 describe-snapshots --owner-ids self`\n parsed = JSON.parse(json)\n parsed[\"Snapshots\"].each do |snapshot|\n desc = snapshot[\"Description\"]\n snapid = snapshot[\"SnapshotId\"]\n if desc.to_s.match('deleteafter')\n deletedate = desc.to_s.split('-deleteafter').last\n vol = desc.to_s.split('-deleteafter').first\n if Date.strptime(deletedate, \"%Y-%m-%d\") < Date.today\n puts \"Deleting #{snapid} for volume #{vol}- Due date: #{deletedate}\"\n `aws --profile #{profile} --region #{region} ec2 delete-snapshot --snapshot-id #{snapid}`\n end\n end\n end\nend", "def user_deleted_snapshot _user, snapshot\n data = {\n category: 'Snapshots',\n action: 'Deleted a Snapshot',\n label: snapshot.name,\n value: nil,\n bounce: false,\n }\n\n create_event data\n end", "def destroy!\r\n self.class.service_instance.delete_blob(path)\r\n end", "def delete_snapshot request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_delete_snapshot_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Longrunning::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def delete_gdom_disk(options)\n gdom_dir = $ldom_base_dir+\"/\"+options['name']\n client_disk = gdom_dir+\"/vdisk0\"\n message = \"Information:\\tRemoving disk \"+client_disk\n command = \"rm #{client_disk}\"\n execute_command(options,message,command)\n return\nend", "def delete_blob(container, blob, options={})\n query = { }\n StorageService.with_query query, 'snapshot', options[:snapshot]\n StorageService.with_query query, 'timeout', options[:timeout].to_s if options[:timeout]\n\n uri = blob_uri(container, blob, query)\n\n options[:delete_snapshots] = :include unless options[:delete_snapshots]\n\n headers = StorageService.common_headers\n StorageService.with_header headers, 'x-ms-delete-snapshots', options[:delete_snapshots].to_s if options[:delete_snapshots] && options[:snapshot] == nil\n add_blob_conditional_headers options, headers\n\n call(:delete, uri, nil, headers, options)\n nil\n end", "def delete_db_snapshot( options = {} )\n raise ArgumentError, \"No :db_snapshot_identifier provided\" if options.does_not_have?(:db_snapshot_identifier)\n\n params = {}\n params['DBSnapshotIdentifier'] = options[:db_snapshot_identifier]\n\n return response_generator(:action => \"DeleteDBSnapshot\", :params => params)\n end", "def delete_storage(storage_uuid)\n response = delete \"storage/#{storage_uuid}\"\n\n response\n end", "def delete_storage(storage_uuid)\n response = delete \"storage/#{storage_uuid}\"\n\n response\n end", "def delete_from_disk\n thread_local_store.destroy\n end", "def destroy\n ret = qmgmt(['volume', 'delete', resource[:name]])\n out = Array.new\n ret.each_line { |l|\n out.push(' ' + l)\n }\n if ( ret.exitstatus != 0 )\n fail(\"quobyte volume delete #{resource[:name]} failed with status 
#{ret.exitstatus.to_s}. Output follows.\" + out.join(\"\\n\"))\n end\n end", "def destroy\n @snap = Snap.find(params[:id])\n @snap.destroy\n\n respond_to do |format|\n format.html { redirect_to(snaps_url) }\n format.xml { head :ok }\n end\n end", "def delete(volume)\n dest_path = dest_path_resolver.path(volume)\n removed = fs.remove(dest_path)\n log(volume, removed ? \"removed\" : \"not present\")\n fs.rm_empty_tree(dest_path.parent)\n end", "def deletevolume\n if not checkRequirements([\"thezone\",\"thevolume\"])\n return false\n end\n checkToken(@thezone)\n submit = queryGCE(:path => '/compute/v1beta15/projects/#{@thezone.name}/zones/#{@thevolume.azone.name}/disks/#{@thevolume.serial}', :method => 'delete', :options => '', :access_token => @thezone.token )\n checkQuery(:type => 'zone', :token => @thezone.token, :projectname => @thezone.name, :zonename => @thevolume.azone.name, :operationname => submit[\"name\"])\n end", "def delete_consistency_group_snapshot_view(sys_id, cg_id, view_id)\n\t response = request(:delete, \"/devmgr/v2/storage-systems/#{sys_id}/consistency-groups/#{cg_id}/views/#{view_id}\")\n status(response, 204, 'Failed to remove consistency group snapshot view')\n end", "def delete\n fast_storage.delete\n warehouse_storage.delete\n end", "def delete\n response = client[\"/vaults/#{vault_id}/blobs/#{blob_id}\"].delete\n response.code == 200\n end", "def destroy_zero_sized_snapshots(snapshots)\n ### Shift off the last, so it maintains the changes\n saved_snapshot = snapshots.shift(1)\n remaining_snapshots = [saved_snapshot]\n snapshots.each do |snapshot|\n if snapshot.is_zero?\n puts \"Destroying zero-sized snapshot: #{snapshot.name}\" if $verbose\n snapshot.destroy\n else\n remaining_snapshots << snapshot\n end\n end\n remaining_snapshots\nend", "def destroy\n @vdisk = Vdisk.find(params[:id])\n @vdisk.destroy\n\n respond_to do |format|\n format.html { redirect_to vdisks_url }\n format.json { head :no_content }\n end\n end", "def off\n attachment = hpg_resolve(shift_argument)\n return unless confirm_command(attachment.config_var, 'Deactiving will destroy all backups')\n action(\"Dectivating #{attachment.config_var} (#{attachment.resource_name})\") do\n RestClient.delete( authed_pgsnapshot_url(\"/client/resource/#{attachment.resource_name}\"))\n end\n end", "def delete_db_instance(identifier, snapshot_identifier, skip_snapshot = false) \n params = {}\n params['FinalDBSnapshotIdentifier'] = snapshot_identifier if snapshot_identifier\n request({\n 'Action' => 'DeleteDBInstance',\n 'DBInstanceIdentifier' => identifier,\n 'SkipFinalSnapshot' => skip_snapshot, \n :parser => Fog::Parsers::AWS::RDS::DeleteDBInstance.new\n }.merge(params))\n end", "def check_snapshot(vmname, node)\n if node.name =~ /^prekernel/\n compare_time = Time.now - CLEAN_AFTER_DAYS * 24 * 60 * 60\n if compare_time > node.createTime\n puts 'Deleting snapshot for ' + vmname + ' | ' + node.name + ' | ' + node.createTime.iso8601\n\n snapshot_task = node.snapshot.RemoveSnapshot_Task(removeChildren: false)\n snapshot_task = snapshot_task.wait_for_completion\n end\n end\n\n unless node.childSnapshotList.empty?\n node.childSnapshotList.each { |item| check_snapshot(vmname, item) }\n end\nend", "def destroy\n @snap.update(viewed: true)\n end", "def delete_disk(disk_id)\n with_thread_name(\"delete_disk(#{disk_id})\") do\n begin\n # Skip if disk does not exist\n return unless has_disk?(disk_id)\n @logger.debug(\"Deleting volume group #{disk_id}...\")\n @vol_group_manager.delete_volume_group(disk_id)\n 
@logger.debug(\"Deleted volume group #{disk_id}.\")\n rescue => e\n logger.error(e)\n cloud_error(e.message)\n end\n end\n end", "def delete_disk(disk_cid)\n @telemetry_manager.monitor('initialize') do\n _init_azure\n end\n with_thread_name(\"delete_disk(#{disk_cid})\") do\n @telemetry_manager.monitor('delete_disk', id: disk_cid) do\n disk_id = DiskId.parse(disk_cid, _azure_config.resource_group_name)\n if @use_managed_disks\n # A managed disk may be created from an old blob disk, so its name still starts with 'bosh-data' instead of 'bosh-disk-data'\n # CPI checks whether the managed disk with the name exists. If not, delete the old blob disk.\n unless disk_id.disk_name.start_with?(MANAGED_DATA_DISK_PREFIX)\n disk = @disk_manager2.get_data_disk(disk_id)\n return @disk_manager.delete_data_disk(disk_id) if disk.nil?\n end\n @disk_manager2.delete_data_disk(disk_id)\n else\n @disk_manager.delete_data_disk(disk_id)\n end\n end\n end\n end", "def delete\n File::unlink @path+\".lock\" rescue nil\n File::unlink @path+\".new\" rescue nil\n File::unlink @path rescue nil\n end", "def delete_backup!(deleted_file)\n _delete(backup_file(deleted_file), action: :backup)\n end", "def delete_storage(request, params)\n # --- Get the Image ---\n image = ImageOCCI.new(\n Image.build_xml(params[:id]),\n @client)\n\n # --- Delete the Image ---\n rc = image.delete\n if OpenNebula.is_error?(rc)\n return rc, CloudServer::HTTP_ERROR_CODE[rc.errno]\n end\n\n return \"\", 204\n end", "def delete\n File::unlink @path+\".lock\" rescue nil\n File::unlink @path+\".new\" rescue nil\n File::unlink @path rescue Errno::ENOENT\n end", "def delete_db_instance(identifier, snapshot_identifier, skip_snapshot = false)\n params = {}\n params['FinalDBSnapshotIdentifier'] = snapshot_identifier if snapshot_identifier\n request({\n 'Action' => 'DeleteDBInstance',\n 'DBInstanceIdentifier' => identifier,\n 'SkipFinalSnapshot' => skip_snapshot,\n :parser => Fog::Parsers::AWS::RDS::DeleteDBInstance.new\n }.merge(params))\n end", "def delete_metadata(key_name)\n requires :id\n service.delete_snapshot_metadata(id, key_name)\n true\n end", "def snapshot_disk(disk_id, metadata)\n raise Bosh::Clouds::NotSupported.new(false),\n 'snapshot_disk is not supported.'\n end", "def cleanup_storage vm\n vm.volumes.each do |vol|\n @logger.debug \"Deleting volume #{vol.name} for OpenStack host #{vm.name}\"\n vm.detach_volume(vol.id)\n vol.wait_for { ready? 
}\n vol.destroy\n end\n end", "def process_cloud_volume_snapshots(snapshots, task)\n return if snapshots.empty?\n\n if task == \"destroy\"\n snapshots.each do |snapshot|\n audit = {\n :event => \"cloud_volume_snapshot_record_delete_initiateed\",\n :message => \"[#{snapshot.name}] Record delete initiated\",\n :target_id => snapshot.id,\n :target_class => \"CloudVolumeSnapshot\",\n :userid => session[:userid]\n }\n AuditEvent.success(audit)\n snapshot.delete_snapshot_queue(session[:userid])\n end\n add_flash(n_(\"Delete initiated for %{number} Cloud Volume Snapshot.\",\n \"Delete initiated for %{number} Cloud Volume Snapshots.\",\n snapshots.length) % {:number => snapshots.length})\n end\n end", "def process_cloud_volume_snapshots(snapshots, task)\n return if snapshots.empty?\n\n if task == \"destroy\"\n snapshots.each do |snapshot|\n audit = {\n :event => \"cloud_volume_snapshot_record_delete_initiateed\",\n :message => \"[#{snapshot.name}] Record delete initiated\",\n :target_id => snapshot.id,\n :target_class => \"CloudVolumeSnapshot\",\n :userid => session[:userid]\n }\n AuditEvent.success(audit)\n snapshot.delete_snapshot_queue(session[:userid])\n end\n add_flash(n_(\"Delete initiated for %{number} Cloud Volume Snapshot.\",\n \"Delete initiated for %{number} Cloud Volume Snapshots.\",\n snapshots.length) % {:number => snapshots.length})\n end\n end", "def delete\n @service.delete_blob(self)\n end", "def remove_entity_snapshot(id)\n entities.where(:id => id).update(:snapshot => nil)\n end", "def delete(command)\n pp @client.files.delete(clean_up(command[1]))\n end", "def delete(session, id)\n write_task('rvpe.image.delete', session) do\n err_msg = \"You don't have permission to delete the image.\"\n sanity_check(session, id, err_msg) do\n call_one_xmlrpc('one.image.delete', session, id)\n end\n end\n end", "def delete\n ensure_service!\n service.delete_backup instance_id, backup_id\n true\n end", "def detach_disk(instance_id, disk_id)\n with_thread_name(\"detach_disk(#{instance_id}, #{disk_id})\") do\n @cloud_core.detach_disk(instance_id, disk_id) do |disk_id|\n update_agent_settings(instance_id) do |settings|\n settings['disks'] ||= {}\n settings['disks']['persistent'] ||= {}\n settings['disks']['persistent'].delete(disk_id)\n end\n end\n end\n end", "def snapshot_disk(disk_id, metadata={})\n # TODO: Get vhd from 'vhd' container in vm storage account and use blob client to snapshot it\n end", "def destroy\n request(:delete, \"/computing/image/#{uuid}\")\n true\n end", "def delete\n unless exists?\n return Response.new :code => 1, :message => 'VM does not exist'\n end\n\n running_response = running?\n return running_response unless running_response.successful?\n\n if running_response.data\n message = 'The VM must not be running in order to delete it.'\n return Response.new :code => 1, :message => message\n end\n\n FileUtils.rm_rf path\n Metadata.delete_vm_info path\n\n Response.new :code => 0\n end", "def destroy\n @inventory_snapshot_content.destroy\n respond_to do |format|\n format.html { redirect_to inventory_snapshot_contents_url, notice: 'Inventory snapshot content was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def remove_storage_file\n FileUtils.rm(file_path)\n end", "def delete(uuid, key)\n request(method: 'DELETE', uri: \"/files/#{uuid}/metadata/#{key}/\")\n end", "def delete_file storage_file_path\n @bucket.file(storage_file_path).delete if @bucket.file storage_file_path\n end", "def delete_file storage_file_path\n @bucket.file(storage_file_path).delete if @bucket.file storage_file_path\n end", "def destroy\n Rails.logger.debug {\"destroying gridfs file #{@id}\"}\n if persisted?\n Photo.mongo_client.database.fs.find(:_id=>BSON::ObjectId.from_string(@id)).delete_one\n end\n end", "def destroy\n File.delete(self.artifact)\n end", "def detach_disk(instance_id, disk_id)\n with_thread_name(\"detach_disk(#{instance_id}, #{disk_id}):v2\") do\n @cloud_core.detach_disk(instance_id, disk_id) do |disk_id|\n if @stemcell_api_version < 2\n update_agent_settings(instance_id) do |settings|\n settings['disks'] ||= {}\n settings['disks']['persistent'] ||= {}\n settings['disks']['persistent'].delete(disk_id)\n end\n end\n end\n end\n end", "def delete_object(druid)\n storage_object = find_storage_object(druid)\n object_pathname = storage_object.object_pathname\n delete_storage(object_pathname)\n\n # TODO: remove any replicas from the replica-cache.\n\n end", "def delete_blob(container, blob, key = nil, options = {})\n key ||= properties.key1\n\n url = File.join(properties.primary_endpoints.blob, container, blob)\n url += \"?snapshot=\" + options[:date] if options[:date]\n\n headers = build_headers(url, key, :blob, :verb => 'DELETE')\n\n response = ArmrestService.send(\n :rest_delete,\n :url => url,\n :headers => headers,\n :proxy => proxy,\n :ssl_version => ssl_version,\n :ssl_verify => ssl_verify\n )\n\n headers = Azure::Armrest::ResponseHeaders.new(response.headers)\n headers.response_code = response.code\n\n headers\n end", "def delete\n stop\n [ @resource['instances_dir'] + \"/\" + @resource[:name],\n @resource['instances_dir'] + \"/\" + \"_\" + @resource[:name]\n ].each do |dir|\n FileUtils.rm_rf(dir) if File.directory?(dir)\n end\n end", "def destroy\n @physical_disk = PhysicalDisk.find(params[:id])\n @physical_disk.destroy\n\n respond_to do |format|\n format.html { redirect_to physical_disks_url }\n format.json { head :no_content }\n end\n end", "def destroy\n notice \"Removing #{@resource_copy[:path]}\"\n FileUtils.rm_rf(@resource_copy[:path])\n end", "def delete\n storage.delete(id)\n end", "def deleteBackup(target)\n\t\t\n\t\tclient = Octokit::Client.new(:access_token=>User[target.user_id].access_token)\n\n\t\t# delete github hook\n\t\tclient.remove_hook(target.repo_target, target.webhook_id)\n\t\t# delete on s3\n\t\ts3 = AWS::S3.new\n\t\trepo_archive = s3.buckets['anamnesis-112358'].objects[target.external_id+'.zip']\n\t\tif repo_archive.exists? 
then\n\t\t\trepo_archive.delete\n\t\tend\n\n\t\t# finally, delete it :(s)\n\t\ttarget.delete\n\tend", "def delete\n ::File.unlink(@path)\n end", "def DeletePartitionTable(disk, label)\n Builtins.y2milestone(\"DeletePartitionTable disk:%1 label:%2\", disk, label)\n label = DefaultDiskLabel(disk) if Builtins.isempty(label)\n ret = @sint.destroyPartitionTable(disk, label)\n if ret<0\n Builtins.y2error(\"DeletePartitionTable sint ret:%1\", ret)\n end\n UpdateTargetMap()\n ret == 0\n end", "def delete\n FileUtils.rm_rf(to_s)\n self\n end", "def destroy\n File.unlink(@resource[:path])\n Puppet.debug \"deleted file #{@resource[:path]}\"\n end", "def purge\n purge_file\n cdb_destroy\n end", "def detach_storage(server_uuid, address:)\n data = {\n \"storage_device\" => {\n \"address\" => address\n }\n }\n\n json = JSON.generate data\n\n response = post \"server/#{server_uuid}/storage/detach\", json\n\n response\n end", "def detach()\n $ec2.describe_volumes([self.id]).each do |result|\n if result[:aws_attachment_status] == 'attached'\n $ec2.detach_volume(self.id)\n end\n end\n self.attached_instance = nil\n self.save()\n end", "def delete\n begin\n object = bucket.objects.find(@path)\n object.destroy\n true\n rescue Exception => e\n # If the file's not there, don't panic\n nil\n end\n end", "def wipe_snapshots_data\n @snapshots_cycle = 0\n @snapshot_groups = {}\n end", "def delete_virtualization_virtual_disk(moid, opts = {})\n delete_virtualization_virtual_disk_with_http_info(moid, opts)\n nil\n end" ]
[ "0.80088663", "0.79958016", "0.77460605", "0.7667114", "0.75546783", "0.75281346", "0.7479069", "0.74299383", "0.7417895", "0.73651665", "0.73398304", "0.7300385", "0.72610056", "0.7257571", "0.72526306", "0.7195017", "0.7184923", "0.7133952", "0.695794", "0.6955977", "0.6933833", "0.67865413", "0.666757", "0.6622121", "0.65217793", "0.6502093", "0.64637214", "0.6367879", "0.63324845", "0.6329869", "0.6329869", "0.62753016", "0.62489027", "0.6233529", "0.6211857", "0.61940145", "0.61738163", "0.6170505", "0.6162938", "0.6162938", "0.615317", "0.6144704", "0.61385584", "0.61360645", "0.61042", "0.60760796", "0.5998211", "0.59963095", "0.5958448", "0.5919861", "0.59172976", "0.5900388", "0.589024", "0.5885425", "0.58690304", "0.5860148", "0.5858332", "0.5831771", "0.58164436", "0.5810075", "0.5792119", "0.57888395", "0.5783083", "0.57568705", "0.575651", "0.575651", "0.5748483", "0.5747253", "0.5741987", "0.5739022", "0.57196605", "0.5716733", "0.5708567", "0.5704674", "0.569685", "0.569438", "0.56771374", "0.5669851", "0.5668384", "0.5668384", "0.5664252", "0.5635635", "0.5632729", "0.56275433", "0.56196535", "0.5612864", "0.5611706", "0.5607736", "0.5605924", "0.55917245", "0.5585688", "0.55806834", "0.557302", "0.5562467", "0.5561905", "0.5560649", "0.5552361", "0.5549952", "0.55457056", "0.55424154" ]
0.71849513
16
Configure network for an EC2 instance. No longer supported.
def configure_networks(instance_id, network_spec) raise Bosh::Clouds::NotSupported, "configure_networks is no longer supported" end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def configure_networks(instance_id, network_spec)\n raise Bosh::Clouds::NotSupported, 'configure_networks is no longer supported'\n end", "def configure_private_network(config, ips, net_name)\n ips.each do |ip|\n config.vm.network 'private_network',\n ip: ip,\n netmask: '255.255.255.0',\n virtualbox__intnet: net_name\n end\nend", "def configure(ec2, instance)\n if @vip_network\n configure_vip(ec2, instance)\n else\n # If there is no vip network we should disassociate any elastic IP\n # currently held by instance (as it might have had elastic IP before)\n elastic_ip = instance.elastic_ip\n\n if elastic_ip\n @logger.info(\"Disassociating elastic IP `#{elastic_ip}' \" \\\n \"from instance `#{instance.id}'\")\n instance.disassociate_elastic_ip\n end\n end\n end", "def config_network(instance, vm_config)\n\n vm_config[\"networks\"].each do |network, config|\n if config[\"type\"] == \"private\" then\n if config[\"ip\"] then\n instance.vm.network :private_network, ip: config[\"ip\"]\n end\n elsif config[\"type\"] == \"public\" then\n instance.vm.network :public_network\n end\n end if vm_config[\"networks\"]\n\n vm_config[\"ports\"].each do |port, config|\n\n raise \"At least the guest port is needed in 'guest_port' variable\" \\\n if config[\"guest_port\"].nil?\n\n instance.vm.network \"forwarded_port\",\n guest: config[\"guest_port\"],\n host: config[\"host_port\"] || config[\"guest_port\"],\n protocol: config[\"protocol\"] || \"tcp\",\n auto_correct: config[\"auto_correct\"] || true\n end if vm_config[\"ports\"]\n\nend", "def configure_networks(vm_id, network_spec)\n raise Bosh::Clouds::NotSupported.new(false),\n 'configure_networks is not supported.'\n end", "def configure_networking(\n machine,\n instance_profile,\n provider = 'virtualbox'\n)\n\n instance_networking = lookup_values_yaml(instance_profile, ['providers', provider, 'instance', 'networking'])\n\n return false unless instance_networking\n\n name = replace_characters_string(lookup_values_yaml(instance_profile, ['name']))\n\n configure_interfaces(machine, name, instance_networking)\n\n configure_forwarded_ports(machine, name, instance_networking)\n\nend", "def configure_instance(node, i)\n node.vm.hostname = fqdn(i)\n network_ports node, i\nend", "def apply_network_settings container, networking\n OpenNebula.log_debug \"Configuring network\"\n nic = {:ifname => 'eth0', :host_mac => 'FE:FF:FF:FF:FF:FF'}\n\n container.add_veth nic\n OpenVZ::Util.execute \"brctl addif #{networking[:bridge]} veth#{container.ctid}.0\" unless networking[:bridge].nil?\n\n container.command \"ifconfig eth0 #{networking[:ip]}\"\n container.command \"ifconfig eth0 up\"\n end", "def network_config\n @network_config ||= begin\n raw_config = network_params[\"network_configuration\"] || {\"interfaces\" => []}\n config = NetworkConfiguration.new(raw_config)\n config.add_nics!(device_config, :add_partitions => true) if dell_server?\n config\n end\n end", "def configure_networks(vm_cid, networks)\n @logger.info(\"configure_networks(#{vm_cid}, #{networks})\")\n # Azure does not support to configure the network of an existing VM,\n # so we need to notify the InstanceUpdater to recreate it\n raise Bosh::Clouds::NotSupported\n end", "def configure_networks(server_id, network_spec)\n with_thread_name(\"configure_networks(#{server_id}, ...)\") do\n raise Bosh::Clouds::NotSupported,\n format('network configuration change requires VM recreation: %s', network_spec)\n end\n end", "def set_private_network(vm_name, vm_config, ip: '', gateway: '') \n \n 
require_string(vm_name)\n require_vagrant_config(vm_config)\n\n log_info(\" - ip: #{ip}, gateway: #{gateway}\")\n\n vm_config.vm.network :private_network, ip: ip, gateway: gateway\n end", "def configure_instance(aws_node, private_ip_address, node_name, node_config)\n # Spin up EC2 instances\n aws_node.vm.provider :aws do |ec2, override|\n ec2.keypair_name = KEYPAIR_NAME\n ec2.access_key_id = ACCESS_KEY_ID\n ec2.secret_access_key = SECRET_ACCESS_KEY\n ec2.security_groups = SECURITY_GROUPS\n override.ssh.private_key_path = PRIVATE_KEY_PATH\n\n # read region, ami etc from json.\n ec2.region = AWS_CFG['region']\n ec2.subnet_id = AWS_CFG['subnet_id']\n ec2.availability_zone = AWS_CFG['region'] + AWS_CFG['availability_zone']\n ec2.ami = node_config['ami_id']\n ec2.instance_type = node_config['instance_type']\n ec2.private_ip_address = private_ip_address\n ec2.associate_public_ip = true\n\n if node_config.key?('volume_size')\n # Size in GB\n # (untested)\n ec2.block_device_mapping = [{ 'DeviceName' => '/dev/sda1', 'Ebs.VolumeSize' => node_config['volume_size'] }]\n end\n\n override.ssh.username = AWS_CFG['ssh_username']\n\n # Collect tags (can't be longer than 250 chars)\n ec2.tags = ({})\n ec2.tags['Name'] = node_name[0..245]\n ec2.tags['Type'] = 'Hyperledger'\n ec2.tags['Version'] = VERSION\n ec2.tags['Fabric'] = node_config['fabric'].map { |f| f['role'] }.join(',')[0..245]\n end\nend", "def configure(huaweicloud, server, network_id)\n cloud_error(\"No IP provided for vip network `#{@name}'\") if @ip.nil?\n\n huaweicloud.with_huaweicloud do\n FloatingIp.reassociate(huaweicloud, @ip, server, network_id)\n end\n end", "def configure_private_network_ip(config, ip, vm_name)\n if ip\n config.vm.network :private_network, :ip => ip, :netmask => \"255.255.255.0\"\n else\n puts \" NO HOSTONLY IP defined for VM #{vm_name}.\"\n end\n end", "def set_vm_network_config(vmid, network_name, config={})\n builder = Nokogiri::XML::Builder.new do |xml|\n xml.NetworkConnectionSection(\n \"xmlns\" => \"http://www.vmware.com/vcloud/v1.5\",\n \"xmlns:ovf\" => \"http://schemas.dmtf.org/ovf/envelope/1\") {\n xml['ovf'].Info \"VM Network configuration\"\n xml.PrimaryNetworkConnectionIndex(config[:primary_index] || 0)\n xml.NetworkConnection(\"network\" => network_name, \"needsCustomization\" => true) {\n xml.NetworkConnectionIndex(config[:network_index] || 0)\n xml.IpAddress config[:ip] if config[:ip]\n xml.IsConnected(config[:is_connected] || true)\n xml.IpAddressAllocationMode config[:ip_allocation_mode] if config[:ip_allocation_mode]\n }\n }\n end\n\n params = {\n 'method' => :put,\n 'command' => \"/vApp/vm-#{vmid}/networkConnectionSection\"\n }\n\n response, headers = send_request(params, builder.to_xml, \"application/vnd.vmware.vcloud.networkConnectionSection+xml\")\n\n task_id = headers[:location].gsub(\"#{@api_url}/task/\", \"\")\n task_id\n end", "def set_client_network(vm_name, vm_config, hostname) \n\n require_string(vm_name)\n require_vagrant_config(vm_config)\n\n require_string(hostname)\n\n network_range = get_network_range\n\n dc_ip = \"#{network_range}.5\"\n machine_ip = get_ip_for_host(hostname)\n \n log_info(\" - private network: ip: #{machine_ip} gateway: #{network_range}.1\") \n vm_config.vm.network :private_network, \n ip: machine_ip, gateway: \"#{network_range}.1\"\n\n log_info(\" - fixing secondary network interface: ip: #{machine_ip} dns: #{dc_ip}\") \n vm_config.vm.provision \"shell\", \n path: \"#{vagrant_script_path}/vagrant/uplift.vagrant.core/uplift.fix-second-network.ps1\", \n args: \"-ip 
#{machine_ip} -dns #{dc_ip}\"\n end", "def configure(cloudstack, server)\n if @ip.nil?\n cloud_error(\"No IP provided for vip network `#{@name}'\")\n end\n\n # Check if the CloudStack floating IP is allocated. If true, disassociate\n # it from any server before associating it to the new server\n address = cloudstack.addresses.find { |a| a.ip == @ip }\n if address\n unless address.instance_id.nil?\n @logger.info(\"Disassociating floating IP `#{@ip}' \" \\\n \"from server `#{address.instance_id}'\")\n address.server = nil\n end\n\n @logger.info(\"Associating server `#{server.id}' \" \\\n \"with floating IP `#{@ip}'\")\n address.server = server\n else\n cloud_error(\"Floating IP #{@ip} not allocated\")\n end\n end", "def network_configuration\n dns = settings.provider.network.dns\n dns = dns.split(\",\") if dns.is_a?(String)\n {\n \"ip\"=>public_ip,\n \"netmask\"=>settings.provider.network.netmask,\n \"gateway\"=>settings.provider.network.gateway,\n \"dns\"=>dns,\n \"cloud_properties\"=>{\n \"name\"=>settings.provider.network.name\n }\n }\n end", "def network_config\n return '--net=host' unless @vpn_tunnel\n\n hostname = `hostname`.chomp\n \"--net=container:#{hostname}\"\n end", "def set_sysconfig_network( name )\n # If not already set correctly, backup, delete old line, append\n # new line.\n sudo <<-SH\n if ! grep -q '^HOSTNAME=#{name}$' /etc/sysconfig/network; then\n cp -f /etc/sysconfig/network /etc/sysconfig/network~\n sed -i '/^HOSTNAME=.*/d' /etc/sysconfig/network\n echo 'HOSTNAME=#{name}' >> /etc/sysconfig/network\n hostname #{name}\n fi\n SH\n end", "def configure(c, ip: nil)\n c.vm.network 'private_network', ip: ip if ip\n\n c.vm.provider 'virtualbox' do |vb|\n vb.memory = 512\n end\n\n osreplace_path = '../osreplace' if File.directory?('../osreplace')\n c.vm.synced_folder osreplace_path, '/opt/osreplace' if osreplace_path\nend", "def setDefaults\n ips = []\n if $IN_AWS\n [\"public-ipv4\", \"local-ipv4\"].each { |addr|\n begin\n Timeout.timeout(2) do\n ip = URI.open(\"http://169.254.169.254/latest/meta-data/#{addr}\").read\n ips << ip if !ip.nil? and ip.size > 0\n end\n rescue OpenURI::HTTPError, Timeout::Error, SocketError\n # these are ok to ignore\n end\n }\n elsif $IN_GOOGLE\n base_url = \"http://metadata.google.internal/computeMetadata/v1\"\n begin\n Timeout.timeout(2) do\n # TODO iterate across multiple interfaces/access-configs\n ip = URI.open(\"#{base_url}/instance/network-interfaces/0/ip\", \"Metadata-Flavor\" => \"Google\").read\n ips << ip if !ip.nil? and ip.size > 0\n ip = URI.open(\"#{base_url}/instance/network-interfaces/0/access-configs/0/external-ip\", \"Metadata-Flavor\" => \"Google\").read\n ips << ip if !ip.nil? 
and ip.size > 0\n end\n rescue OpenURI::HTTPError, Timeout::Error, SocketError => e\n # This is fairly normal, just handle it gracefully\n end\n end\n\n\n $CONFIGURABLES[\"allow_invade_foreign_vpcs\"][\"default\"] = false\n $CONFIGURABLES[\"public_address\"][\"default\"] = $possible_addresses.first\n $CONFIGURABLES[\"hostname\"][\"default\"] = Socket.gethostname\n $CONFIGURABLES[\"banner\"][\"default\"] = \"Mu Master at #{$CONFIGURABLES[\"public_address\"][\"default\"]}\"\n if $IN_AWS\n # XXX move this crap to a callback hook for puttering around in the AWS submenu\n aws = JSON.parse(URI.open(\"http://169.254.169.254/latest/dynamic/instance-identity/document\").read)\n iam = nil\n begin\n iam = URI.open(\"http://169.254.169.254/latest/meta-data/iam/security-credentials\").read\n rescue OpenURI::HTTPError, SocketError\n end\n # $CONFIGURABLES[\"aws\"][\"subtree\"][\"account_number\"][\"default\"] = aws[\"accountId\"]\n $CONFIGURABLES[\"aws\"][\"subtree\"][\"region\"][\"default\"] = aws[\"region\"]\n if iam and iam.size > 0\n # XXX can we think of a good way to test our permission set?\n $CONFIGURABLES[\"aws\"][\"subtree\"][\"access_key\"][\"desc\"] = $CONFIGURABLES[\"aws\"][\"subtree\"][\"access_key\"][\"desc\"] + \". Not necessary if IAM Profile #{iam.bold} has sufficient API access.\"\n $CONFIGURABLES[\"aws\"][\"subtree\"][\"access_secret\"][\"desc\"] = $CONFIGURABLES[\"aws\"][\"subtree\"][\"access_key\"][\"desc\"] + \". Not necessary if IAM Profile #{iam.bold} has sufficient API access.\"\n end\n end\n $CONFIGURABLES[\"aws\"][\"subtree\"][\"log_bucket_name\"][\"default\"] = $CONFIGURABLES[\"hostname\"][\"default\"]\n $CONFIGURABLES[\"google\"][\"subtree\"][\"log_bucket_name\"][\"default\"] = $CONFIGURABLES[\"hostname\"][\"default\"]\n end", "def set_vapp_network_config(vappid, network_name, config={})\n builder = Nokogiri::XML::Builder.new do |xml|\n xml.NetworkConfigSection(\n \"xmlns\" => \"http://www.vmware.com/vcloud/v1.5\",\n \"xmlns:ovf\" => \"http://schemas.dmtf.org/ovf/envelope/1\") {\n xml['ovf'].Info \"Network configuration\"\n xml.NetworkConfig(\"networkName\" => network_name) {\n xml.Configuration {\n xml.FenceMode(config[:fence_mode] || 'isolated')\n xml.RetainNetInfoAcrossDeployments(config[:retain_net] || false)\n xml.ParentNetwork(\"href\" => config[:parent_network])\n }\n }\n }\n end\n\n params = {\n 'method' => :put,\n 'command' => \"/vApp/vapp-#{vappid}/networkConfigSection\"\n }\n\n response, headers = send_request(params, builder.to_xml, \"application/vnd.vmware.vcloud.networkConfigSection+xml\")\n\n task_id = headers[:location].gsub(\"#{@api_url}/task/\", \"\")\n task_id\n end", "def configure_aws_region settings\n settings[:aws_region] ||= settings[:availability_zones].first.gsub(/^(\\w+-\\w+-\\d)[a-z]/, '\\1')\n settings[:ec2_url] ||= \"https://#{settings[:aws_region]}.ec2.amazonaws.com\"\n unless ((ENV['EC2_URL'].to_s == '' && settings[:aws_region] == 'us-east-1') || (ENV['EC2_URL'] == settings[:ec2_url]))\n warn \"******\\nThe EC2_URL environment variable should probably be #{settings[:ec2_url]} (from your availability zone), not #{AWS::EC2::DEFAULT_HOST}. 
Try invoking 'export EC2_URL=#{settings[:ec2_url]}' and re-run.\\n******\"\n end\nend", "def set_private_dc_network(vm_name, vm_config)\n require_string(vm_name)\n require_vagrant_config(vm_config)\n\n log_info_light(\"#{vm_name}: private dc network\")\n \n network_range = get_network_range\n\n set_private_network(\n vm_name,\n vm_config,\n :ip => \"#{network_range}.5\",\n :gateway => \"#{network_range}.1\"\n )\n \n end", "def load_network_conf\n current_cfg = networks_base\n\n new_cfg = load_network_yml\n\n attributes_cfg = node['vnet_part']['networks'].to_hash if node['vnet_part']['networks']\n new_cfg = ::Chef::Mixin::DeepMerge.deep_merge(attributes_cfg, new_cfg) if attributes_cfg\n\n ::Chef::Mixin::DeepMerge.deep_merge(new_cfg, current_cfg)\nend", "def ensure_valid_network\n if valid_network.nil?\n self.build_valid_network(:tag => \"#{self.tag} Asset Config Network\")\n end\n end", "def network_options(type, ip, dhcp_enable)\n options = {}\n options[:ip] = ip\n options[:type] = type\n options[:libvirt__dhcp_enabled] = dhcp_enable\n\n options\nend", "def process_private_network(root_options, network_options, env)\n if root_options[:name] && validate_network_name!(root_options[:name], env)\n network_name = root_options[:name]\n end\n\n if root_options[:type].to_s == \"dhcp\"\n if !root_options[:ip] && !root_options[:subnet]\n network_name = \"vagrant_network\" if !network_name\n return [network_name, network_options]\n end\n if root_options[:subnet]\n addr = IPAddr.new(root_options[:subnet])\n root_options[:netmask] = addr.prefix\n end\n end\n\n if root_options[:ip]\n addr = IPAddr.new(root_options[:ip])\n elsif addr.nil?\n raise Errors::NetworkIPAddressRequired\n end\n\n # If address is ipv6, enable ipv6 support\n network_options[:ipv6] = addr.ipv6?\n\n # If no mask is provided, attempt to locate any existing\n # network which contains the assigned IP address\n if !root_options[:netmask] && !network_name\n network_name = env[:machine].provider.driver.\n network_containing_address(root_options[:ip])\n # When no existing network is found, we are creating\n # a new network. Since no mask was provided, default\n # to /24 for ipv4 and /64 for ipv6\n if !network_name\n root_options[:netmask] = addr.ipv4? ? 24 : 64\n end\n end\n\n # With no network name, process options to find or determine\n # name for new network\n if !network_name\n if !root_options[:subnet]\n # Only generate a subnet if not given one\n subnet = IPAddr.new(\"#{addr}/#{root_options[:netmask]}\")\n network = \"#{subnet}/#{root_options[:netmask]}\"\n else\n network = root_options[:subnet]\n end\n\n network_options[:subnet] = network\n existing_network = env[:machine].provider.driver.\n network_defined?(network)\n\n if !existing_network\n network_name = \"vagrant_network_#{network}\"\n else\n if !existing_network.to_s.start_with?(\"vagrant_network\")\n env[:ui].warn(I18n.t(\"docker_provider.subnet_exists\",\n network_name: existing_network,\n subnet: network))\n end\n network_name = existing_network\n end\n end\n\n [network_name, network_options]\n end", "def aws_internet_gateway_create(opts)\n opts[:vpc].internet_gateway = AWS::EC2.new.internet_gateways.create\n end", "def create_vm_nic_config(network_spec)\n @logger.debug(\"Network spec: #{network_spec}\")\n vm_nic_config = []\n # Get available networks\n networks = JSON.parse(@client.get('v2.0', 'networks'))['entities']\n # Iterate through network spec in the config and create network\n # config. spec. 
for the virtual machine\n network_spec.each do |name, net|\n network ||= {}\n cloud_error(\"[#{name}] Must provide cloud properties.\") if\n net['cloud_properties'].nil?\n # Fetch subnet name from config\n subnet = net['cloud_properties']['subnet']\n cloud_error(\"[#{name}] Must provide subnet name.\") if subnet.nil?\n # Fetch network's uuid from subnet name\n network = networks.find { |n| n['name'] == subnet }\n cloud_error(\"[#{name}] Subnet #{subnet} not found\") if network.nil?\n network_uuid = network['uuid']\n # When static IP is configured\n if net['type'] == 'manual'\n ip = net['ip']\n cloud_error(\"[#{name}:manual] Must provide IP Address.\") if ip.nil?\n network[:request_ip] = true\n network[:requested_ip_address] = ip\n end\n network[:network_uuid] = network_uuid\n # Add this network spec to the list\n vm_nic_config << network\n end\n vm_nic_config\n rescue => e\n raise e\n end", "def inject_private_network_config(ci_environment_vms)\n return ci_environment_vms if ci_environment_vms.count() == 1\n int_id = 10\n ci_environment_vms.each do |vm,config|\n ci_environment_vms[vm][\"private_ip\"] = \"192.168.50.\" + int_id.to_s\n int_id += 1\n end\n ci_environment_vms\n end", "def cr_network_config\n @node[\"network\"]\n end", "def attach_to_internal_network(name=\"intnet\")\n # couldn't find a way to get VirtualBox to list internal networks over FFI\n self.attachment_type = :internal_network\n self.internal_network = name\n self.enabled = true\n end", "def createEc2Instance\n\t\t name = @server[\"name\"]\n\t\t node = @server['mu_name']\n\t\t\tbegin\n\t\t\t\t@server['iam_role'] = MU::Server.createIAMProfile(\"Server-\"+name, base_profile: @server['iam_role'], extra_policies: @server['iam_policies'])\n\t\t\trescue Aws::EC2::Errors::RequestLimitExceeded => e\n\t\t\t\tsleep 10\n\t\t\t\tretry\n\t\t\tend\n\t\t\t@server['iam_role'] = @server['iam_role']\n\n\t\t\tbegin\n\t\t\t\[email protected]\n\t\t\trescue Aws::EC2::Errors::RequestLimitExceeded => e\n\t\t\t\tsleep 10\n\t\t\t\tretry\n\t\t\tend\n\n\t\t instance_descriptor = {\n\t\t :image_id => @server[\"ami_id\"],\n\t\t :key_name => @deploy.keypairname,\n\t\t :instance_type => @server[\"size\"],\n\t\t :disable_api_termination => true,\n\t\t :min_count => 1,\n\t\t :max_count => 1,\n\t\t\t\t:network_interfaces => [\n\t\t\t\t\t{\n\t\t\t\t\t\t:associate_public_ip_address => name[\"associate_public_ip\"]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t }\n\t\t\t\n\t\t\tif !@server['private_ip'].nil?\n\t\t\t\tinstance_descriptor[:private_ip_address] = @server['private_ip']\n\t\t\tend\n\n\t\t\tvpc_id=subnet_id=nat_host_name=nat_ssh_user = nil\n\t\t\tsubnet_retries = 0\n\t\t\tif !@server[\"vpc\"].nil?\n\t\t\t\tbegin\n\t\t\t\t\tvpc_id, subnet_ids, nat_host_name, nat_ssh_user = MU::VPC.parseVPC(@server['vpc'])\n\t\t\t\trescue Aws::EC2::Errors::ServiceError => e\n\t\t\t\t\tMU.log e.message, MU::ERR, details: @server\n\t\t\t\t\tif subnet_retries < 5\n\t\t\t\t\t subnet_retries = subnet_retries + 1\n\t\t\t\t\t sleep 15\n\t\t\t\t\t retry\n\t\t\t\t\tend\n\t\t\t\t\traise e\n\t\t\t\tend\n\t\t\t\tsubnet_id = subnet_ids.first\n\t\t\t\tif subnet_id.nil? or subnet_id.empty?\n\t\t\t\t\tMU.log \"Got null Subnet id out of #{@server['vpc']}\", MU::ERR\n\t\t\t\t\traise \"deploy failure\"\n\t\t\t\tend\n\n\t\t\t\tMU.log \"Deploying #{node} into VPC #{vpc_id} Subnet #{subnet_id}\"\n\n\t\t\t\tif !@server[\"vpc\"][\"nat_host_name\"].nil? 
or !@server[\"vpc\"][\"nat_host_id\"].nil?\n\t\t\t\t\tadmin_sg = MU::Server.punchAdminNAT(@server, node)\n\t\t\t\telse\n\t\t\t\t\tadmin_sg = MU::FirewallRule.setAdminSG(vpc_id: vpc_id, region: @server['region'])\n\t\t\t\tend\n\n\t\t\t\tinstance_descriptor[:subnet_id] = subnet_id\n\t\t\t\tnode_sg = MU::FirewallRule.createEc2SG(\n\t\t\t\t\t\t@server[\"name\"].upcase,\n\t\t\t\t\t\t@server[\"ingress_rules\"],\n\t\t\t\t\t\tdescription: \"SG holes for #{node}\",\n\t\t\t\t\t\tvpc_id: vpc_id,\n\t\t\t\t\t\tregion: @server['region']\n\t\t\t\t)\n\t\t\telse\n\t\t\t\tadmin_sg = MU::FirewallRule.setAdminSG(region: @server['region'])\n\t\t\t\tnode_sg = MU::FirewallRule.createEc2SG(\n\t\t\t\t\t\t@server[\"name\"].upcase,\n\t\t\t\t\t\t@server[\"ingress_rules\"],\n\t\t\t\t\t\tdescription: \"SG holes for #{node}\",\n\t\t\t\t\t\tregion: @server['region']\n\t\t\t\t)\n\t\t\tend\n\t\t\tsecurity_groups = Array.new\n\t\t\tsecurity_groups << admin_sg\n\t\t\tsecurity_groups << node_sg\n\t\t\tif !@server[\"add_firewall_rules\"].nil?\n\t\t\t\t@server[\"add_firewall_rules\"].each { |acl|\n\t\t\t\t\tsg = MU::FirewallRule.find(sg_id: acl[\"rule_id\"], name: acl[\"rule_name\"], region: @server['region'])\n\t\t\t\t\tif sg.nil?\n\t\t\t\t\t\tMU.log \"Couldn't find dependent security group #{acl} for server #{node}\", MU::ERR\n\t\t\t\t\t\traise \"deploy failure\"\n\t\t\t\t\tend\n\t\t\t\t\tsecurity_groups << sg.group_id\n\t\t\t\t}\n\t\t\tend\n\n\t\t\tinstance_descriptor[:security_group_ids] = security_groups\n\n\t\t if [email protected]? and [email protected]?\n\t\t instance_descriptor[:user_data] = Base64.encode64(@userdata)\n\t\t end\n\n\t\t if !@server[\"iam_role\"].nil?\n\t\t instance_descriptor[:iam_instance_profile] = { name: @server[\"iam_role\"]}\n\t\t end\n\n\t\t\tconfigured_storage = Array.new\n\t\t\tif @server[\"storage\"]\n\t\t\t\t@server[\"storage\"].each { |vol|\n\t\t\t\t\tconfigured_storage << MU::Server.convertBlockDeviceMapping(vol)\n\t\t\t\t}\n\t\t\tend\n\t\t\n\t\t\tMU::Server.waitForAMI(@server[\"ami_id\"], region: @server['region'])\n\n\t\t\tinstance_descriptor[:block_device_mappings] = configured_storage\n\t\t\tinstance_descriptor[:block_device_mappings].concat(@ephemeral_mappings)\n\n\t\t\tinstance_descriptor[:monitoring] = { enabled: @server['monitoring'] }\n\n\t\t\tMU.log \"Creating EC2 instance #{node}\"\n\t\t\tMU.log \"Instance details for #{node}: #{instance_descriptor}\", MU::DEBUG\n#\t\t\t\tif instance_descriptor[:block_device_mappings].empty?\n#\t\t\t\t\tinstance_descriptor.delete(:block_device_mappings)\n#\t\t\t\tend\n#pp instance_descriptor[:block_device_mappings]\n\t\t\tretries = 0\n\t\t\tbegin\n\t\t\t\tresponse = MU.ec2(@server['region']).run_instances(instance_descriptor)\n\t\t\trescue Aws::EC2::Errors::InvalidGroupNotFound, Aws::EC2::Errors::InvalidSubnetIDNotFound, Aws::EC2::Errors::InvalidParameterValue, Aws::EC2::Errors::RequestLimitExceeded => e\n\t\t\t\tif retries < 10\n\t\t\t\t\tif retries > 7\n\t\t\t\t\t\tMU.log \"Seeing #{e.inspect} while trying to launch #{node}, retrying a few more times...\", MU::WARN, details: instance_descriptor\n\t\t\t\t\tend\n\t\t\t\t\tsleep 10\n\t\t\t\t\tretries = retries + 1\n\t\t\t\t\tretry\n\t\t\t\telse\n\t\t\t\t\traise e\n\t\t\t\tend\n\t\t\tend\n\n\t\t\tinstance = response.instances.first\n\t\t\tMU.log \"#{node} (#{instance.instance_id}) coming online\"\n\n\n\t\t\treturn instance\n\n\t\tend", "def customize_vm(name,options)\n vm_config = \"\"\n options.each_pair do |key,value|\n case key\n when 'ip'\n vm_config << \" v.vm.network :private_network, :ip => 
'#{value}'\\n\"\n else\n next\n end\n end\n vm_config\n end", "def customize_vm(name,options)\n vm_config = \"\"\n options.each_pair do |key,value|\n case key\n when 'ip'\n vm_config << \" v.vm.network :private_network, :ip => '#{value}'\\n\"\n else\n next\n end\n end\n vm_config\n end", "def network=(value)\n @root[\"network\"] = value\n end", "def network\n\n\t\tdebug \"Network paramentrs\"\n\t\tnetwork = []\n\t\tiface = resource[:interfaces]\n\t\tif iface.nil? \n\t\t\tnetwork = [\"--network\", \"network=default\"]\n\t\telsif iface == \"disable\"\n\t\t\tnetwork = [\"--nonetworks\"]\n\t\telse\n\t\t\tiface.each do |iface|\n\t\t\t\tif interface?(iface)\t\n\t\t\t\t\tnetwork << [\"--network\",\"bridge=\"+iface]\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\t\n\t\tmacs = resource[:macaddrs]\n\t\tif macs\n\t\t\tresource[:macaddrs].each do |macaddr|\n\t\t\t\t#FIXME -m is decrepted\n\t\t\t\tnetwork << \"-m\"\n\t\t\t\tnetwork << macaddr\n\t\t\tend\n\t\tend\n\n\t\treturn network\n\tend", "def set_public_network(vm_name, vm_config, ip) \n \n require_string(vm_name)\n require_vagrant_config(vm_config)\n\n if ip.to_s.empty?\n log_info(\" - skipping public network ip setup\") \n return\n else \n log_info(\" - public network: ip: #{ip}\") \n end\n\n vm_config.vm.network :public_network, ip: ip\n end", "def network(type, **options)\n options = options.dup\n options[:protocol] ||= \"tcp\"\n\n # Convert to symbol to allow strings\n type = type.to_sym\n\n if !options[:id]\n default_id = nil\n\n if type == :forwarded_port\n # For forwarded ports, set the default ID to be the\n # concat of host_ip, proto and host_port. This would ensure Vagrant\n # caters for port forwarding in an IP aliased environment where\n # different host IP addresses are to be listened on the same port.\n default_id = \"#{options[:host_ip]}#{options[:protocol]}#{options[:host]}\"\n end\n\n options[:id] = default_id || SecureRandom.uuid\n end\n\n # Scope the ID by type so that different types can share IDs\n id = options[:id]\n id = \"#{type}-#{id}\"\n\n # Merge in the previous settings if we have them.\n if @__networks.key?(id)\n options = @__networks[id][1].merge(options)\n end\n\n # Merge in the latest settings and set the internal state\n @__networks[id] = [type.to_sym, options]\n end", "def create_esb_server (config, hostname, ip1, ip2)\n config.vm.define hostname do |esb|\n esb.vm.provider \"virtualbox\" do |provider|\n provider.customize [\"modifyvm\", :id, \"--memory\", 2048]\n end\n\n esb.vm.network \"private_network\", ip: ip1\n esb.vm.host_name = hostname\n\n esb.vm.network \"private_network\", ip: ip2\n end\nend", "def manage_address\n enable_dhcp if new_resource.bootproto == 'dhcp' && current_resource.bootproto != 'dhcp'\n return unless new_resource.bootproto == 'static'\n\n config_static unless new_resource.address.nil? || (current_resource.bootproto == 'static' && ip_subnet_exist?)\n config_gateway unless new_resource.gateway.nil? || (current_resource.bootproto == 'static' && current_resource.gateway == new_resource.gateway)\n end", "def networking\n networking = {}\n if advanced_networking.nil? \n networking['eth0'] = {}\n networking['eth0']['dns'] = dns unless dns.nil?\n networking['eth0']['seclists'] = seclists unless (seclists.nil? or seclists[0]==nil)\n networking['eth0']['ipnetwork'] = ipnetwork unless ipnetwork.nil?\n networking['eth0']['nat'] = nat unless nat.nil?\n if networking['eth0']['ipnetwork'] and (not networking['eth0']['nat'].nil?) 
and (not networking['eth0']['nat'].is_a?(Array))\n # fix to oracle's format or else!\n if networking['eth0']['nat'].start_with?('ipreservation')\n networking['eth0']['nat']=\"network/v1/#{networking['eth0']['nat']}\"\n end\n networking['eth0']['nat']=[networking['eth0']['nat']]\n end\n networking['eth0']['ip'] = ip unless ip.nil?\n networking['eth0']['address'] = mac_address unless ip.nil?\n networking['eth0']['vnic'] = vnic unless vnic.nil?\n networking['eth0']['vnicsets'] = vnicsets unless vnic.nil?\n networking['eth0']['is_default_gateway'] = is_default_gateway unless is_default_gateway.nil?\n networking['eth0']['name_servers'] = name_servers unless name_servers.nil?\n networking['eth0']['search_domains'] = search_domains unless search_domains.nil?\n else\n networking=advanced_networking\n end\n\n\n networking\n end", "def network=(value)\n if value == @defaults['ai.device.network']\n @values.delete 'ai.device.network' if @values.key? 'ai.device.network'\n else\n @values['ai.device.network'] = value\n end\n end", "def network_params\n params.require(:network).permit(:name, :dhcp_enabled, :gateway, :netmask, :server_address, \n :space, :start_address, :dns_zone, :server_dns, :pxefile, \n :default_lease_time, :max_lease_time, :netbios_name_servers, \n :netbios_node_type, :ntp_servers)\n end", "def network_options(host)\n options = {}\n\n if host.key?('ip')\n options[:ip] = host['ip']\n options[:netmask] = host['netmask'] ||= '255.255.255.0'\n else\n options[:type] = 'dhcp'\n end\n\n if host.key?('mac')\n options[:mac] = host['mac'].gsub(/[-:]/, '')\n end\n \n if host.key?('auto_config')\n options[:auto_config] = host['auto_config']\n end\n \n if host.key?('intnet') && host['intnet']\n options[:virtualbox__intnet] = true\n end\n\n options\nend", "def config_lv_define_box2(vm, conf)\n vm.define conf['hostname_box2'] do |box2|\n box2.vm.hostname = conf['hostname_box2']\n box2.vm.box = conf['imagename_box2']\n box2.vm.network :private_network,\n :libvirt__network_name => \"mgmt\",\n :mac => conf['libvirt_mgmt_mac_box2'],\n :ip => conf['libvirt_mgmt_ip_box2'],\n :libvirt__netmask => conf['libvirt_mgmt_netmask_box2'],\n :libvirt__dhcp_enabled => false,\n :libvirt__forward_mode => \"none\",\n :autostart => true\n box2.vm.network :public_network,\n :network_name => \"ext\",\n :ip => conf['libvirt_ext_ip_box2'],\n :netmask => conf['libvirt_ext_netmask_box2'],\n :gateway => conf['libvirt_ext_gateway_box2'],\n :mac => conf['libvirt_ext_mac_box2'],\n :dev => conf['libvirt_dev'],\n :type => conf['libvirt_type'],\n :mode => conf['libvirt_mode']\n box2.vm.network :private_network,\n :libvirt__network_name => \"ceph\",\n :mac => conf['libvirt_ceph_mac_box2'],\n :ip => conf['libvirt_ceph_ip_box2'],\n :libvirt__netmask => conf['libvirt_ceph_netmask_box2'],\n :libvirt__dhcp_enabled => false,\n :libvirt__forward_mode => \"none\",\n :autostart => true\n box2.vm.network :private_network,\n :libvirt__network_name => \"vm_tunnel\",\n :mac => conf['libvirt_tunnel_mac_box2'],\n :ip => conf['libvirt_tunnel_ip_box2'],\n :libvirt__netmask => conf['libvirt_tunnel_netmask_box2'],\n :libvirt__dhcp_enabled => false,\n :libvirt__forward_mode => \"none\",\n :autostart => true\n box2.vm.provider :libvirt do |domain|\n domain.memory = conf['memory_box2']\n domain.cpus = conf['cpus_box2']\n domain.management_network_name = 'vagrantmgmt'\n domain.management_network_address = conf['libvirt_vagrantmgmt_ip_box2']\n domain.management_network_mode = conf['libvirt_mgmt_mode']\n end\n config_provision(box2.vm, conf)\n end\nend", "def 
network_options(host)\n options = {}\n\n if host.key?('ip')\n options[:ip] = host['ip']\n options[:netmask] = host['netmask'] ||= '255.255.255.0'\n else\n options[:type] = 'dhcp'\n end\n\n options[:mac] = host['mac'].gsub(/[-:]/, '') if host.key?('mac')\n options[:auto_config] = host['auto_config'] if host.key?('auto_config')\n options[:virtualbox__intnet] = true if host.key?('intnet') && host['intnet']\n options\nend", "def network_options(host)\n options = {}\n\n if host.key?('ip')\n options[:ip] = host['ip']\n options[:netmask] = host['netmask'] ||= '255.255.255.0'\n else\n options[:type] = 'dhcp'\n end\n\n options[:mac] = host['mac'].gsub(/[-:]/, '') if host.key?('mac')\n options[:auto_config] = host['auto_config'] if host.key?('auto_config')\n options[:virtualbox__intnet] = true if host.key?('intnet') && host['intnet']\n options\nend", "def set_network\n @network = Network.find(params[:id])\n end", "def set_network\n @network = Network.find(params[:id])\n end", "def set_network\n @network = Network.find(params[:id])\n end", "def set_network\n @network = Network.find(params[:id])\n end", "def set_network\n @network = Network.find(params[:id])\n end", "def setup_extra_nics(action_handler, bootstrap_options, vm, machine, machine_spec)\n networks = bootstrap_options[:network_name]\n networks = [networks] if networks.is_a?(String)\n return if networks.nil? || networks.count < 2\n\n new_nics = vsphere_helper.add_extra_nic(\n action_handler,\n vm_template_for(bootstrap_options),\n bootstrap_options,\n vm\n )\n if is_windows?(vm) && !new_nics.nil? && @vm_helper.open_port?(machine_spec.location['ipaddress'], @vm_helper.port)\n new_nics.each do |nic|\n nic_label = nic.device.deviceInfo.label\n machine.execute_always(\n \"Disable-Netadapter -Name '#{nic_label}' -Confirm:$false\"\n )\n end\n end\n end", "def configure_node(node)\n node.vm.box = \"ubuntu/bionic64\"\n node.vm.box_check_update = true\n node.vm.synced_folder \"./data\", \"/data\"\n node.vm.network \"public_network\"\n node.vm.network \"private_network\", type: \"dhcp\"\n\n node.vm.provider \"virtualbox\" do |v|\n v.gui = false\n v.memory = \"2048\"\n v.cpus = 2\n v.customize [\"modifyvm\", :id, \"--cpuexecutioncap\", \"100\"]\n v.customize [\"modifyvm\", :id, \"--hpet\", \"on\"]\n v.default_nic_type = \"82545EM\"\n end\n\n node.vm.provision \"shell\", inline: <<-SHELL\n apt -y update\n apt -y install build-essential\n apt -y install emacs25-nox\n apt -y install htop\n apt -y install zsh\n apt -y upgrade\n SHELL\nend", "def push_config\n command = ['VBoxManage', 'modifyvm', uid]\n command.concat board.to_params\n nics.each_with_index do |nic, index|\n if nic.nil?\n command.push \"--nic#{index + 1}\", 'none'\n else\n command.concat nic.to_params(index + 1)\n end\n end\n VirtualBox.run_command! 
command\n \n io_buses.each { |bus| bus.add_to self }\n \n self\n end", "def network_options(host)\n options = {}\n\n if host.has_key?('ip')\n options[:ip] = host['ip']\n options[:netmask] = host['netmask'] ||= '255.255.255.0'\n else\n options[:type] = 'dhcp'\n end\n\n if host.has_key?('mac')\n options[:mac] = host['mac'].gsub(/[-:]/, '')\n end\n\n if host.has_key?('auto_config')\n options[:auto_config] = host['auto_config']\n end\n\n if host.has_key?('intnet') && host['intnet']\n options[:virtualbox__intnet] = true\n end\n\n options\nend", "def create_internal_network_node(network_config)\n builder = Nokogiri::XML::Builder.new do |xml|\n xml.Configuration {\n xml.IpScopes {\n xml.IpScope {\n xml.IsInherited(network_config[:is_inherited] || \"false\")\n xml.Gateway network_config[:gateway]\n xml.Netmask network_config[:netmask]\n xml.Dns1 network_config[:dns1] if network_config[:dns1]\n xml.Dns2 network_config[:dns2] if network_config[:dns2]\n xml.DnsSuffix network_config[:dns_suffix] if network_config[:dns_suffix]\n xml.IsEnabled(network_config[:is_enabled] || true)\n xml.IpRanges {\n xml.IpRange {\n xml.StartAddress network_config[:start_address]\n xml.EndAddress network_config[:end_address]\n }\n }\n }\n }\n xml.FenceMode 'isolated'\n xml.RetainNetInfoAcrossDeployments(network_config[:retain_info] || false)\n }\n end\n builder.doc\n end", "def network_options(host)\n options = {}\n\n if host.has_key?('ip')\n options[:ip] = host['ip']\n options[:netmask] = host['netmask'] ||= '255.255.255.0'\n else\n options[:type] = 'dhcp'\n end\n\n if host.has_key?('mac')\n options[:mac] = host['mac'].gsub(/[-:]/, '')\n end\n\n if host.has_key?('auto_config')\n options[:auto_config] = host['auto_config']\n end\n\n if host.has_key?('intnet') && host['intnet']\n options[:virtualbox__intnet] = true\n end\n\n options\nend", "def define_vbox_subnet_for_nat(vm, natnet)\n run_shell_cmd(\"VBoxManage modifyvm '#{vm} --natnet1 '#{natnet}'\")\n end", "def networks=(should_list)\n delta_hash = Utils.delta_add_remove(should_list, networks)\n return if delta_hash.values.flatten.empty?\n [:add, :remove].each do |action|\n Cisco::Logger.debug(\"networks delta #{@get_args}\\n #{action}: \" \\\n \"#{delta_hash[action]}\")\n delta_hash[action].each do |network, route_map_policy|\n state = (action == :add) ? 
'' : 'no'\n network = Utils.process_network_mask(network)\n unless route_map_policy.nil?\n route_map = \"route-map #{route_map_policy}\"\n route_policy = \"route-policy #{route_map_policy}\"\n end\n set_args_keys(state: state, network: network, route_map: route_map,\n route_policy: route_policy)\n config_set('bgp_af', 'networks', @set_args)\n end\n end\n end", "def network_options(host)\n options = {}\n\n if host.has_key?('ip')\n options[:ip] = host['ip']\n options[:netmask] = host['netmask'] ||= '255.255.255.0'\n else\n options[:type] = 'dhcp'\n end\n\n if host.has_key?('mac')\n options[:mac] = host['mac'].gsub(/[-:]/, '')\n end\n if host.has_key?('auto_config')\n options[:auto_config] = host['auto_config']\n end\n if host.has_key?('intnet') && host['intnet']\n options[:virtualbox__intnet] = true\n end\n\n options\nend", "def network_options(host)\n options = {}\n\n if host.has_key?('ip')\n options[:ip] = host['ip']\n options[:netmask] = host['netmask'] ||= '255.255.255.0'\n else\n options[:type] = 'dhcp'\n end\n\n if host.has_key?('mac')\n options[:mac] = host['mac'].gsub(/[-:]/, '')\n end\n if host.has_key?('auto_config')\n options[:auto_config] = host['auto_config']\n end\n if host.has_key?('intnet') && host['intnet']\n options[:virtualbox__intnet] = true\n end\n\n options\nend", "def network_options(host)\n options = {}\n\n if host.has_key?('ip')\n options[:ip] = host['ip']\n options[:netmask] = host['netmask'] ||= '255.255.255.0'\n else\n options[:type] = 'dhcp'\n end\n\n if host.has_key?('mac')\n options[:mac] = host['mac'].gsub(/[-:]/, '')\n end\n if host.has_key?('auto_config')\n options[:auto_config] = host['auto_config']\n end\n if host.has_key?('intnet') && host['intnet']\n options[:virtualbox__intnet] = true\n end\n\n options\nend", "def network_params\n params.require(:network).permit(:name, :network, :vlan, :description, addresses_attributes: [:id, :address, :device_id, :network_id, :device, :network])\n end", "def setup_network\n hidden_layers << output_layer\n end", "def create_host_only_network(options)\n end", "def set_aws_network_acl\n @aws_network_acl = AwsNetworkAcl.find(params[:id])\n end", "def configured_network?(network, server)\n case network.type\n when \"FIP_SNOOPING\"\n false\n when \"PXE\"\n server.os_image_type == \"vmware_esxi\" || !server.os_installed?\n else\n true\n end\n end", "def set_network_system\n @network_system = NetworkSystem.find(params[:id])\n end", "def configure(vm_config, number)\n vm_config.vm.network \"public_network\", ip: \"192.168.1.24#{number}\"\n vm_config.vm.host_name = \"level0#{number}.seoshop.net\"\n \n vm_config.vm.provision :puppet do |puppet|\n puppet.manifests_path = \"puppet\"\n puppet.module_path = \"puppet/modules\"\n puppet.manifest_file = \"site.pp\"\n end\nend", "def instance_with_network(name, image_id, network_interfaces, properties: {})\n if ([:SubnetId, :SecurityGroups, :SecurityGroupIds] & properties).any?\n fail \"Instance with NetworkInterfaces #{name} can not contain instance subnet or security_groups\"\n end\n properties[:ImageId] = image_id\n properties[:NetworkInterfaces] = network_interfaces\n if properties[:Tags] && !properties[:Tags].any? { |x| x[:Key] == 'Name' }\n properties[:Tags] << { Key: 'Name', Value: join('-', aws_stack_name, name) }\n end\n options = {\n Type: 'AWS::EC2::Instance',\n Properties: properties\n }\n resource name, options\n name\n end", "def check_network(options, attributes)\n env.logger.info \"Checking network\"\n if options.nil? or options.empty? or options[\"network_type\"].nil? 
or options[\"network_type\"] == \"network\"\n check_default_network\n attributes[:network_interface_type] = \"network\"\n elsif options[\"network_type\"] == \"bridge\"\n options[\"network_bridge_name\"].nil?\n if options[\"network_bridge_name\"].nil?\n raise Veewee::Error, \"You need to specify a 'network_bridge_name' if you plan to use 'bridge' as network type\"\n else\n attributes[:network_interface_type] = \"bridge\"\n attributes[:network_bridge_name] = \"#{options[\"network_bridge_name\"]}\"\n end\n else\n raise Veewee::Error, \"You specified a 'network_type' that isn't known (#{options[\"network_type\"]})\"\n end\n end", "def ec2_driver_config\n {\n 'name' => 'ec2',\n 'aws_ssh_key_id' => \"#{cookbook_name}-kitchen\",\n 'security_group_ids' => ENV['AWS_SECURITY_GROUP_ID'] ? [ENV['AWS_SECURITY_GROUP_ID']] : [DEFAULT_EC2_SECURITY_GROUP_ID],\n 'subnet_id' => ENV['AWS_SUBNET_ID'] || DEFAULT_EC2_SUBNET_ID,\n # Because kitchen-rackspace also has a thing called flavor_id.\n 'flavor_id' => nil,\n }\n end", "def generate_network_section(vAppId, network, config, type)\n params = {\n 'method' => :get,\n 'command' => \"/vApp/vapp-#{vAppId}/networkConfigSection\"\n }\n\n vapp_networks, headers = send_request(params)\n create_fake_network_node(vapp_networks, network[:name])\n\n if type.to_sym == :internal\n # Create a network configuration based on the config\n new_network = create_internal_network_node(config)\n else\n # Retrieve the requested network and prepare it for customization\n new_network = get_base_network(network[:id])\n end\n\n merge_network_config(vapp_networks, new_network, config)\n end", "def put_network_connection_system_section_vapp(id, network={})\n data = Fog::Generators::Compute::VcloudDirector::VmNetwork.new(network)\n\n request(\n :body => data.generate_xml,\n :expects => 202,\n :headers => {'Content-Type' => 'application/vnd.vmware.vcloud.networkConnectionSection+xml'},\n :method => 'PUT',\n :parser => Fog::ToHashDocument.new,\n :path => \"vApp/#{id}/networkConnectionSection/\"\n )\n end", "def createEc2Instance\n\n instance_descriptor = {\n :image_id => @config[\"image_id\"],\n :key_name => @deploy.ssh_key_name,\n :instance_type => @config[\"size\"],\n :disable_api_termination => true,\n :min_count => 1,\n :max_count => 1\n }\n\n instance_descriptor[:iam_instance_profile] = getIAMProfile\n\n security_groups = myFirewallRules.map { |fw| fw.cloud_id }\n if security_groups.size > 0\n instance_descriptor[:security_group_ids] = security_groups\n else\n raise MuError, \"Didn't get any security groups assigned to be in #{@mu_name}, that shouldn't happen\"\n end\n\n if @config['private_ip']\n instance_descriptor[:private_ip_address] = @config['private_ip']\n end\n\n if [email protected]? and @config.has_key?(\"vpc\")\n subnet = mySubnets.sample\n if subnet.nil?\n raise MuError, \"Got null subnet id out of #{@config['vpc']}\"\n end\n MU.log \"Deploying #{@mu_name} into VPC #{@vpc.cloud_id} Subnet #{subnet.cloud_id}\"\n allowBastionAccess\n instance_descriptor[:subnet_id] = subnet.cloud_id\n end\n\n if [email protected]? 
and [email protected]?\n instance_descriptor[:user_data] = Base64.encode64(@userdata)\n end\n\n MU::Cloud::AWS::Server.waitForAMI(@config[\"image_id\"], region: @region, credentials: @credentials)\n\n instance_descriptor[:block_device_mappings] = MU::Cloud::AWS::Server.configureBlockDevices(image_id: @config[\"image_id\"], storage: @config['storage'], region: @region, credentials: @credentials)\n\n instance_descriptor[:monitoring] = {enabled: @config['monitoring']}\n\n if @tags and @tags.size > 0\n instance_descriptor[:tag_specifications] = [{\n :resource_type => \"instance\",\n :tags => @tags.keys.map { |k|\n { :key => k, :value => @tags[k] }\n }\n }]\n end\n\n MU.log \"Creating EC2 instance #{@mu_name}\", details: instance_descriptor\n\n instance = resp = nil\n loop_if = Proc.new {\n instance = resp.instances.first if resp and resp.instances\n resp.nil? or resp.instances.nil? or instance.nil?\n }\n\n bad_subnets = []\n mysubnet_ids = if mySubnets\n mySubnets.map { |s| s.cloud_id }\n end\n begin\n MU.retrier([Aws::EC2::Errors::InvalidGroupNotFound, Aws::EC2::Errors::InvalidSubnetIDNotFound, Aws::EC2::Errors::InvalidParameterValue], loop_if: loop_if, loop_msg: \"Waiting for run_instances to return #{@mu_name}\") {\n resp = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).run_instances(instance_descriptor)\n }\n rescue Aws::EC2::Errors::Unsupported => e\n bad_subnets << instance_descriptor[:subnet_id]\n better_subnet = (mysubnet_ids - bad_subnets).sample\n if e.message !~ /is not supported in your requested Availability Zone/ and\n (mysubnet_ids.nil? or mysubnet_ids.empty? or\n mysubnet_ids.size == bad_subnets.size or\n better_subnet.nil? or better_subnet == \"\")\n raise MuError.new e.message, details: mysubnet_ids\n end\n instance_descriptor[:subnet_id] = (mysubnet_ids - bad_subnets).sample\n if instance_descriptor[:subnet_id].nil?\n raise MuError.new \"Specified subnet#{bad_subnets.size > 1 ? 
\"s do\" : \" does\"} not support instance type #{instance_descriptor[:instance_type]}\", details: bad_subnets\n end\n MU.log \"One or more subnets does not support instance type #{instance_descriptor[:instance_type]}, attempting with #{instance_descriptor[:subnet_id]} instead\", MU::WARN, details: bad_subnets\n retry\n rescue Aws::EC2::Errors::InvalidRequest => e\n MU.log e.message, MU::ERR, details: instance_descriptor\n raise e\n end\n\n MU.log \"#{@mu_name} (#{instance.instance_id}) coming online\"\n\n instance\n end", "def create_aws_instance(config, name, instance_type=\"m3.medium\")\n config.ssh.pty = true\n config.vm.define name do |server|\n server.vm.box = AWS_BOX\n server.vm.provider :aws do |aws, override|\n aws.instance_type = instance_type\n aws.region = AWS_REGION\n aws.ami = AWS_AMI\n aws.keypair_name = AWS_PRIVATE_KEY\n override.ssh.username = AWS_SSH_USERNAME\n override.ssh.private_key_path = AWS_PRIVATE_KEY_PATH\n yield(aws,override,server)\n end\n end\nend", "def network\n TestLab::Utility.network(self.address)\n end", "def network\n @network ||= Network.default\n end", "def configure_instance(\n machine,\n instance_profile,\n provider = 'virtualbox'\n)\n\n\n return false unless lookup_values_yaml(instance_profile, ['providers', provider])\n\n configure_vagrant_box(machine, instance_profile)\n\n configure_instance_hardware(machine, instance_profile)\n\n configure_networking(machine, instance_profile)\n\n configure_communication(machine, instance_profile)\n\n configure_filesystems(machine, instance_profile, provider)\n\n execute_system_commands(machine, instance_profile, provider)\n\nend", "def createEc2Instance\n name = @config[\"name\"]\n node = @config['mu_name']\n\n instance_descriptor = {\n :image_id => @config[\"ami_id\"],\n :key_name => @deploy.ssh_key_name,\n :instance_type => @config[\"size\"],\n :disable_api_termination => true,\n :min_count => 1,\n :max_count => 1\n }\n\n arn = nil\n if @config['generate_iam_role']\n role = @deploy.findLitterMate(name: @config['name'], type: \"roles\")\n s3_objs = [\"#{@deploy.deploy_id}-secret\", \"#{role.mu_name}.pfx\", \"#{role.mu_name}.crt\", \"#{role.mu_name}.key\", \"#{role.mu_name}-winrm.crt\", \"#{role.mu_name}-winrm.key\"].map { |file| \n 'arn:'+(MU::Cloud::AWS.isGovCloud?(@config['region']) ? \"aws-us-gov\" : \"aws\")+':s3:::'+MU.adminBucketName+'/'+file\n }\n role.cloudobj.injectPolicyTargets(\"MuSecrets\", s3_objs)\n\n @config['iam_role'] = role.mu_name\n arn = role.cloudobj.createInstanceProfile\n# @cfm_role_name, @cfm_prof_name\n\n elsif @config['iam_role'].nil?\n raise MuError, \"#{@mu_name} has generate_iam_role set to false, but no iam_role assigned.\"\n end\n if !@config[\"iam_role\"].nil?\n if arn\n instance_descriptor[:iam_instance_profile] = {arn: arn}\n else\n instance_descriptor[:iam_instance_profile] = {name: @config[\"iam_role\"]}\n end\n end\n\n security_groups = []\n if @dependencies.has_key?(\"firewall_rule\")\n @dependencies['firewall_rule'].values.each { |sg|\n security_groups << sg.cloud_id\n }\n end\n\n if security_groups.size > 0\n instance_descriptor[:security_group_ids] = security_groups\n else\n raise MuError, \"Didn't get any security groups assigned to be in #{@mu_name}, that shouldn't happen\"\n end\n\n if !@config['private_ip'].nil?\n instance_descriptor[:private_ip_address] = @config['private_ip']\n end\n\n vpc_id = subnet = nil\n if [email protected]? 
and @config.has_key?(\"vpc\")\n subnet_conf = @config['vpc']\n subnet_conf = @config['vpc']['subnets'].first if @config['vpc'].has_key?(\"subnets\") and !@config['vpc']['subnets'].empty?\n tag_key, tag_value = subnet_conf['tag'].split(/=/, 2) if !subnet_conf['tag'].nil?\n\n subnet = @vpc.getSubnet(\n cloud_id: subnet_conf['subnet_id'],\n name: subnet_conf['subnet_name'],\n tag_key: tag_key,\n tag_value: tag_value\n )\n if subnet.nil?\n raise MuError, \"Got null subnet id out of #{subnet_conf['vpc']}\"\n end\n MU.log \"Deploying #{node} into VPC #{@vpc.cloud_id} Subnet #{subnet.cloud_id}\"\n punchAdminNAT\n instance_descriptor[:subnet_id] = subnet.cloud_id\n end\n\n if [email protected]? and [email protected]?\n instance_descriptor[:user_data] = Base64.encode64(@userdata)\n end\n\n MU::Cloud::AWS::Server.waitForAMI(@config[\"ami_id\"], region: @config['region'], credentials: @config['credentials'])\n\n # Figure out which devices are embedded in the AMI already.\n image = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_images(image_ids: [@config[\"ami_id\"]]).images.first\n ext_disks = {}\n if !image.block_device_mappings.nil?\n image.block_device_mappings.each { |disk|\n if !disk.device_name.nil? and !disk.device_name.empty? and !disk.ebs.nil? and !disk.ebs.empty?\n ext_disks[disk.device_name] = MU.structToHash(disk.ebs)\n end\n }\n end\n\n configured_storage = Array.new\n cfm_volume_map = {}\n if @config[\"storage\"]\n @config[\"storage\"].each { |vol|\n # Drop the \"encrypted\" flag if a snapshot for this device exists\n # in the AMI, even if they both agree about the value of said\n # flag. Apparently that's a thing now.\n if ext_disks.has_key?(vol[\"device\"])\n if ext_disks[vol[\"device\"]].has_key?(:snapshot_id)\n vol.delete(\"encrypted\")\n end\n end\n mapping, cfm_mapping = MU::Cloud::AWS::Server.convertBlockDeviceMapping(vol)\n configured_storage << mapping\n }\n end\n\n instance_descriptor[:block_device_mappings] = configured_storage\n instance_descriptor[:block_device_mappings].concat(@ephemeral_mappings)\n instance_descriptor[:monitoring] = {enabled: @config['monitoring']}\n\n MU.log \"Creating EC2 instance #{node}\"\n MU.log \"Instance details for #{node}: #{instance_descriptor}\", MU::DEBUG\n#\t\t\t\tif instance_descriptor[:block_device_mappings].empty?\n#\t\t\t\t\tinstance_descriptor.delete(:block_device_mappings)\n#\t\t\t\tend\n\n retries = 0\n begin\n response = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).run_instances(instance_descriptor)\n rescue Aws::EC2::Errors::InvalidGroupNotFound, Aws::EC2::Errors::InvalidSubnetIDNotFound, Aws::EC2::Errors::InvalidParameterValue => e\n if retries < 10\n if retries > 7\n MU.log \"Seeing #{e.inspect} while trying to launch #{node}, retrying a few more times...\", MU::WARN, details: instance_descriptor\n end\n sleep 10\n retries = retries + 1\n retry\n else\n raise MuError, e.inspect\n end\n end\n\n instance = response.instances.first\n MU.log \"#{node} (#{instance.instance_id}) coming online\"\n\n return instance\n\n end", "def configure_interfaces\n node_servers.each do |svinfo|\n gretap_interfaces(svinfo).each do |ifname, ifcfg|\n host_name = svinfo['hostname']\n virtual_addr = virtual_address(ifcfg)\n\n cloudconductor_server_interface \"#{host_name}_#{ifname}\" do\n action :create\n hostname host_name\n if_name ifname\n network ifcfg['network']\n security_groups ifcfg['security_groups']\n virtual_address virtual_addr\n end\n end\n end\nend", "def 
configure_vm(vm, **opts)\n\n vm.box = opts.fetch(:box, \"bento/ubuntu-16.04\")\n vm.network :private_network, ip: opts[:private_ip]\n\n vm.provider \"virtualbox\" do |vb|\n vb.memory = 4096\n vb.cpus = 2\n end\n \n # Disable default share, because we dont use it\n vm.synced_folder \".\", \"/vagrant\", disabled: true\nend", "def configure_cloud\n message \"Configuring master\"\n build_and_send_config_files_in_temp_directory\n remote_configure_instances\n \n nodes.each do |node|\n node.configure\n end \n end", "def cloud_networks_to_vpc(_options = {})\n return {} if ar_ems.nil?\n\n @cloud_networks_to_vpc ||= string_dropdown(ar_ems.cloud_networks)\n rescue => e\n logger(__method__).ui_exception(e)\n end", "def cleanup_network!\n # Abort if a private network has been defined\n machine.config.vm.networks.each do |cfg|\n return if cfg[0] == :private_network\n end\n machine.communicate.sudo(\"rm -f /etc/nixos/vagrant-network.nix\")\n end", "def set_network\n @network = Network.find(params[:id])\n end", "def private_networks(vm, host)\n if host.has_key?('private_networks')\n private_networks = host['private_networks']\n private_networks.each do |private_network|\n options = {}\n if private_network.key?('ip') && private_network['ip'] != 'dhcp'\n options[:ip] = private_network['ip']\n options[:netmask] = private_network['netmask'] ||= '255.255.255.0'\n else\n options[:type] = 'dhcp'\n end\n options[:mac] = private_network['mac'].gsub(/[-:]/, '') if private_network.key?('mac')\n options[:auto_config] = private_network['auto_config'] if private_network.key?('auto_config')\n options[:virtualbox__intnet] = private_network['intnet'] if private_network.key?('intnet')\n vm.network :private_network, options\n end\n end\nend", "def initialize(network_cloud_props)\n @logger = Bosh::Clouds::Config.logger\n @vip_network = nil\n\n network_cloud_props.networks.each do |net|\n if net.instance_of?(Bosh::AwsCloud::NetworkCloudProps::PublicNetwork)\n cloud_error(\"More than one vip network for '#{net.name}'\") if @vip_network\n @vip_network = net\n end\n end\n end", "def set_aws_connections\n\n @rs_to_aws_cloud_map = {\n 1 => AWS::EC2.new(region: 'us-east-1'),\n 3 => AWS::EC2.new(region: 'us-west-1'),\n 6 => AWS::EC2.new(region: 'us-west-2'),\n 4 => AWS::EC2.new(region: 'ap-southeast-1'),\n 8 => AWS::EC2.new(region: 'ap-southeast-2'),\n 5 => AWS::EC2.new(region: 'ap-northeast-1'),\n 7 => AWS::EC2.new(region: 'sa-east-1'),\n 2 => AWS::EC2.new(region: 'eu-west-1')\n }\nend", "def set_aws_connections\n\n @rs_to_aws_cloud_map = {\n 1 => AWS::EC2.new(region: 'us-east-1'),\n 3 => AWS::EC2.new(region: 'us-west-1'),\n 6 => AWS::EC2.new(region: 'us-west-2'),\n 4 => AWS::EC2.new(region: 'ap-southeast-1'),\n 8 => AWS::EC2.new(region: 'ap-southeast-2'),\n 5 => AWS::EC2.new(region: 'ap-northeast-1'),\n 7 => AWS::EC2.new(region: 'sa-east-1'),\n 2 => AWS::EC2.new(region: 'eu-west-1')\n }\nend", "def add_cloud_subnet_network_ports\n add_collection(network, :cloud_subnet_network_ports) do |builder|\n builder.add_properties(:manager_ref_allowed_nil => %i(cloud_subnet))\n end\n end", "def set_networking\n @networking = Networking.find(params[:id])\n end", "def validate_network_configuration!(network_name, root_options, network_options, driver)\n if root_options[:ip] &&\n driver.network_containing_address(root_options[:ip]) != network_name\n raise Errors::NetworkAddressInvalid,\n address: root_options[:ip],\n network_name: network_name\n end\n if network_options[:subnet] &&\n driver.network_containing_address(network_options[:subnet]) 
!= network_name\n raise Errors::NetworkSubnetInvalid,\n subnet: network_options[:subnet],\n network_name: network_name\n end\n true\n end", "def update!(**args)\n @network = args[:network] if args.key?(:network)\n @no_external_ip_address = args[:no_external_ip_address] if args.key?(:no_external_ip_address)\n @subnetwork = args[:subnetwork] if args.key?(:subnetwork)\n end" ]
[ "0.7394466", "0.7293648", "0.7204996", "0.7027411", "0.70085776", "0.7008117", "0.68236405", "0.6780012", "0.67006415", "0.66699696", "0.66646504", "0.6614899", "0.6601932", "0.6547433", "0.6393244", "0.6375365", "0.63539976", "0.616765", "0.6159904", "0.61547697", "0.61106104", "0.60095143", "0.60078555", "0.59048754", "0.58985347", "0.58361506", "0.5832474", "0.5822085", "0.58153296", "0.5800422", "0.57479084", "0.57211846", "0.5692348", "0.5682974", "0.5677457", "0.5671342", "0.56492925", "0.56492925", "0.56453353", "0.5637815", "0.56372625", "0.56323475", "0.56046724", "0.5602294", "0.55976665", "0.55722517", "0.5561532", "0.5559082", "0.5547523", "0.554636", "0.554636", "0.5544221", "0.5544221", "0.5544221", "0.5544221", "0.5544221", "0.55432504", "0.55264074", "0.55212533", "0.5515669", "0.55152285", "0.55150896", "0.551446", "0.55138385", "0.55108994", "0.55108994", "0.55108994", "0.5486286", "0.5477798", "0.5475364", "0.5474306", "0.54651856", "0.5456733", "0.5443254", "0.5432038", "0.54314035", "0.54269356", "0.54252315", "0.5414412", "0.5398558", "0.5378292", "0.536616", "0.53658813", "0.5365014", "0.5354191", "0.5353239", "0.5348775", "0.5348769", "0.5345485", "0.5335013", "0.5333805", "0.5322733", "0.53225", "0.53222996", "0.53222996", "0.5320435", "0.53052485", "0.52978325", "0.52866435" ]
0.74011207
1
Creates a new EC2 AMI using a stemcell image. This method can only be run on an EC2 instance, as image creation involves creating and mounting a new EBS volume as a local block device.
def create_stemcell(image_path, stemcell_properties) with_thread_name("create_stemcell(#{image_path}...)") do stemcell_properties.merge!(aws_properties['stemcell'] || {}) if stemcell_properties.has_key?('ami') all_ami_ids = stemcell_properties['ami'].values # select the correct image for the configured ec2 client available_image = @ec2_client.images.filter('image-id', all_ami_ids).first raise Bosh::Clouds::CloudError, "Stemcell does not contain an AMI at endpoint (#{@ec2_client.client.endpoint})" unless available_image "#{available_image.id} light" else create_ami_for_stemcell(image_path, stemcell_properties) end end end
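Editor's note: the method above only shows the light-stemcell branch; the heavy path is delegated to create_ami_for_stemcell, whose body is not included here. The snippet below is a minimal, illustrative sketch of what that path generally involves (snapshot an EBS volume holding the raw image, then register an AMI from the snapshot). It is not the CPI's actual implementation; the region, volume id, device names, and volume type are assumptions.

    # Illustrative sketch only, using aws-sdk-ec2; values below are placeholders.
    require 'aws-sdk-ec2'

    ec2 = Aws::EC2::Resource.new(region: 'us-east-1')   # assumed region

    # 1. A blank EBS volume is attached to the current instance and the raw
    #    stemcell image is copied onto it (omitted here); then it is snapshotted.
    snapshot = ec2.client.create_snapshot(
      volume_id: 'vol-0123456789abcdef0',               # hypothetical volume id
      description: 'bosh stemcell root image'
    )
    ec2.client.wait_until(:snapshot_completed, snapshot_ids: [snapshot.snapshot_id])

    # 2. Register an AMI whose root device points at that snapshot.
    ami = ec2.client.register_image(
      name: "bosh-stemcell-#{Time.now.to_i}",
      architecture: 'x86_64',
      virtualization_type: 'hvm',
      root_device_name: '/dev/xvda',                    # assumed device name
      block_device_mappings: [{
        device_name: '/dev/xvda',
        ebs: { snapshot_id: snapshot.snapshot_id, volume_type: 'gp2' }
      }]
    )
    puts ami.image_id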
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_stemcell(image_path, stemcell_properties)\n with_thread_name(\"create_stemcell(#{image_path}...)\") do\n stemcell_properties.merge!(aws_properties['stemcell'] || {})\n\n if stemcell_properties.has_key?('ami')\n all_ami_ids = stemcell_properties['ami'].values\n\n # select the correct image for the configured ec2 client\n available_image = @ec2_resource.images(\n {\n filters: [{\n name: 'image-id',\n values: all_ami_ids\n }]\n }\n ).first\n raise Bosh::Clouds::CloudError, \"Stemcell does not contain an AMI at endpoint (#{@ec2_resource.client.endpoint})\" unless available_image\n\n \"#{available_image.id} light\"\n else\n create_ami_for_stemcell(image_path, stemcell_properties)\n end\n end\n end", "def create_stemcell(image_path, stemcell_properties)\n with_thread_name(\"create_stemcell(#{image_path}...)\") do\n props = @props_factory.stemcell_props(stemcell_properties)\n\n if props.is_light?\n # select the correct image for the configured ec2 client\n available_image = @ec2_resource.images(\n filters: [{\n name: 'image-id',\n values: props.ami_ids\n }]\n ).first\n raise Bosh::Clouds::CloudError, \"Stemcell does not contain an AMI in region #{@config.aws.region}\" unless available_image\n\n if props.encrypted\n copy_image_result = @ec2_client.copy_image(\n source_region: @config.aws.region,\n source_image_id: props.region_ami,\n name: \"Copied from SourceAMI #{props.region_ami}\",\n encrypted: props.encrypted,\n kms_key_id: props.kms_key_arn\n )\n\n encrypted_image_id = copy_image_result.image_id\n encrypted_image = @ec2_resource.image(encrypted_image_id)\n ResourceWait.for_image(image: encrypted_image, state: 'available')\n\n return encrypted_image_id.to_s\n end\n\n \"#{available_image.id} light\"\n else\n create_ami_for_stemcell(image_path, props)\n end\n end\n end", "def build_vm_image\n options.verbose? ? 
@@log.level = Logger::DEBUG : @@log.level = Logger::ERROR\n def_constants(guess_os(\"fedora\"))\n\n # Override the machine type to launch if necessary\n $amz_options[:instance_type] = options[:instance_type] if options[:instance_type]\n $amz_options[:block_device_mappings] = {\"/dev/sdb\" => \"ephemeral0\"}\n \n # Establish a new connection\n conn = connect(options.region)\n \n image = nil\n # Create a new builder instance\n if (options.region?nil)\n image = conn.images[AMI[\"us-east-1\"]]\n elsif AMI[options.region].nil?\n puts \"No AMI specified for region:\" + options.region\n exit 1\n else\n image = conn.images[AMI[options.region]]\n end\n\n puts \"Launching AMI: #{image.id} - #{image.name}\"\n instance = launch_instance(image, \"oso-image-builder\", 1, SSH_USER)\n hostname = instance.dns_name\n puts \"Done\"\n puts \"Hostname: #{hostname}\"\n ssh(hostname, 'su - -c \"setenforce 0\"' , 60, false, 1, SSH_USER)\n ssh(hostname, 'su - -c \"yum update -y\"' , 300, false, 1, SSH_USER)\n ssh(hostname, 'su - -c \"yum install -y appliance-tools qemu-img\"' , 60, false, 1, SSH_USER)\n scp_to(hostname, File.expand_path(\"#{__FILE__ }/../templates/openshift-origin.ks\"), '/home/ec2-user/openshift-origin.ks', 60, 5,SSH_USER)\n scp_to(hostname, File.expand_path(\"#{__FILE__ }/../templates/openshift-origin.vmx\"), '/home/ec2-user/openshift-origin.vmx', 60, 5,SSH_USER)\n scp_to(hostname, File.expand_path(\"#{__FILE__ }/../templates/openshift-origin.vbox\"), '/home/ec2-user/openshift-origin.vbox', 60, 5,SSH_USER) \n ssh(hostname, 'su - -c \"mv -f /home/ec2-user/openshift-origin.ks /mnt/\"' , 60, false, 1, SSH_USER)\n ssh(hostname, 'su - -c \"mkdir -p /mnt/tmp/build && mkdir -p /mnt/openshift-origin\"', 60, false, 1,SSH_USER)\n ssh(hostname, 'su - -c \"which VBoxManage 2>&1 > /dev/null || yum install -y http://download.virtualbox.org/virtualbox/4.2.8/VirtualBox-4.2-4.2.8_83876_fedora18-1.x86_64.rpm\"', 60, false, 1, SSH_USER)\n ssh(hostname, 'su - -c \"cd /mnt && appliance-creator -d -n openshift-origin -c openshift-origin.ks ' +\n '--format=vmdk --vmem=1024 --checksum --logfile=build.log --tmpdir=/mnt/tmp/build --cache /mnt/tmp/cache/\"', 2400, false, 1, SSH_USER)\n ssh(hostname, 'su - -c \"' + %{\n /bin/cp -f /mnt/openshift-origin.ks /mnt/openshift-origin/ &&\n /bin/mv -f /home/ec2-user/openshift-origin.v* /mnt/openshift-origin/ && \n mkdir -p /root/.VirtualBox/ &&\n /bin/cp -f /mnt/openshift-origin/openshift-origin.vbox /root/.VirtualBox/openshift-origin.vbox &&\n ln -sf /mnt/openshift-origin/openshift-origin-sda.vmdk /root/.VirtualBox/openshift-origin-sda.vmdk &&\n pushd /root/.VirtualBox/ &&\n VBoxManage registervm openshift-origin.vbox &&\n VBoxManage storageattach \\\\\\\"OpenShift Origin\\\\\\\" --storagectl SATA --type hdd --port 0 --medium openshift-origin-sda.vmdk &&\n /bin/cp -f /root/.VirtualBox/openshift-origin.vbox /mnt/openshift-origin/openshift-origin.vbox && \n popd && \n cd /mnt/openshift-origin && tar zcf openshift-origin.tgz * &&\n VBoxManage unregistervm \\\\\\\"OpenShift Origin\\\\\\\"\n } + '\"' , 2400, false, 1, SSH_USER)\n FileUtils.mkdir_p \"vm/\"\n scp_from(hostname, \"/mnt/openshift-origin/openshift-origin.tgz\", \"vm/\", 1200, SSH_USER)\n begin\n terminate_instance(hostname) if options.terminate?\n rescue\n # suppress termination errors - they have been logged already\n end\n end", "def create_image(params)\n instance_id = params['InstanceId']\n instance_id = instance_id.split('-')[1]\n\n vm = VirtualMachine.new(\n VirtualMachine.build_xml(instance_id),\n @client)\n\n rc = 
vm.info\n if OpenNebula::is_error?(rc)\n rc.ec2_code = \"InvalidInstanceID.NotFound\"\n return rc\n end\n\n image_id = vm.disk_saveas(1,\n params[\"Name\"],\n OpenNebula::Image::IMAGE_TYPES[0])\n\n # TODO Add AMI Tags\n # TODO A new persistent image should be created for each instance\n\n if OpenNebula::is_error?(image_id)\n return image_id\n end\n\n erb_version = params['Version']\n\n response = ERB.new(File.read(@config[:views]+\"/create_image.erb\"))\n return response.result(binding), 200\n end", "def create options = {}\n resp = case\n when options[:instance_id]\n client.create_image(options)\n when options[:image_location] || options[:root_device_name]\n if kernel = options.delete(:kernel)\n options[:kernel_id] = kernel.id\n end\n if ramdisk = options.delete(:ramdisk)\n options[:ramdisk_id] = ramdisk.id\n end\n options[:block_device_mappings] =\n translate_block_device_mappings(options[:block_device_mappings]) if\n options[:block_device_mappings]\n client.register_image(options)\n else\n raise(ArgumentError,\n \"expected instance_id, image_location, \" +\n \"or root_device_name\")\n end\n Image.new(resp.image_id, :config => config)\n end", "def create_agent_ami\n return unless ami_creation_needed?\n # AMI does not exist\n logger.info(\"Creating agent AMI for #{self.region}...\")\n clean_instance = ec2.instances.create(new_ec2_instance_attrs(base_ami, [find_security_group.id]))\n begin\n perform_instance_checks(clean_instance)\n build_ami(clean_instance)\n rescue Exception => ex\n logger.error(\"Failed to create instance on #{self.region}: #{ex.message}, terminating temporary instance...\")\n raise(ex)\n ensure\n terminate_instance(clean_instance) if clean_instance\n end\n end", "def register_hailstorm_ami(instance)\n new_ami = ec2.images.create(\n name: ami_id,\n instance_id: instance.instance_id,\n description: 'AMI for distributed performance testing with Hailstorm'\n )\n wait_for(\"Hailstorm AMI #{ami_id} to be created\") { new_ami.state == :available }\n raise(Hailstorm::AmiCreationFailure.new(self.region, new_ami.state_reason)) unless new_ami.state == :available\n new_ami.id\n end", "def instance_ec2\n @@cache[:fedora_image] = ImageService::create_image(nil,\n :broker_image_id => 'fedora',\n :provider_image_id => 'ami-bafcf3ce',\n :provider_id => @@cache[:ec2_provider].id,\n )\n @@cache[:instance_ec2] = LaunchService.launch(Seeding[:pool_ec2].id, 'test instance '+Time.now.to_i.to_s, Seeding[:hwp_1].name, Seeding[:fedora_image].broker_image_id,\n #:flavor => 'm1-small',\n :keyname => 'mpovolny'\n )\n end", "def create!\n EC2.clone_interface!(instance.eth0, description)\n tag!\n puts \"Created secondary ENI (#{interface.id})\"\n end", "def create_aws_instance(config, name, instance_type=\"m3.medium\")\n config.ssh.pty = true\n config.vm.define name do |server|\n server.vm.box = AWS_BOX\n server.vm.provider :aws do |aws, override|\n aws.instance_type = instance_type\n aws.region = AWS_REGION\n aws.ami = AWS_AMI\n aws.keypair_name = AWS_PRIVATE_KEY\n override.ssh.username = AWS_SSH_USERNAME\n override.ssh.private_key_path = AWS_PRIVATE_KEY_PATH\n yield(aws,override,server)\n end\n end\nend", "def createEc2Instance\n\t\t name = @server[\"name\"]\n\t\t node = @server['mu_name']\n\t\t\tbegin\n\t\t\t\t@server['iam_role'] = MU::Server.createIAMProfile(\"Server-\"+name, base_profile: @server['iam_role'], extra_policies: @server['iam_policies'])\n\t\t\trescue Aws::EC2::Errors::RequestLimitExceeded => e\n\t\t\t\tsleep 10\n\t\t\t\tretry\n\t\t\tend\n\t\t\t@server['iam_role'] = 
@server['iam_role']\n\n\t\t\tbegin\n\t\t\t\[email protected]\n\t\t\trescue Aws::EC2::Errors::RequestLimitExceeded => e\n\t\t\t\tsleep 10\n\t\t\t\tretry\n\t\t\tend\n\n\t\t instance_descriptor = {\n\t\t :image_id => @server[\"ami_id\"],\n\t\t :key_name => @deploy.keypairname,\n\t\t :instance_type => @server[\"size\"],\n\t\t :disable_api_termination => true,\n\t\t :min_count => 1,\n\t\t :max_count => 1,\n\t\t\t\t:network_interfaces => [\n\t\t\t\t\t{\n\t\t\t\t\t\t:associate_public_ip_address => name[\"associate_public_ip\"]\n\t\t\t\t\t}\n\t\t\t\t]\n\t\t }\n\t\t\t\n\t\t\tif !@server['private_ip'].nil?\n\t\t\t\tinstance_descriptor[:private_ip_address] = @server['private_ip']\n\t\t\tend\n\n\t\t\tvpc_id=subnet_id=nat_host_name=nat_ssh_user = nil\n\t\t\tsubnet_retries = 0\n\t\t\tif !@server[\"vpc\"].nil?\n\t\t\t\tbegin\n\t\t\t\t\tvpc_id, subnet_ids, nat_host_name, nat_ssh_user = MU::VPC.parseVPC(@server['vpc'])\n\t\t\t\trescue Aws::EC2::Errors::ServiceError => e\n\t\t\t\t\tMU.log e.message, MU::ERR, details: @server\n\t\t\t\t\tif subnet_retries < 5\n\t\t\t\t\t subnet_retries = subnet_retries + 1\n\t\t\t\t\t sleep 15\n\t\t\t\t\t retry\n\t\t\t\t\tend\n\t\t\t\t\traise e\n\t\t\t\tend\n\t\t\t\tsubnet_id = subnet_ids.first\n\t\t\t\tif subnet_id.nil? or subnet_id.empty?\n\t\t\t\t\tMU.log \"Got null Subnet id out of #{@server['vpc']}\", MU::ERR\n\t\t\t\t\traise \"deploy failure\"\n\t\t\t\tend\n\n\t\t\t\tMU.log \"Deploying #{node} into VPC #{vpc_id} Subnet #{subnet_id}\"\n\n\t\t\t\tif !@server[\"vpc\"][\"nat_host_name\"].nil? or !@server[\"vpc\"][\"nat_host_id\"].nil?\n\t\t\t\t\tadmin_sg = MU::Server.punchAdminNAT(@server, node)\n\t\t\t\telse\n\t\t\t\t\tadmin_sg = MU::FirewallRule.setAdminSG(vpc_id: vpc_id, region: @server['region'])\n\t\t\t\tend\n\n\t\t\t\tinstance_descriptor[:subnet_id] = subnet_id\n\t\t\t\tnode_sg = MU::FirewallRule.createEc2SG(\n\t\t\t\t\t\t@server[\"name\"].upcase,\n\t\t\t\t\t\t@server[\"ingress_rules\"],\n\t\t\t\t\t\tdescription: \"SG holes for #{node}\",\n\t\t\t\t\t\tvpc_id: vpc_id,\n\t\t\t\t\t\tregion: @server['region']\n\t\t\t\t)\n\t\t\telse\n\t\t\t\tadmin_sg = MU::FirewallRule.setAdminSG(region: @server['region'])\n\t\t\t\tnode_sg = MU::FirewallRule.createEc2SG(\n\t\t\t\t\t\t@server[\"name\"].upcase,\n\t\t\t\t\t\t@server[\"ingress_rules\"],\n\t\t\t\t\t\tdescription: \"SG holes for #{node}\",\n\t\t\t\t\t\tregion: @server['region']\n\t\t\t\t)\n\t\t\tend\n\t\t\tsecurity_groups = Array.new\n\t\t\tsecurity_groups << admin_sg\n\t\t\tsecurity_groups << node_sg\n\t\t\tif !@server[\"add_firewall_rules\"].nil?\n\t\t\t\t@server[\"add_firewall_rules\"].each { |acl|\n\t\t\t\t\tsg = MU::FirewallRule.find(sg_id: acl[\"rule_id\"], name: acl[\"rule_name\"], region: @server['region'])\n\t\t\t\t\tif sg.nil?\n\t\t\t\t\t\tMU.log \"Couldn't find dependent security group #{acl} for server #{node}\", MU::ERR\n\t\t\t\t\t\traise \"deploy failure\"\n\t\t\t\t\tend\n\t\t\t\t\tsecurity_groups << sg.group_id\n\t\t\t\t}\n\t\t\tend\n\n\t\t\tinstance_descriptor[:security_group_ids] = security_groups\n\n\t\t if [email protected]? 
and [email protected]?\n\t\t instance_descriptor[:user_data] = Base64.encode64(@userdata)\n\t\t end\n\n\t\t if !@server[\"iam_role\"].nil?\n\t\t instance_descriptor[:iam_instance_profile] = { name: @server[\"iam_role\"]}\n\t\t end\n\n\t\t\tconfigured_storage = Array.new\n\t\t\tif @server[\"storage\"]\n\t\t\t\t@server[\"storage\"].each { |vol|\n\t\t\t\t\tconfigured_storage << MU::Server.convertBlockDeviceMapping(vol)\n\t\t\t\t}\n\t\t\tend\n\t\t\n\t\t\tMU::Server.waitForAMI(@server[\"ami_id\"], region: @server['region'])\n\n\t\t\tinstance_descriptor[:block_device_mappings] = configured_storage\n\t\t\tinstance_descriptor[:block_device_mappings].concat(@ephemeral_mappings)\n\n\t\t\tinstance_descriptor[:monitoring] = { enabled: @server['monitoring'] }\n\n\t\t\tMU.log \"Creating EC2 instance #{node}\"\n\t\t\tMU.log \"Instance details for #{node}: #{instance_descriptor}\", MU::DEBUG\n#\t\t\t\tif instance_descriptor[:block_device_mappings].empty?\n#\t\t\t\t\tinstance_descriptor.delete(:block_device_mappings)\n#\t\t\t\tend\n#pp instance_descriptor[:block_device_mappings]\n\t\t\tretries = 0\n\t\t\tbegin\n\t\t\t\tresponse = MU.ec2(@server['region']).run_instances(instance_descriptor)\n\t\t\trescue Aws::EC2::Errors::InvalidGroupNotFound, Aws::EC2::Errors::InvalidSubnetIDNotFound, Aws::EC2::Errors::InvalidParameterValue, Aws::EC2::Errors::RequestLimitExceeded => e\n\t\t\t\tif retries < 10\n\t\t\t\t\tif retries > 7\n\t\t\t\t\t\tMU.log \"Seeing #{e.inspect} while trying to launch #{node}, retrying a few more times...\", MU::WARN, details: instance_descriptor\n\t\t\t\t\tend\n\t\t\t\t\tsleep 10\n\t\t\t\t\tretries = retries + 1\n\t\t\t\t\tretry\n\t\t\t\telse\n\t\t\t\t\traise e\n\t\t\t\tend\n\t\t\tend\n\n\t\t\tinstance = response.instances.first\n\t\t\tMU.log \"#{node} (#{instance.instance_id}) coming online\"\n\n\n\t\t\treturn instance\n\n\t\tend", "def createEc2Instance\n name = @config[\"name\"]\n node = @config['mu_name']\n\n instance_descriptor = {\n :image_id => @config[\"ami_id\"],\n :key_name => @deploy.ssh_key_name,\n :instance_type => @config[\"size\"],\n :disable_api_termination => true,\n :min_count => 1,\n :max_count => 1\n }\n\n arn = nil\n if @config['generate_iam_role']\n role = @deploy.findLitterMate(name: @config['name'], type: \"roles\")\n s3_objs = [\"#{@deploy.deploy_id}-secret\", \"#{role.mu_name}.pfx\", \"#{role.mu_name}.crt\", \"#{role.mu_name}.key\", \"#{role.mu_name}-winrm.crt\", \"#{role.mu_name}-winrm.key\"].map { |file| \n 'arn:'+(MU::Cloud::AWS.isGovCloud?(@config['region']) ? 
\"aws-us-gov\" : \"aws\")+':s3:::'+MU.adminBucketName+'/'+file\n }\n role.cloudobj.injectPolicyTargets(\"MuSecrets\", s3_objs)\n\n @config['iam_role'] = role.mu_name\n arn = role.cloudobj.createInstanceProfile\n# @cfm_role_name, @cfm_prof_name\n\n elsif @config['iam_role'].nil?\n raise MuError, \"#{@mu_name} has generate_iam_role set to false, but no iam_role assigned.\"\n end\n if !@config[\"iam_role\"].nil?\n if arn\n instance_descriptor[:iam_instance_profile] = {arn: arn}\n else\n instance_descriptor[:iam_instance_profile] = {name: @config[\"iam_role\"]}\n end\n end\n\n security_groups = []\n if @dependencies.has_key?(\"firewall_rule\")\n @dependencies['firewall_rule'].values.each { |sg|\n security_groups << sg.cloud_id\n }\n end\n\n if security_groups.size > 0\n instance_descriptor[:security_group_ids] = security_groups\n else\n raise MuError, \"Didn't get any security groups assigned to be in #{@mu_name}, that shouldn't happen\"\n end\n\n if !@config['private_ip'].nil?\n instance_descriptor[:private_ip_address] = @config['private_ip']\n end\n\n vpc_id = subnet = nil\n if [email protected]? and @config.has_key?(\"vpc\")\n subnet_conf = @config['vpc']\n subnet_conf = @config['vpc']['subnets'].first if @config['vpc'].has_key?(\"subnets\") and !@config['vpc']['subnets'].empty?\n tag_key, tag_value = subnet_conf['tag'].split(/=/, 2) if !subnet_conf['tag'].nil?\n\n subnet = @vpc.getSubnet(\n cloud_id: subnet_conf['subnet_id'],\n name: subnet_conf['subnet_name'],\n tag_key: tag_key,\n tag_value: tag_value\n )\n if subnet.nil?\n raise MuError, \"Got null subnet id out of #{subnet_conf['vpc']}\"\n end\n MU.log \"Deploying #{node} into VPC #{@vpc.cloud_id} Subnet #{subnet.cloud_id}\"\n punchAdminNAT\n instance_descriptor[:subnet_id] = subnet.cloud_id\n end\n\n if [email protected]? and [email protected]?\n instance_descriptor[:user_data] = Base64.encode64(@userdata)\n end\n\n MU::Cloud::AWS::Server.waitForAMI(@config[\"ami_id\"], region: @config['region'], credentials: @config['credentials'])\n\n # Figure out which devices are embedded in the AMI already.\n image = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_images(image_ids: [@config[\"ami_id\"]]).images.first\n ext_disks = {}\n if !image.block_device_mappings.nil?\n image.block_device_mappings.each { |disk|\n if !disk.device_name.nil? and !disk.device_name.empty? and !disk.ebs.nil? and !disk.ebs.empty?\n ext_disks[disk.device_name] = MU.structToHash(disk.ebs)\n end\n }\n end\n\n configured_storage = Array.new\n cfm_volume_map = {}\n if @config[\"storage\"]\n @config[\"storage\"].each { |vol|\n # Drop the \"encrypted\" flag if a snapshot for this device exists\n # in the AMI, even if they both agree about the value of said\n # flag. 
Apparently that's a thing now.\n if ext_disks.has_key?(vol[\"device\"])\n if ext_disks[vol[\"device\"]].has_key?(:snapshot_id)\n vol.delete(\"encrypted\")\n end\n end\n mapping, cfm_mapping = MU::Cloud::AWS::Server.convertBlockDeviceMapping(vol)\n configured_storage << mapping\n }\n end\n\n instance_descriptor[:block_device_mappings] = configured_storage\n instance_descriptor[:block_device_mappings].concat(@ephemeral_mappings)\n instance_descriptor[:monitoring] = {enabled: @config['monitoring']}\n\n MU.log \"Creating EC2 instance #{node}\"\n MU.log \"Instance details for #{node}: #{instance_descriptor}\", MU::DEBUG\n#\t\t\t\tif instance_descriptor[:block_device_mappings].empty?\n#\t\t\t\t\tinstance_descriptor.delete(:block_device_mappings)\n#\t\t\t\tend\n\n retries = 0\n begin\n response = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).run_instances(instance_descriptor)\n rescue Aws::EC2::Errors::InvalidGroupNotFound, Aws::EC2::Errors::InvalidSubnetIDNotFound, Aws::EC2::Errors::InvalidParameterValue => e\n if retries < 10\n if retries > 7\n MU.log \"Seeing #{e.inspect} while trying to launch #{node}, retrying a few more times...\", MU::WARN, details: instance_descriptor\n end\n sleep 10\n retries = retries + 1\n retry\n else\n raise MuError, e.inspect\n end\n end\n\n instance = response.instances.first\n MU.log \"#{node} (#{instance.instance_id}) coming online\"\n\n return instance\n\n end", "def createEc2Instance\n\n instance_descriptor = {\n :image_id => @config[\"image_id\"],\n :key_name => @deploy.ssh_key_name,\n :instance_type => @config[\"size\"],\n :disable_api_termination => true,\n :min_count => 1,\n :max_count => 1\n }\n\n instance_descriptor[:iam_instance_profile] = getIAMProfile\n\n security_groups = myFirewallRules.map { |fw| fw.cloud_id }\n if security_groups.size > 0\n instance_descriptor[:security_group_ids] = security_groups\n else\n raise MuError, \"Didn't get any security groups assigned to be in #{@mu_name}, that shouldn't happen\"\n end\n\n if @config['private_ip']\n instance_descriptor[:private_ip_address] = @config['private_ip']\n end\n\n if [email protected]? and @config.has_key?(\"vpc\")\n subnet = mySubnets.sample\n if subnet.nil?\n raise MuError, \"Got null subnet id out of #{@config['vpc']}\"\n end\n MU.log \"Deploying #{@mu_name} into VPC #{@vpc.cloud_id} Subnet #{subnet.cloud_id}\"\n allowBastionAccess\n instance_descriptor[:subnet_id] = subnet.cloud_id\n end\n\n if [email protected]? and [email protected]?\n instance_descriptor[:user_data] = Base64.encode64(@userdata)\n end\n\n MU::Cloud::AWS::Server.waitForAMI(@config[\"image_id\"], region: @region, credentials: @credentials)\n\n instance_descriptor[:block_device_mappings] = MU::Cloud::AWS::Server.configureBlockDevices(image_id: @config[\"image_id\"], storage: @config['storage'], region: @region, credentials: @credentials)\n\n instance_descriptor[:monitoring] = {enabled: @config['monitoring']}\n\n if @tags and @tags.size > 0\n instance_descriptor[:tag_specifications] = [{\n :resource_type => \"instance\",\n :tags => @tags.keys.map { |k|\n { :key => k, :value => @tags[k] }\n }\n }]\n end\n\n MU.log \"Creating EC2 instance #{@mu_name}\", details: instance_descriptor\n\n instance = resp = nil\n loop_if = Proc.new {\n instance = resp.instances.first if resp and resp.instances\n resp.nil? or resp.instances.nil? 
or instance.nil?\n }\n\n bad_subnets = []\n mysubnet_ids = if mySubnets\n mySubnets.map { |s| s.cloud_id }\n end\n begin\n MU.retrier([Aws::EC2::Errors::InvalidGroupNotFound, Aws::EC2::Errors::InvalidSubnetIDNotFound, Aws::EC2::Errors::InvalidParameterValue], loop_if: loop_if, loop_msg: \"Waiting for run_instances to return #{@mu_name}\") {\n resp = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).run_instances(instance_descriptor)\n }\n rescue Aws::EC2::Errors::Unsupported => e\n bad_subnets << instance_descriptor[:subnet_id]\n better_subnet = (mysubnet_ids - bad_subnets).sample\n if e.message !~ /is not supported in your requested Availability Zone/ and\n (mysubnet_ids.nil? or mysubnet_ids.empty? or\n mysubnet_ids.size == bad_subnets.size or\n better_subnet.nil? or better_subnet == \"\")\n raise MuError.new e.message, details: mysubnet_ids\n end\n instance_descriptor[:subnet_id] = (mysubnet_ids - bad_subnets).sample\n if instance_descriptor[:subnet_id].nil?\n raise MuError.new \"Specified subnet#{bad_subnets.size > 1 ? \"s do\" : \" does\"} not support instance type #{instance_descriptor[:instance_type]}\", details: bad_subnets\n end\n MU.log \"One or more subnets does not support instance type #{instance_descriptor[:instance_type]}, attempting with #{instance_descriptor[:subnet_id]} instead\", MU::WARN, details: bad_subnets\n retry\n rescue Aws::EC2::Errors::InvalidRequest => e\n MU.log e.message, MU::ERR, details: instance_descriptor\n raise e\n end\n\n MU.log \"#{@mu_name} (#{instance.instance_id}) coming online\"\n\n instance\n end", "def store_image instance, tags\n begin\n \n puts \"waiting 2 minutes before starting to take the image...\"\n sleep 120\n puts \"creating image...\"\n \n image = @ec2.images.create( \n :instance_id => instance.id,\n :no_reboot => true,\n :description => \"automaticaly created #{tags[ 'image_type' ]} image\",\n :name => \"#{tags[ 'image_type' ]} #{Digest::SHA1.hexdigest tags.inspect}\" )\n \n wait_for_image image\n \n tags.each do | key, value |\n image.add_tag( key, :value => value )\n end \n ensure\n stop_instance instance\n end\n end", "def create # rubocop:disable Metrics/AbcSize\n inst_details = AttrFinder.new(@instanceparameters)\n inst_details.options = @options\n inst_details.validate = @validate\n inst_details.function = 'server'\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::Models::LaunchInstanceDetails.new\n ssh_public_key = @instanceparameters['server']['ssh-key']\n request.availability_domain = inst_details.ad\n request.compartment_id = inst_details.compartment\n request.display_name = @instanceparameters['server']['display_name']\n request.image_id = inst_details.image\n request.shape = @instanceparameters['server']['shape']\n request.subnet_id = inst_details.subnet\n request.metadata = { 'ssh_authorized_keys' => ssh_public_key }\n api = OracleBMC::Core::ComputeClient.new\n response = api.launch_instance(request)\n @instance_id = response.data.id\n compartment(inst_details.compartment)\n running_instance = api.get_instance(@instance_id).wait_until(:lifecycle_state,\n OracleBMC::Core::Models::Instance::LIFECYCLE_STATE_RUNNING,\n max_interval_seconds: 5, max_wait_seconds: 300)\n if @instanceparameters['server']['attachments']\n @instanceparameters['server']['attachments'].each do |vol|\n attach(@instance_id, vol['volume'])\n end\n end\n running_instance\n end", "def build_ami(instance)\n provision(instance)\n logger.info { \"Finalizing changes for #{self.region} AMI...\" }\n self.agent_ami = 
register_hailstorm_ami(instance)\n logger.info { \"New AMI##{self.agent_ami} on #{self.region} created successfully, cleaning up...\" }\n end", "def create_instances\n min_count = max_count = @bs.number_of_nodes\n puts \"\\nCreating #{max_count} on-demand instance(s)\"\n options = {\n 'ClientToken' => generate_token,\n 'KeyName' => Chef::Config[:knife][:aws_ssh_key_id],\n 'InstanceType' => @bs.flavor,\n 'SubnetId' => @bs[:novpc] ? nil : @bs.subnet_id,\n 'Placement.AvailabilityZone' => @bs.mixins.az.data,\n 'SecurityGroupId' => @bs.mixins.sg.data\n }\n options['EbsOptimized'] = !! @bs[:ebs_optimized]\n\n ## REVIEW\n if ami.root_device_type == \"ebs\"\n ami_map = ami.block_device_mapping.first\n block_device_mapping = {\n 'DeviceName' => ami_map['deviceName'],\n 'Ebs.VolumeSize' => ami_map['volumeSize'].to_s,\n 'Ebs.DeleteOnTermination' => ami_map['deleteOnTermination']\n }\n options['BlockDeviceMapping'] = [block_device_mapping]\n end\n\n ## Optionally only include mapped devices\n ## This way we get all of the ephemeral drives, some unmapped however\n if @bs.mixins.volume.data[:ephemeral_available]\n ephmap = @bs.mixins.volume.data.ephemeral_available.each_with_index.map do |d,i|\n {\n 'VirtualName' => \"ephemeral#{i}\",\n 'DeviceName' => d\n }\n end\n options['BlockDeviceMapping'].concat( ephmap )\n end\n\n if (max_count == 1) and @bs[:private_ip_address]\n options['PrivateIpAddress'] = @bs.private_ip_address\n puts \"Assigning IP ADDRESS : #{options['PrivateIpAddress']}\"\n end\n\n if Chef::Config[:knife][:aws_user_data]\n begin\n options['UserData']= File.read(Chef::Config[:knife][:aws_user_data])\n rescue\n ui.warn(\"Cannot read #{Chef::Config[:knife][:aws_user_data]}:\"\\\n \" #{$!.inspect}. Ignoring option.\")\n end\n end\n\n # -----------------------------------------------------------------\n tries = 5\n print_table(options, 'Launch Config')\n begin\n puts \"\\nSending request...\"\n response = connection.run_instances(@bs.image, min_count,\n max_count, options)\n ui.msg(response.inspect)\n rescue Exception => e\n ui.warn(\"#{e.message}\\nException creating instances\")\n if (tries -= 1) <= 0\n ui.warn(\"\\n\\nMax tries reached. 
Exiting.\\n\\n\")\n exit 1\n else\n ui.msg(\"Trying again.\\n\")\n retry\n end\n end\n # now we have our servers\n instances = response.body['instancesSet']\n # select only instances that have instanceId key and collect those ids\n # into an array\n @bs[:instance_ids] =\n instances.select {|i| i.has_key?('instanceId')}.collect do |i|\n i['instanceId']\n end\n\n puts \"\\nNumber of instances started: #{@bs.instance_ids.size}\\n\"\n sleep 10\n puts \"Getting servers..\"\n # collect an array of servers retrieved based on the instance ids we\n # obtained above\n @bs[:servers] = @bs.instance_ids.collect do |id|\n begin\n server = connection.servers.get(id)\n rescue Exception => e\n sleep 7\n retry\n end\n raise Ec2Error.new(\"server #{id} was nil\") if server.nil?\n server\n end\n end", "def launch\n puts \"==> Creating EC2 instance...\"\n\n @instance = @aws_ec2.instances.create( @config.env.merge( { \"key_name\" => key_name, \"security_groups\" => [security_group_name] } ) )\n @instance.tag(\"environment\", {value: @config.environment})\n\n while @instance.status == :pending\n print \".\"\n sleep 2\n end\n\n # Sleep for 30 more seconds\n 15.times do\n print \".\"\n sleep 2\n end\n puts \".\" # new line\n\n puts \"==> Successfully created EC2 instance '#{@instance.id}'\"\n end", "def aws_instance_create(opts)\n AWS::EC2::InstanceCollection.new.create(\n image_id: Rails.configuration.x.aws[Rails.configuration.x.aws['region']][\"ami_#{self.os}\"], \n private_ip_address: self.ip_address,\n key_name: Rails.configuration.x.aws['ec2_key_pair_name'],\n user_data: self.generate_init,\n instance_type: \"t2.small\",\n subnet: self.subnet.driver_id\n )\n end", "def create_stemcell(image_path, cloud_properties)\n with_thread_name(\"create_stemcell(#{image_path}...)\") do\n stemcell_creator = StemcellCreator.new(@logger, @openstack, cloud_properties)\n stemcell = stemcell_creator.create(image_path, @stemcell_public_visibility)\n stemcell.id\n end\n end", "def create_stemcell(image_path, cloud_properties)\n with_thread_name(\"create_stemcell(#{image_path})\") do\n begin\n @logger.debug(\"Image Path: #{image_path}\")\n @logger.debug(\"Cloud properties: #{cloud_properties}\")\n Dir.mktmpdir do |tmp_dir|\n image_name = \"#{cloud_properties['name']}/\" \\\n \"#{cloud_properties['version']}\"\n @logger.debug(\"Image name: #{image_name}\")\n image_location = unpack_image(tmp_dir, image_path)\n stemcell_id = @image_manager.create_image(image_name,\n 'DISK_IMAGE',\n @container_uuid,\n image_location)\n @logger.debug(\"Stemcell ID is #{stemcell_id}\")\n stemcell_id\n end\n rescue => e\n @logger.error(e)\n cloud_error(e.message)\n end\n end\n end", "def create_ec2_instance(attrs)\n instance = ec2.instances.create(attrs)\n perform_instance_checks(instance)\n instance\n end", "def spin_up_instance ami_name, vpc_id, key_name, security_group_id, subnet_id, instance_type = \"t2.micro\"\n resp = client.run_instances({\n dry_run: false,\n image_id: ami_name, # required\n min_count: 1, # required\n max_count: 1, # required\n key_name: key_name,\n instance_type: instance_type, # accepts t1.micro, t2.nano, t2.micro, t2.small, t2.medium, t2.large, m1.small, m1.medium, m1.large, m1.xlarge, m3.medium, m3.large, m3.xlarge, m3.2xlarge, m4.large, m4.xlarge, m4.2xlarge, m4.4xlarge, m4.10xlarge, m4.16xlarge, m2.xlarge, m2.2xlarge, m2.4xlarge, cr1.8xlarge, r3.large, r3.xlarge, r3.2xlarge, r3.4xlarge, r3.8xlarge, x1.16xlarge, x1.32xlarge, i2.xlarge, i2.2xlarge, i2.4xlarge, i2.8xlarge, hi1.4xlarge, hs1.8xlarge, c1.medium, c1.xlarge, 
c3.large, c3.xlarge, c3.2xlarge, c3.4xlarge, c3.8xlarge, c4.large, c4.xlarge, c4.2xlarge, c4.4xlarge, c4.8xlarge, cc1.4xlarge, cc2.8xlarge, g2.2xlarge, g2.8xlarge, cg1.4xlarge, p2.xlarge, p2.8xlarge, p2.16xlarge, d2.xlarge, d2.2xlarge, d2.4xlarge, d2.8xlarge\n monitoring: {\n enabled: true, # required\n },\n network_interfaces: [\n {\n subnet_id: subnet_id,\n groups: [security_group_id],\n device_index: 0,\n associate_public_ip_address: true\n }\n ],\n\n instance_initiated_shutdown_behavior: \"stop\", # accepts stop, terminate\n })\n\n error \"Error starting EC2 instance #{resp.inspect}\" if resp.instances.nil? || resp.instances.size == 0\n\n return resp.instances[0]\n end", "def create(config)\n PlatformOps::Utils.validated_config config, %i(ami_id ssh_user)\n\n @ami_id = config[:ami_id]\n @ssh_user = config[:ssh_user]\n @ingress_cidrs = combine_ingress_addresses(config[:ingress_ip], config[:ingress_cidrs])\n @ssh_public_key_path = config[:ssh_public_key] || '~/.ssh/id_rsa.pub'\n @ssh_private_key_path = config[:ssh_private_key] || '~/.ssh/id_rsa'\n @ssh_poll_private_ip = config[:ssh_poll_private_ip]\n @security_group_connections = config[:security_group_connections] || []\n @eip_allocation_id = config[:eip_allocation_id]\n @instance_type = config[:instance_type] || 't2.medium'\n @cidr = config[:cidr]\n\n begin\n route_table_id = create_route_table\n subnet_id = create_subnet(route_table_id, @cidr)\n security_group_id = create_security_group(security_group_connections)\n instance_id = create_instance(subnet_id, security_group_id)\n instance = wait_for_instance(instance_id)\n associate_eip(instance_id) if @eip_allocation_id\n instance\n rescue Interrupt, StandardError => e\n logger.error e\n\n destroy\n\n raise\n end\n end", "def create_image(options)\n data = JSON.generate(:createImage => options)\n response = @compute.connection.csreq(\"POST\",@svrmgmthost,\"#{@svrmgmtpath}/servers/#{URI.encode(self.id.to_s)}/action\",@svrmgmtport,@svrmgmtscheme,{'content-type' => 'application/json'},data)\n OpenStack::Exception.raise_exception(response) unless response.code.match(/^20.$/)\n image_id = response[\"Location\"].split(\"/images/\").last\n OpenStack::Compute::Image.new(@compute, image_id)\n end", "def create_vm(agent_id, stemcell_id, vm_type, network_spec, disk_locality = nil, environment = nil)\n with_thread_name(\"create_vm(#{agent_id}, ...)\") do\n # do this early to fail fast\n\n target_groups = vm_type.fetch('lb_target_groups', [])\n if target_groups.length > 0\n alb_accessible?\n end\n\n requested_elbs = vm_type.fetch('elbs', [])\n if requested_elbs.length > 0\n elb_accessible?\n end\n\n stemcell = StemcellFinder.find_by_id(@ec2_resource, stemcell_id)\n\n begin\n instance, block_device_agent_info = @instance_manager.create(\n agent_id,\n stemcell.image_id,\n vm_type,\n network_spec,\n (disk_locality || []),\n environment,\n options,\n )\n\n target_groups.each do |target_group_name|\n target_group = LBTargetGroup.new(client: @alb_client, group_name: target_group_name)\n target_group.register(instance.id)\n end\n\n requested_elbs.each do |requested_elb_name|\n requested_elb = ClassicLB.new(client: @elb_client, elb_name: requested_elb_name)\n requested_elb.register(instance.id)\n end\n\n logger.info(\"Creating new instance '#{instance.id}'\")\n\n NetworkConfigurator.new(network_spec).configure(@ec2_resource, instance)\n\n registry_settings = initial_agent_settings(\n agent_id,\n network_spec,\n environment,\n stemcell.root_device_name,\n block_device_agent_info\n )\n 
registry.update_settings(instance.id, registry_settings)\n\n instance.id\n rescue => e # is this rescuing too much?\n logger.error(%Q[Failed to create instance: #{e.message}\\n#{e.backtrace.join(\"\\n\")}])\n instance.terminate(fast_path_delete?) if instance\n raise e\n end\n end\n end", "def set_ami_spec\n ami_arch = @@ec2.describe_images([self.ami_id]).first\n if (ami_arch[:aws_architecture] == \"i386\" && self.ami_spec.blank? && self.spot_price.blank?)\n self.ami_spec = \"c1.medium\"\n self.spot_price = 0.50\n elsif (ami_arch[:aws_architecture] == \"x86_64\" && self.ami_spec.blank? && self.spot_price.blank?)\n self.ami_spec = \"m1.large\"\n self.spot_price = 1.00\n end\n end", "def configure_instance(aws_node, private_ip_address, node_name, node_config)\n # Spin up EC2 instances\n aws_node.vm.provider :aws do |ec2, override|\n ec2.keypair_name = KEYPAIR_NAME\n ec2.access_key_id = ACCESS_KEY_ID\n ec2.secret_access_key = SECRET_ACCESS_KEY\n ec2.security_groups = SECURITY_GROUPS\n override.ssh.private_key_path = PRIVATE_KEY_PATH\n\n # read region, ami etc from json.\n ec2.region = AWS_CFG['region']\n ec2.subnet_id = AWS_CFG['subnet_id']\n ec2.availability_zone = AWS_CFG['region'] + AWS_CFG['availability_zone']\n ec2.ami = node_config['ami_id']\n ec2.instance_type = node_config['instance_type']\n ec2.private_ip_address = private_ip_address\n ec2.associate_public_ip = true\n\n if node_config.key?('volume_size')\n # Size in GB\n # (untested)\n ec2.block_device_mapping = [{ 'DeviceName' => '/dev/sda1', 'Ebs.VolumeSize' => node_config['volume_size'] }]\n end\n\n override.ssh.username = AWS_CFG['ssh_username']\n\n # Collect tags (can't be longer than 250 chars)\n ec2.tags = ({})\n ec2.tags['Name'] = node_name[0..245]\n ec2.tags['Type'] = 'Hyperledger'\n ec2.tags['Version'] = VERSION\n ec2.tags['Fabric'] = node_config['fabric'].map { |f| f['role'] }.join(',')[0..245]\n end\nend", "def create_vm(agent_id, stemcell_id, vm_type, network_spec, disk_locality = nil, environment = nil)\n with_thread_name(\"create_vm(#{agent_id}, ...)\") do\n # do this early to fail fast\n stemcell = StemcellFinder.find_by_id(@ec2_client, stemcell_id)\n\n begin\n instance, block_device_agent_info = @instance_manager.create(\n agent_id,\n stemcell.image_id,\n vm_type,\n network_spec,\n (disk_locality || []),\n environment,\n options,\n )\n\n logger.info(\"Creating new instance '#{instance.id}'\")\n\n NetworkConfigurator.new(network_spec).configure(@ec2_client, instance)\n\n registry_settings = initial_agent_settings(\n agent_id,\n network_spec,\n environment,\n stemcell.root_device_name,\n block_device_agent_info\n )\n registry.update_settings(instance.id, registry_settings)\n\n instance.id\n rescue => e # is this rescuing too much?\n logger.error(%Q[Failed to create instance: #{e.message}\\n#{e.backtrace.join(\"\\n\")}])\n instance.terminate(fast_path_delete?) 
if instance\n raise e\n end\n end\n end", "def index\n\n credentials = Aws::Credentials.new('AKIAJ2JD2EKKFVDSR37A', 'cnZUnzuyYPqUevEPb045VJUnW55VR+rUCQrplzd/')\n ec2 = Aws::EC2::Client.new(\n region: \"us-east-1\",\n credentials: credentials\n )\n #i = ec2.instances.create(:image_id => \"ami-e3106686\")\n resp = ec2.run_instances({\n dry_run: true,\n image_id: \"ami-e3106686\", # required\n min_count: 1, # required\n max_count: 1, # required\n instance_type: \"t1.micro\", # accepts t1.micro, m1.small, m1.medium, m1.large, m1.xlarge, m3.medium, m3.large, m3.xlarge, m3.2xlarge, m4.large, m4.xlarge, m4.2xlarge, m4.4xlarge, m4.10xlarge, t2.micro, t2.small, t2.medium, t2.large, m2.xlarge, m2.2xlarge, m2.4xlarge, cr1.8xlarge, i2.xlarge, i2.2xlarge, i2.4xlarge, i2.8xlarge, hi1.4xlarge, hs1.8xlarge, c1.medium, c1.xlarge, c3.large, c3.xlarge, c3.2xlarge, c3.4xlarge, c3.8xlarge, c4.large, c4.xlarge, c4.2xlarge, c4.4xlarge, c4.8xlarge, cc1.4xlarge, cc2.8xlarge, g2.2xlarge, cg1.4xlarge, r3.large, r3.xlarge, r3.2xlarge, r3.4xlarge, r3.8xlarge, d2.xlarge, d2.2xlarge, d2.4xlarge, d2.8xlarge\n placement: {\n tenancy: \"default\", # accepts default, dedicated\n },\n\n block_device_mappings: [\n {\n virtual_name: \"String\",\n device_name: \"String\",\n ebs: {\n snapshot_id: \"String\",\n volume_size: 1,\n delete_on_termination: true,\n volume_type: \"standard\", # accepts standard, io1, gp2\n iops: 1,\n encrypted: true,\n },\n\n },\n ],\n monitoring: {\n enabled: true, # required\n },\n disable_api_termination: true,\n instance_initiated_shutdown_behavior: \"stop\", # accepts stop, terminate\n network_interfaces: [\n {\n delete_on_termination: true,\n private_ip_addresses: [\n {\n private_ip_address: \"172.31.2.177\", # required\n primary: true,\n },\n ],\n secondary_private_ip_address_count: 1,\n associate_public_ip_address: true,\n },\n ],\n ebs_optimized: true,\n })\n @ec2_instances = Ec2Instance.all\n end", "def create_stemcell(image_path, cloud_properties)\n @telemetry_manager.monitor('initialize') do\n _init_azure\n end\n with_thread_name(\"create_stemcell(#{image_path}, #{cloud_properties})\") do\n extras = {\n 'stemcell' => \"#{cloud_properties.fetch('name', 'unknown_name')}-#{cloud_properties.fetch('version', 'unknown_version')}\"\n }\n @telemetry_manager.monitor('create_stemcell', extras: extras) do\n if has_light_stemcell_property?(cloud_properties)\n @light_stemcell_manager.create_stemcell(cloud_properties)\n elsif @use_managed_disks\n @stemcell_manager2.create_stemcell(image_path, cloud_properties)\n else\n @stemcell_manager.create_stemcell(image_path, cloud_properties)\n end\n end\n end\n end", "def allocate_image(action_handler, image_spec, image_options, machine_spec, machine_options)\n raise \"#{self.class} does not implement create_image\"\n end", "def create_server\n return nil if created? 
# only create a server if it does not already exist\n\n fog_description = fog_description_for_launch\n Chef::Log.debug(JSON.generate(fog_description)) # .dup.tap{|hsh| hsh[:user_data] = \"...\" }\n @fog_server = ClusterChef.connection.servers.create(fog_description)\n end", "def create\n file = Tempfile.new(\"oneimage-#{resource[:name]}\")\n File.chmod(0644, file.path)\n\n template = ERB.new <<-EOF\nNAME = \"<%= resource[:name] %>\"\n<% if resource[:description] %>DESCRIPTION = \"<%= resource[:description] %>\"<% end%>\n<% if resource[:type] %>TYPE = <%= resource[:type].upcase %><% end%>\n<% if resource[:persistent] %>PERSISTENT = <%= resource[:persistent] %><% end%>\n<% if resource[:dev_prefix] %>DEV_PREFIX = \"<%= resource[:dev_prefix] %>\"<% end%>\n<% if resource[:driver] %>DRIVER = \"<%= resource[:driver] %>\"<% end %>\n<% if resource[:path] %>PATH = <%= resource[:path] %><% end%>\n<% if resource[:source] %>SOURCE = <%= resource[:source] %><% end%>\n<% if resource[:fstype] %>FSTYPE = <%= resource[:fstype] %><% end%>\n<% if resource[:size] %>SIZE = <%= resource[:size] %><% end%>\nEOF\n\n tempfile = template.result(binding)\n self.debug \"Creating image using tempfile: #{tempfile}\"\n file.write(tempfile)\n file.close\n output = \"oneimage create -d #{resource[:datastore]} #{file.path} \", self.class.login\n `#{output}`\n end", "def create_instance(credentials, image_id, opts)\n new_vapp = nil\n vapp_opts = {} #assemble options to pass to Fog::Terremark::Real.instantiate_vapp_template\n terremark_hwp = hardware_profiles(credentials, {:name => 'default'}).first #sanity check values against default\n name = opts[:name]\n if not name\n name = \"inst#{Time.now.to_i}\"\n end\n if name.length > USER_NAME_MAX\n raise \"Parameter name must be #{USER_NAME_MAX} characters or less\"\n end\n unless ( (terremark_hwp.include?(:cpu, opts[:hwp_cpu].to_i)) &&\n (terremark_hwp.include?(:memory, opts[:hwp_memory].to_i)) ) then\n raise Deltacloud::Exceptions::ValidationFailure.new(\n StandardError.new(\"Error with cpu and/or memory values. you said cpu->#{opts[:hwp_cpu]} and mem->#{opts[:hwp_memory]}\")\n )\n end\n vapp_opts['cpus'] = opts[:hwp_cpu]\n vapp_opts['memory'] = opts[:hwp_memory]\n safely do\n terremark_client = new_client(credentials)\n#######\n#FIXME# what happens if there is an issue getting the new vapp id? (eg even though created succesfully)\n#######\n vapp_id = terremark_client.instantiate_vapp_template(name, image_id, vapp_opts).body['href'].split('/').last\n new_vapp = terremark_client.get_vapp(vapp_id)\n return convert_instance(new_vapp, terremark_client, credentials.user) #return an Instance object\n end\n end", "def initialize(flavor = 1, image = 112, name = 'my_server', instance = nil)\n region = config.get(:region)\n connection = self.class.find_service(region)\n\n # Get our SSH key to attach it to the server.\n instance ? 
@node = instance : @node = build(connection, flavor, image, name)\n end", "def initialize(aki, sak, region, prefix, instance_id = open(\"http://169.254.169.254/latest/meta-data/instance-id\").read)\n\n @instance_id = instance_id\n @prefix = prefix\n\n @compute = Fog::Compute.new({:provider => 'AWS', :aws_access_key_id => aki, :aws_secret_access_key => sak, :region => region })\n end", "def create\n\t\tregion = resource[:availability_zone].to_s.gsub(/.$/,'') \n\t\tcompute = Fog::Compute.new(:provider => 'aws', :region => \"#{region}\")\n\t\tprint \"ebsvol[aws]->create: Region is #{region}\\n\" if $debug\n\t\tprint \"ebsvol[aws]->create: Availability_zone is #{resource[:availability_zone]}\\n\" if $debug\n\t\t# create the requested volume\n\t\tresponse = compute.create_volume(resource[:availability_zone],resource[:size],resource[:snapshot])\t\n\t\tif (response.status == 200)\n\t\t\tvolumeid = response.body['volumeId']\n\t\t\tprint \"ebsvol[aws]->create: I created volume #{volumeid}.\\n\" if $debug\n\t\t\t# now tag the volume with volumename so we can identify it by name\n\t\t\t# and not the volumeid\n\t\t\tresponse = compute.create_tags(volumeid,{ :Name => resource[:volume_name] })\n\t\t\tif (response.status == 200)\n\t\t\t\tprint \"ebsvol[aws]->create: I tagged #{volumeid} with Name = #{resource[:volume_name]}\\n\" if $debug\n\t\t\tend\n\t\t\t# Check if I need to attach it to an ec2 instance.\n\t\t\tattachto = resource[:attached_to].to_s\n\t\t\tprint \"attachto is #{attachto}\\n\" if $debug\n\t\t\tif ( attachto != '' )\n\t\t\t\tif ( attachto == 'me')\n\t\t\t\t\tinstance = instanceinfo(compute,myname(compute))\n\t\t\t\telse\n\t\t\t\t\tinstance = instanceinfo(compute,attachto)\n\t\t\t\tend\n\t\t\t\tif ( resource[:device] != nil )\n\t\t\t\t\t# try to attach the volume to requested instance\n\t\t\t\t\tprint \"attach the volume\\n\" if $debug\n\t\t\t\t\tvolume = volinfo(compute,resource[:volume_name])\n\t\t\t\t\tattachvol(compute,volume,instance,resource[:device])\n\t\t\t\telse\n\t\t\t\t\traise \"ebsvol[aws]->create: Sorry, I can't attach a volume with out a device to attach to!\"\n\t\t\t\tend\n\t\t\tend\n\t\telse\n\t\t\traise \"ebsvol[aws]->create: I couldn't create the ebs volume, sorry!\"\n\t\tend\n\tend", "def install_system_image()\n unless @image_tarball_path and File.exists?(@image_tarball_path)\n raise RuntimeError, 'Invalid image specified'\n end\n\n decompressor = ''\n if @image_tarball_path.end_with?('.tgz')\n decompressor = '--gunzip'\n elsif @image_tarball_path.end_with?('.gz')\n decompressor = '--gunzip'\n elsif @image_tarball_path.end_with?('bz2')\n decompressor = '--bzip2'\n elsif @image_tarball_path.end_with?('lzma')\n decompessor = '--lzma'\n elsif @image_tarball_path.end_with?('lzip')\n decompressor = '--lzip'\n end\n\n # We only install to the first OS partition, for now (TODO)\n os_part = first_os_partition()\n\n # mount os partition, unpack the image on it, unmount it\n Dir.mktmpdir do |mountdir|\n begin\n os_part_path = File.join('/dev/disk/by-label', os_part.label)\n execute!(\"mount #{os_part_path} #{mountdir}\")\n\n execute!(['tar ',\n decompressor,\n '--extract',\n \"--file=#{@image_tarball_path}\",\n # Perms from the image should be retained.\n # Our job is to only install image to disk.\n '--preserve-permissions',\n '--numeric-owner',\n \"-C #{mountdir} .\"].join(' '))\n\n # write out the fstab file\n fstab_file_path = File.join(mountdir, '/etc/fstab')\n if File.exists?(fstab_file_path)\n info(\"Image already contains an fstab file, not generating one\")\n else\n 
Tempfile.open('fstab') { |f|\n f.puts('# This file is autogenerated')\n f.puts(fstab_contents(os_part.label))\n f.sync; f.fsync # flush ruby buffers and OS buffers\n execute!(\"cp #{f.path} #{fstab_file_path}\")\n }\n end\n ensure\n execute!(\"umount #{mountdir}\")\n end\n end\n\n execute!('sync')\n\n nil\n end", "def amazon_linux_init(region: self.region,\n release: '2015.09.1',\n storage: :ebs,\n arch: :x86_64,\n ebs_type: :gp2,\n filters: {})\n parameter_ami 'AmazonLinux', AmazonLinux.get_ami(region: region,\n release: release,\n storage: storage,\n arch: arch,\n ebs_type: ebs_type,\n filters: filters)\n end", "def make_forensic_image(options)\n drive_path = %x(VBoxManage list hdds | grep '#{options[:project_dir].split('/').last}').sub(/\\ALocation:\\s*/, '').sub(/\\n/, '')\n # drive_path = %x(VBoxManage list hdds | grep '#{options[:project_dir].split('/').last}').sub(/\\ALocation:\\s*|\\n\\Z/, '')\n drive_name = drive_path.split('/').last\n\n options[:image_output_location] = \"#{options[:project_dir]}/#{drive_name}\".sub(/.vmdk|.vdi/, '') unless options.has_key? :image_output_location\n\n unless options.has_key? :no_vm_shutdown\n ## Ensure all vms are shutdown\n system \"cd '#{options[:project_dir]}' && vagrant halt\"\n\n if options.has_key? :create_raw_image\n create_dd_image(drive_path, options[:image_output_location])\n end\n\n if options.has_key? :create_ewf_image\n create_ewf_image(drive_path, options[:image_output_location])\n end\n\n if options.has_key? :delete_vm_after_image_creation\n delete_virtualbox_vm(options[:vm_name])\n end\n else\n @colour.error 'Cannot create forensic image as --no-vm-shutdown option is set to true'\n end\n\n return options\nend", "def register_image(options)\n data = {}\n conn = @ec2_main.environment.connection\n if conn != nil\n if @ec2_main.settings.openstack\n # openstack\n elsif ((conn.class).to_s).start_with? 
\"Fog::Compute::AWS\"\n bm = options[:block_device_mappings]\n bm_fog = {}\n bm_fog['SnapshotId'] = bm[:ebs_snapshot_id]\n bm_fog['DeviceName'] = bm[:device_name]\n bm_fog['DeleteOnTermation'] = bm[:ebs_delete_on_termination]\n opts_fog = {}\n opts_fog['KernelId'] = options[:kernel_id]\n opts_fog['RamdiskId'] = options[:ramdisk_id]\n opts_fog['Architecture'] = options[:architecture]\n response = conn.register_image(options[:name], options[:description], options[:root_device_name], [bm_fog], opts_fog)\n if response.status = 200\n data = response.body['imageId']\n else\n raise \"Error #{response.status} #{response.body['Message']}\"\n end\n else\n data = conn.register_image(options)\n end\n else\n raise \"Connection Error\"\n end\n return data\n end", "def boot(opts)\n opts[:flavor] ||= 'standard.xsmall'\n opts[:image] ||= /Ubuntu Precise/\n opts[:sec_groups] ||= ['default']\n opts[:key_name] ||= 'default'\n opts[:region] ||= 'az-2.region-a.geo-1'\n\n raise 'no name provided' if !opts[:name] or opts[:name].empty?\n\n cleanup opts[:name]\n private_key = new_key opts[:name]\n write_key(private_key, File.expand_path('~/.ssh/hpcloud-keys/' + opts[:region] + '/'))\n\n server = @os.create_server(\n :imageRef => image_id(opts[:image]),\n :flavorRef => flavor_id(opts[:flavor]),\n :key_name => private_key[:name],\n :security_groups => opts[:sec_groups],\n :name => opts[:name])\n\n wait(300) do\n server = @os.server(server.id)\n raise 'error booting vm' if server.status == 'ERROR'\n server.status == 'ACTIVE'\n end\n sleep 60\n\n {\n :ip => public_ip(server),\n :user => 'ubuntu',\n :key => private_key[:private_key]\n }\n end", "def create_esb_server (config, hostname, ip1, ip2)\n config.vm.define hostname do |esb|\n esb.vm.provider \"virtualbox\" do |provider|\n provider.customize [\"modifyvm\", :id, \"--memory\", 2048]\n end\n\n esb.vm.network \"private_network\", ip: ip1\n esb.vm.host_name = hostname\n\n esb.vm.network \"private_network\", ip: ip2\n end\nend", "def register_image(params)\n # Get the Image ID\n image_id = params['ImageLocation']\n\n if image_id =~ /ami\\-(.+)/\n image_id = $1\n end\n\n image = ImageEC2.new(Image.build_xml(image_id.to_i), @client)\n rc = image.info\n if OpenNebula.is_error?(rc)\n return rc\n end\n\n if image[\"EBS_VOLUME\"] == \"YES\"\n return OpenNebula::Error.new(\"The image you are trying to register\"\\\n \" is already a volume\")\n elsif image[\"EBS_SNAPSHOT\"] == \"YES\"\n return OpenNebula::Error.new(\"The image you are trying to register\"\\\n \" is already an snapshot\")\n end\n\n image.add_element('TEMPLATE', {\"EC2_AMI\" => \"YES\"})\n rc = image.update\n if OpenNebula.is_error?(rc)\n return rc\n end\n\n erb_version = params['Version']\n\n response = ERB.new(File.read(@config[:views]+\"/register_image.erb\"))\n return response.result(binding), 200\n end", "def create_kvm_instance(name, opts={})\n cmd = \"virt-install -n #{name} --ram #{opts[:memory]} --disk \" <<\n \"path=#{File.join(KVM_HOME, 'storage', \"#{name}.qcow2\")},device=disk,bus=virtio,format=qcow2 \" <<\n \"-v --import --noautoconsole --vcpus=#{opts[:vcpus]},maxvcpus=#{opts[:maxvcpus]}\" \n unless(system(cmd))\n raise \"Failed to create KVM instance!\"\n end \nend", "def aws_instance_elastic_ip_create(instance)\n log \"AWS: creating ElasticIP for Instance '#{instance.id}'\"\n # get elastic ip object\n elastic_ip = aws_call('aws_elastic_ip_create')\n log \"AWS: created ElasticIP '#{elastic_ip.public_ip}'\"\n\n # this is interesting, perhaps elastic ips dont have statuses like other resources, or 
else why not use our helper fn?\n log \"AWS: waiting for ElasticIP '#{elastic_ip.public_ip}' to exist\"\n Timeout.timeout(360) { sleep 1 while not aws_call('aws_obj_exists?', obj: elastic_ip) }\n\n # give our NAT vm its elastic IP!\n log \"AWS: associating ElastipIP '#{elastic_ip.public_ip}' with Instance '#{instance.id}'\"\n aws_call(\n 'aws_instance_elastic_ip_associate',\n instance: instance,\n elastic_ip: elastic_ip,\n errs: { AWS::EC2::Errors::InvalidAllocationID::NotFound => 60 }\n )\n \n # update ip_address_public attribute\n self.update_attribute(:ip_address_public, elastic_ip.public_ip)\n end", "def ec2_instance(instance_id)\n EC2Instance.new(instance_id)\n end", "def ec2_instance(instance_id)\n EC2Instance.new(instance_id)\n end", "def create\n @instance = Instance.new(params[:instance])\n\n respond_to do |format|\n if @instance.save\n\n system(\"ssh root@#{CloudGui::Application.config.hypervisor_ip} \\\"exec /data/cloud/scripts/provision.sh #{params[:instance][:cpus]} #{params[:instance][:memory]} #{params[:instance][:baseimage]} 2>&1 | tee /data/cloud/logs/cloud_gui.log\\\"\")\n\n format.html { redirect_to @instance, notice: 'Instance was successfully created.' }\n format.json { render json: @instance, status: :created, location: @instance }\n else\n format.html { render action: \"new\" }\n format.json { render json: @instance.errors, status: :unprocessable_entity }\n end\n end\n end", "def ec2\n Fog::Compute::AWS.new(aws_access_key_id: @aws_access_key_id, aws_secret_access_key: @aws_secret_access_key)\n end", "def create_ewf_image(drive_path ,image_output_location)\n ## Make E01 image\n @colour.notify \"Creating E01 image with path #{image_output_location}.E01\"\n @colour.notify 'This may take a while:'\n @colour.notify \"E01 image #{image_output_location}.E01 created\" if system \"ftkimager '#{drive_path}' '#{image_output_location}' --e01\"\nend", "def create\n ec2 = self.class.new_ec2(@resource.value(:user), @resource.value(:password))\n group = @resource.value(:name)\n begin\n ec2.describe_security_groups({:group_name => group})\n rescue Exception => e\n ec2.create_security_group({ \n :group_name => group,\n :group_description => @resource.value(:desc)\n })\n end\n # if instance in that security group exists, start it\n # otherwise just create a new instance \n ec2.run_instances(\n { :image_id => @resource.value(:image),\n # security groups\n :security_group => group,\n :instance_type => @resource.value(:type)\n })\n end", "def create_instance(credentials, image_id, opts)\n racks = new_client( credentials )\n hwp_id = opts[:hwp_id] || 1\n name = Time.now.to_s\n if (opts[:name]) then name = opts[:name] end\n safely do\n return convert_srv_to_instance(racks.start_server(image_id, hwp_id, name))\n end\n end", "def create_image(instance_id, create_opts={})\n create_resource :image, { :instance_id => instance_id }.merge(create_opts)\n end", "def create_image(instance_id, create_opts={})\n create_resource :image, { :instance_id => instance_id }.merge(create_opts)\n end", "def create\n @ec2_instance = Ec2Instance.new(ec2_instance_params)\n\n respond_to do |format|\n if @ec2_instance.save\n format.html { redirect_to @ec2_instance, notice: 'Ec2 instance was successfully created.' 
}\n format.json { render :show, status: :created, location: @ec2_instance }\n else\n format.html { render :new }\n format.json { render json: @ec2_instance.errors, status: :unprocessable_entity }\n end\n end\n end", "def boot_aws_inception_vm\n say \"\" # glowing whitespace\n\n unless settings[\"inception\"][\"ip_address\"]\n say \"Provisioning IP address for inception VM...\"\n settings[\"inception\"][\"ip_address\"] = acquire_ip_address\n save_settings!\n end\n\n unless settings[\"inception\"] && settings[\"inception\"][\"server_id\"]\n username = \"ubuntu\"\n size = \"m1.small\"\n ip_address = settings[\"inception\"][\"ip_address\"]\n key_name = settings[\"inception\"][\"key_pair\"][\"name\"]\n say \"Provisioning #{size} for inception VM...\"\n inception_vm_attributes = {\n :groups => [settings[\"inception\"][\"security_group\"]],\n :key_name => key_name,\n :private_key_path => inception_vm_private_key_path,\n :flavor_id => size,\n :bits => 64,\n :username => \"ubuntu\",\n :public_ip_address => ip_address\n }\n if vpc?\n raise \"must create subnet before creating VPC inception VM\" unless settings[\"subnet\"] && settings[\"subnet\"][\"id\"]\n inception_vm_attributes[:subnet_id] = settings[\"subnet\"][\"id\"]\n inception_vm_attributes[:private_ip_address] = \"10.0.0.5\"\n end\n server = provider.bootstrap(inception_vm_attributes)\n unless server\n error \"Something mysteriously cloudy happened and fog could not provision a VM. Please check your limits.\"\n end\n\n settings[\"inception\"].delete(\"create_new\")\n settings[\"inception\"][\"server_id\"] = server.id\n settings[\"inception\"][\"username\"] = username\n save_settings!\n end\n\n server ||= fog_compute.servers.get(settings[\"inception\"][\"server_id\"])\n\n unless settings[\"inception\"][\"disk_size\"]\n disk_size = DEFAULT_INCEPTION_VOLUME_SIZE # Gb\n device = \"/dev/sdi\"\n provision_and_mount_volume(server, disk_size, device)\n\n settings[\"inception\"][\"disk_size\"] = disk_size\n settings[\"inception\"][\"disk_device\"] = device\n save_settings!\n end\n\n # settings[\"inception\"][\"host\"] is used externally to determine\n # if an inception VM has been assigned already; so we leave it\n # until last in this method to set this setting.\n # This way we can always rerun the CLI and rerun this method\n # and idempotently get an inception VM\n unless settings[\"inception\"][\"host\"]\n settings[\"inception\"][\"host\"] = server.dns_name\n save_settings!\n end\n\n confirm \"Inception VM has been created\"\n display_inception_ssh_access\n end", "def image_creation(image_name)\n image_path = Rails.root.join('public', 'images', image_name).to_s\n uploader = Dragonfly[:images]\n uploaded_image = uploader.fetch_file(image_path)\n image = Image.create image: uploaded_image\n\n rescue Error => e\n print \"Error creating image: \" + e\nend", "def create_vm(agent_id, stemcell_id, vm_type, network_spec, disk_locality = [], environment = nil)\n raise Bosh::Clouds::CloudError, \"Cannot create VM without registry with CPI v2 and stemcell api version #{@stemcell_api_version}. Registry not configured.\" if [email protected]_configured? 
&& @stemcell_api_version < 2\n\n with_thread_name(\"create_vm(#{agent_id}, ...):v2\") do\n network_props = @props_factory.network_props(network_spec)\n\n registry, dns = nil\n registry = {endpoint: @config.registry.endpoint} if @config.registry_configured?\n\n network_with_dns = network_props.dns_networks.first\n dns = {nameserver: network_with_dns.dns} unless network_with_dns.nil?\n agent_settings = AgentSettings.new(registry, network_props, dns)\n agent_settings.environment = environment\n agent_settings.agent_id = agent_id\n\n #TODO : should use networks from core create_vm in future\n instance_id, networks = @cloud_core.create_vm(agent_id, stemcell_id, vm_type, network_props, agent_settings, disk_locality, environment) do\n |instance_id, settings|\n @registry.update_settings(instance_id, settings.agent_settings) if @stemcell_api_version < 2\n end\n\n [instance_id, network_spec]\n end\n end", "def upload_image(extract_path, packaged_files)\n image_service = Fog::Image.new({\n :provider => 'OpenStack',\n :openstack_api_key => @os_password,\n :openstack_username => @os_username,\n :openstack_auth_url => @os_auth_url,\n :openstack_tenant => @os_tenant,\n })\n\n aki = \"#{extract_path}/#{packaged_files.find{|x| x =~ /vmlinuz$/}}\"\n ami = \"#{extract_path}/#{packaged_files.find{|x| x =~ /\\.img$/}}\"\n ari = \"#{extract_path}/#{packaged_files.find{|x| x =~ /initrd$/}}\"\n\n @log.info 'Uploding AKI ...'\n aki = image_service.images.create :name => \"#{File.basename(aki, '-vmlinuz')}-aki\",\n :size => File.size(aki),\n :disk_format => 'aki',\n :container_format => 'aki',\n :location => aki\n @log.info 'Uploading ARI ...'\n ari = image_service.images.create :name => \"#{File.basename(ari, '-initrd')}-ari\",\n :size => File.size(ari),\n :disk_format => 'ari',\n :container_format => 'ari',\n :location => ari\n @log.info 'Uploading AMI ...'\n image_service.images.create :name => \"#{File.basename(ari, '-initrd')}\",\n :size => File.size(ami),\n :disk_format => 'ami',\n :container_format => 'ami',\n :location => ami,\n :properties => {\n 'kernel_id' => aki.id,\n 'ramdisk_id' => ari.id\n }\n end", "def image(file)\n entry = @defn[@latest.name]\n endian = @vm.bigendian ? \"be\" : \"le\"\n init_user(entry)\n File.open(file, 'w') do |f|\n f.puts \"// vmimage #{endian}#{@vm.databits}\"\n @vm.dump(0, @vm.dot, true) do |s|\n f.puts s\n end\n end\n end", "def run_me\n instance_id = \"\"\n region = \"\"\n # Print usage information and then stop.\n if ARGV[0] == \"--help\" || ARGV[0] == \"-h\"\n puts \"Usage: ruby ec2-ruby-example-elastic-ips.rb \" \\\n \"INSTANCE_ID REGION\"\n # Replace us-west-2 with the AWS Region you're using for Amazon EC2.\n puts \"Example: ruby ec2-ruby-example-elastic-ips.rb \" \\\n \"i-033c48ef067af3dEX us-west-2\"\n exit 1\n # If no values are specified at the command prompt, use these default values.\n elsif ARGV.count.zero?\n instance_id = \"i-033c48ef067af3dEX\"\n # Replace us-west-2 with the AWS Region you're using for Amazon EC2.\n region = \"us-west-2\"\n # Otherwise, use the values as specified at the command prompt.\n else\n instance_id = ARGV[0]\n region = ARGV[1]\n end\n\n ec2_client = Aws::EC2::Client.new(region: region)\n\n unless instance_exists?(ec2_client, instance_id)\n puts \"Cannot find instance with ID '#{instance_id}'. 
Stopping program.\"\n exit 1\n end\n\n puts \"Addresses for instance with ID '#{instance_id}' before allocating \" \\\n \"Elastic IP address:\"\n describe_addresses_for_instance(ec2_client, instance_id)\n\n puts \"Allocating Elastic IP address...\"\n allocation_id = allocate_elastic_ip_address(ec2_client)\n if allocation_id.start_with?(\"Error\")\n puts \"Stopping program.\"\n exit 1\n else\n puts \"Elastic IP address created with allocation ID '#{allocation_id}'.\"\n end\n\n puts \"Associating Elastic IP address with instance...\"\n association_id = associate_elastic_ip_address_with_instance(\n ec2_client,\n allocation_id,\n instance_id\n )\n if association_id.start_with?(\"Error\")\n puts \"Stopping program. You must associate the Elastic IP address yourself.\"\n exit 1\n else\n puts \"Elastic IP address associated with instance with association ID \" \\\n \"'#{association_id}'.\"\n end\n\n puts \"Addresses for instance after allocating Elastic IP address:\"\n describe_addresses_for_instance(ec2_client, instance_id)\n\n puts \"Releasing the Elastic IP address from the instance...\"\n if elastic_ip_address_released?(ec2_client, allocation_id) == false\n puts \"Stopping program. You must release the Elastic IP address yourself.\"\n exit 1\n else\n puts \"Address released.\"\n end\n\n puts \"Addresses for instance after releasing Elastic IP address:\"\n describe_addresses_for_instance(ec2_client, instance_id)\nend", "def run\n node = Node.new(:instance_type => Aws.instance_type, :instance_id => Aws.instance_id)\n node.save\n write_pid\n process_loop\n end", "def create_image(name, type, container_uuid, url)\n @logger.debug(\"Request for creating image #{name}.\")\n if url.start_with?('http', 'nfs')\n create_image_with_url(name, type, container_uuid, url)\n else\n create_image_with_local(name, type, container_uuid, url)\n end\n rescue => e\n raise e\n end", "def create\n @ami = Ami.new(params[:ami])\n\n respond_to do |format|\n if @ami.save\n format.html { redirect_to @ami, notice: 'Ami was successfully created.' 
}\n format.json { render json: @ami, status: :created, location: @ami }\n else\n format.html { render action: \"new\" }\n format.json { render json: @ami.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_vm(opts = {})\n opts = opts.with_indifferent_access.reverse_merge(\n VSphereConfig.create_vm_defaults.symbolize_keys || {}\n )\n\n opts[:template_path] = compute.image if compute.image\n\n VSphere.with_connection do |vs|\n dest_folder = opts[:dest_folder]\n vm_name = opts[:name] || \"#{compute.image}-\"\\\n \"#{SecureRandom.hex(4)}-#{Time.new.strftime(\"%Y%m%d\")}\"\n exception_cb = lambda do |_p1|\n LabManager.logger.warn(\n \"Failed attempt to create virtual machine: template_name: #{opts[:template_path]}\"\\\n \", vm_name: #{vm_name}\"\n )\n end\n\n Retryable.retryable(\n tries: VSphereConfig.create_vm_defaults[:vm_clone_retry_count],\n on: [RbVmomi::Fault, CreateVMError, Fog::Compute::Vsphere::NotFound],\n exception_cb: exception_cb,\n sleep: ->(n) { Random.rand(n*3..n*3+10.0) }\n ) do\n LabManager.logger.info \"creating machine with name: #{vm_name} options: #{opts.inspect}\"\n machine = vs.vm_clone(\n 'datacenter' => opts[:datacenter],\n 'datastore' => opts[:datastore],\n 'template_path' => opts[:template_path],\n 'name' => vm_name,\n 'cluster' => opts[:cluster],\n 'linked_clone' => opts[:linked_clone],\n 'dest_folder' => dest_folder,\n 'power_on' => opts[:power_on],\n 'wait' => true\n )\n\n setup_network_interfaces({\n uuid: machine['new_vm']['uuid'],\n connection: vs,\n network_name: VSphereConfig.create_vm_defaults[:network_interface_name]\n })\n\n fail CreateVMError, \"Creation of (#{vm_name}) machine failed, retrying\" unless machine || machine['vm_ref']\n set_provider_data(machine['new_vm'], vs: vs)\n end\n\n add_machine_to_drs_rule(\n vs,\n group: opts[:add_to_drs_group],\n machine: \"#{dest_folder}/#{vm_name}\",\n datacenter: opts[:datacenter]\n ) if opts[:add_to_drs_group]\n end\n poweron_vm unless compute.provider_data['power_state'] == 'poweredOn'\n rescue\n # Try to free unsuccessfully started/configured/... 
VM\n begin\n terminate_vm\n rescue\n nil\n end if instance_uuid\n raise\n end", "def create_custom_stemcell\n if generated_stemcell\n say \"Skipping stemcell creation as one sits in the tmp folder waiting patiently...\"\n else\n say \"Creating new stemcell for '#{bosh_provider.green}'...\"\n chdir(repos_dir) do\n clone_or_update_repository(\"bosh\", bosh_git_repo)\n chdir(\"bosh\") do\n sh \"bundle install --without development test\"\n sh \"sudo bundle exec rake stemcell:basic['#{bosh_provider}']\"\n sh \"sudo chown -R vcap:vcap /var/tmp/bosh/agent-*\"\n end\n end\n end\n end", "def toKitten(**_args)\n bok = {\n \"cloud\" => \"AWS\",\n \"credentials\" => @credentials,\n \"cloud_id\" => @cloud_id,\n \"region\" => @region\n }\n\n if !cloud_desc\n MU.log \"toKitten failed to load a cloud_desc from #{@cloud_id}\", MU::ERR, details: @config\n return nil\n end\n\n asgs = MU::Cloud.resourceClass(\"AWS\", \"ServerPool\").find(\n instance_id: @cloud_id,\n region: @region,\n credentials: @credentials\n )\n if asgs.size > 0\n MU.log \"#{@mu_name} is an Autoscale node, will be adopted under server_pools\", MU::DEBUG, details: asgs\n return nil\n end\n\n bok['name'] = @cloud_id\n if cloud_desc.tags and !cloud_desc.tags.empty?\n bok['tags'] = MU.structToHash(cloud_desc.tags, stringify_keys: true)\n realname = MU::Adoption.tagsToName(bok['tags'])\n if realname\n bok['name'] = realname\n bok['name'].gsub!(/[^a-zA-Z0-9_\\-]/, \"_\")\n end\n end\n\n bok['size'] = cloud_desc.instance_type\n\n if cloud_desc.vpc_id\n bok['vpc'] = MU::Config::Ref.get(\n id: cloud_desc.vpc_id,\n cloud: \"AWS\",\n credentials: @credentials,\n type: \"vpcs\",\n )\n end\n\n if !cloud_desc.source_dest_check\n bok['src_dst_check'] = false\n end\n\n bok['image_id'] = cloud_desc.image_id\n\n ami = MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_images(image_ids: [bok['image_id']]).images.first\n\n if ami.nil? 
or ami.empty?\n MU.log \"#{@mu_name} source image #{bok['image_id']} no longer exists\", MU::WARN\n bok.delete(\"image_id\")\n end\n\n if cloud_desc.block_device_mappings and !cloud_desc.block_device_mappings.empty?\n vol_map = {}\n MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).describe_volumes(\n volume_ids: cloud_desc.block_device_mappings.map { |d| d.ebs.volume_id if d.ebs }\n ).volumes.each { |vol|\n vol_map[vol.volume_id] = vol\n }\n cloud_desc.block_device_mappings.each { |disk|\n if ami and ami.block_device_mappings\n is_ami_disk = false\n ami.block_device_mappings.each { |ami_dev|\n is_ami_disk = true if ami_dev.device_name == disk.device_name\n }\n next if is_ami_disk\n end\n disk_desc = { \"device\" => disk.device_name }\n if disk.ebs and disk.ebs.volume_id and vol_map[disk.ebs.volume_id]\n disk_desc[\"size\"] = vol_map[disk.ebs.volume_id].size\n disk_desc[\"delete_on_termination\"] = disk.ebs.delete_on_termination\n if vol_map[disk.ebs.volume_id].encrypted\n disk_desc['encrypted'] = true\n end\n if vol_map[disk.ebs.volume_id].iops\n disk_desc['iops'] = vol_map[disk.ebs.volume_id].iops\n end\n disk_desc[\"volume_type\"] = vol_map[disk.ebs.volume_id].volume_type\n end\n bok['storage'] ||= []\n bok['storage'] << disk_desc\n }\n end\n\n cloud_desc.network_interfaces.each { |int|\n if !bok['vpc'] and int.vpc_id\n bok['vpc'] = MU::Config::Ref.get(\n id: int.vpc_id,\n cloud: \"AWS\",\n credentials: @credentials,\n region: @region,\n subnet_id: int.subnet_id,\n habitat: MU::Config::Ref.get(\n id: int.owner_id,\n cloud: \"AWS\",\n credentials: @credentials\n )\n )\n end\n\n int.private_ip_addresses.each { |priv_ip|\n if !priv_ip.primary\n bok['add_private_ips'] ||= 0\n bok['add_private_ips'] += 1\n end\n if priv_ip.association and priv_ip.association.public_ip \n bok['associate_public_ip'] = true\n if priv_ip.association.ip_owner_id != \"amazon\"\n bok['static_ip'] = {\n \"assign_ip\" => true,\n \"ip\" => priv_ip.association.public_ip\n }\n end\n end\n }\n\n if int.groups.size > 0\n\n require 'mu/providers/aws/firewall_rule'\n ifaces = MU::Cloud.resourceClass(\"AWS\", \"FirewallRule\").getAssociatedInterfaces(int.groups.map { |sg| sg.group_id }, credentials: @credentials, region: @region)\n done_local_rules = false\n int.groups.each { |sg|\n if !done_local_rules and ifaces[sg.group_id].size == 1\n sg_desc = MU::Cloud.resourceClass(\"AWS\", \"FirewallRule\").find(cloud_id: sg.group_id, credentials: @credentials, region: @region).values.first\n if sg_desc\n bok[\"ingress_rules\"] = MU::Cloud.resourceClass(\"AWS\", \"FirewallRule\").rulesToBoK(sg_desc.ip_permissions)\n bok[\"ingress_rules\"].concat(MU::Cloud.resourceClass(\"AWS\", \"FirewallRule\").rulesToBoK(sg_desc.ip_permissions_egress, egress: true))\n done_local_rules = true\n next\n end\n end\n bok['add_firewall_rules'] ||= []\n bok['add_firewall_rules'] << MU::Config::Ref.get(\n id: sg.group_id,\n cloud: \"AWS\",\n credentials: @credentials,\n type: \"firewall_rules\",\n region: @region\n )\n }\n end\n }\n\n# XXX go get the got-damned instance profile\n\n bok\n end", "def create_instance(security_groups, key, user_data, size, region)\n @instances = nil\n Instance.new(@@ec2.run_instances(id, 1, 1, security_groups, key, user_data, nil, size, nil, nil, region).first)\n end", "def create_image(name, metadata = {})\n requires :id\n response = service.create_image(id, name, metadata)\n begin\n image_id = response.headers[\"Location\"].match(/\\/([^\\/]+$)/)[1]\n Fog::Compute::HPV2::Image.new(:collection => service.images, 
:service => service, :id => image_id)\n rescue\n nil\n end\n end", "def create_ec2_ebs_volume opts\n Ec2EbsVolume.create opts.merge :account => self\n end", "def create_image(vdu)\n\t\tname = get_resource_name\n\t\t\n\t\traise CustomException::NoExtensionError, \"#{vdu['vm_image']} does not have a file extension\" if vdu['vm_image_format'].empty?\n\t\traise CustomException::InvalidExtensionError, \"#{vdu['vm_image']} has an invalid extension. Allowed extensions: ami, ari, aki, vhd, vmdk, raw, qcow2, vdi and iso\" unless ['ami', 'ari', 'aki', 'vhd', 'vmdk', 'raw', 'qcow2', 'vdi', 'iso'].include? vdu['vm_image_format']\n\n\t\[email protected]_list << Image.new(name, vdu['vm_image_format'], vdu['vm_image'])\n\t\tname\n\tend", "def find_or_create_target(target_type, aws_instance_options)\n if target_type.downcase == 'aws'\n # Check or create new cluster on AWS\n if File.exist?(\"#{aws_instance_options[:cluster_name]}.json\")\n puts \"It appears that a cluster for #{aws_instance_options[:cluster_name]} is already running.\"\n puts \"If this is not the case then delete ./#{aws_instance_options[:cluster_name]}.json file.\"\n puts \"Or run 'bundle exec rake clean'\"\n puts 'Will try to continue'\n\n # Load AWS instance\n aws = OpenStudio::Aws::Aws.new\n aws.load_instance_info_from_file(\"#{aws_instance_options[:cluster_name]}.json\")\n server_dns = \"http://#{aws.os_aws.server.data.dns}\"\n puts \"Server IP address #{server_dns}\"\n\n else\n puts \"Creating cluster for #{aws_instance_options[:user_id]}\"\n puts 'Starting cluster...'\n\n # Don't use the old API (Version 1)\n ami_version = aws_instance_options[:os_server_version][0] == '2' ? 3 : 2\n aws_options = {\n ami_lookup_version: 3,\n openstudio_server_version: aws_instance_options[:os_server_version]\n }\n aws = OpenStudio::Aws::Aws.new(aws_options)\n\n server_options = {\n instance_type: aws_instance_options[:server_instance_type],\n user_id: aws_instance_options[:user_id],\n tags: aws_instance_options[:aws_tags]\n }\n\n worker_options = {\n instance_type: aws_instance_options[:worker_instance_type],\n user_id: aws_instance_options[:user_id],\n tags: aws_instance_options[:aws_tags]\n }\n\n start_time = Time.now\n\n # Create the server & worker\n aws.create_server(server_options)\n aws.save_cluster_info(\"#{aws_instance_options[:cluster_name]}.json\")\n aws.print_connection_info\n aws.create_workers(aws_instance_options[:worker_node_number], worker_options)\n aws.save_cluster_info(\"#{aws_instance_options[:cluster_name]}.json\")\n aws.print_connection_info\n server_dns = \"http://#{aws.os_aws.server.data.dns}\"\n\n puts \"Cluster setup in #{(Time.now - start_time).round} seconds. 
Awaiting analyses.\"\n puts \"Server IP address is #{server_dns}\"\n end\n OpenStudio::Analysis::ServerApi.new(hostname: server_dns)\n else\n OpenStudio::Analysis::ServerApi.new(hostname: lookup_target_url(target_type))\n end\nend", "def create_policy_role_EC2\n\n AWS.config(\n :access_key_id => ENV[\"S3_ACCESS_KEY\"], \n :secret_access_key => ENV[\"S3_SECRET_KEY\"])\n\n # naming policy \n role_name = 'ec2-start-stop'\n policy_name = 'ec2-start-stop'\n profile_name = 'ec2-start-stop' \n instance_profile_name = 'inst-ec2-start-stop' \n\n # building a custom policy \n policy = AWS::IAM::Policy.new\n policy.allow(\n :actions => [\"ec2:StartInstances\",\"ec2:StopInstances\"],\n :resources => '*')\n\n # EC2 can generate session credentials\n assume_role_policy_document = '{\"Version\":\"2008-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Service\":[\"ec2.amazonaws.com\"]},\"Action\":[\"sts:AssumeRole\"]}]}'\n \n # creating a role\n $iam.client.create_role(\n :role_name => role_name,\n :assume_role_policy_document => assume_role_policy_document)\n\n # adding policy to role\n $iam.client.put_role_policy(\n :role_name => role_name,\n :policy_name => policy_name,\n :policy_document => policy.to_json)\n\n # creating a profile for the role\n response = $iam.client.create_instance_profile(\n :instance_profile_name => instance_profile_name)\n \n # ARN\n profile_arn = response[:instance_profile][:arn]\n \n $iam.client.add_role_to_instance_profile(\n :instance_profile_name => instance_profile_name,\n :role_name => role_name)\n\n # you can use the profile name or ARN as the :iam_instance_profile option\n $ec2 = AWS::EC2.new\n $ec2.instances.create(:image_id => \"ami-inst-id-1\", :iam_instance_profile => profile_name)\n\n redirect_to iams_path, notice: 'Added Policy and Role for EC2'\n \n end", "def create_image_with_local(name, type, container_uuid, path)\n @logger.debug(\"Creating image from file path #{path}\")\n container_id = JSON.parse(\n @client.get('v2.0', \"storage_containers/#{container_uuid}\")\n )['id'].split('::').last\n @logger.debug(\"Container ID is #{container_id}\")\n spec = { name: name, image_type: type, annotation: name }\n @logger.debug(\"Image creation specs => #{spec}\")\n task = JSON.parse(@client.post('v2.0', 'images', spec.to_json))\n task_uuid = task['task_uuid']\n image_uuid = TaskManager.wait_on_task(task_uuid, @client, @logger)\n task = JSON.parse(\n @client.put('v0.8', \"images/#{image_uuid}/upload\",\n File.open(path), nil,\n { 'X-Nutanix-Destination-Container' => container_id })\n )\n task_uuid = task['task_uuid']\n # Timeout is set to 60 minutes as image upload may take time\n TaskManager.wait_on_task(task_uuid, @client, @logger, 60 * 60)\n image_uuid\n rescue => e\n raise e\n end", "def ec2\n @ec2 ||= EC2::Base.new(:access_key_id => \"not a key\", :secret_access_key => \"not a key\")\n end", "def postBoot(instance_id = nil)\n @cloud_id ||= instance_id\n _node, _config, deploydata = describe(cloud_id: @cloud_id)\n\n raise MuError, \"Couldn't find instance #{@mu_name} (#{@cloud_id})\" if !cloud_desc\n return false if !MU::MommaCat.lock(@cloud_id+\"-orchestrate\", true)\n return false if !MU::MommaCat.lock(@cloud_id+\"-groom\", true)\n\n getIAMProfile\n\n finish = Proc.new { |status|\n MU::MommaCat.unlock(@cloud_id+\"-orchestrate\")\n MU::MommaCat.unlock(@cloud_id+\"-groom\")\n return status\n }\n\n MU::Cloud::AWS.createStandardTags(\n @cloud_id,\n region: @region,\n credentials: @credentials,\n optional: @config['optional_tags'],\n nametag: @mu_name,\n 
othertags: @config['tags']\n )\n\n # Make double sure we don't lose a cached mu_windows_name value.\n if (windows? or !@config['active_directory'].nil?)\n @mu_windows_name ||= deploydata['mu_windows_name']\n end\n\n loop_if = Proc.new {\n !cloud_desc(use_cache: false) or cloud_desc.state.name != \"running\"\n }\n MU.retrier([Aws::EC2::Errors::ServiceError], max: 30, wait: 40, loop_if: loop_if) { |retries, _wait|\n if cloud_desc and cloud_desc.state.name == \"terminated\"\n logs = if !@config['basis'].nil?\n pool = @deploy.findLitterMate(type: \"server_pools\", name: @config[\"name\"])\n if pool\n MU::Cloud::AWS.autoscale(region: @region, credentials: @credentials).describe_scaling_activities(auto_scaling_group_name: pool.cloud_id).activities\n else\n nil\n end\n end\n raise MuError.new, \"#{@cloud_id} appears to have been terminated mid-bootstrap!\", details: logs\n end\n if retries % 3 == 0\n MU.log \"Waiting for EC2 instance #{@mu_name} (#{@cloud_id}) to be ready...\", MU::NOTICE\n end\n }\n\n allowBastionAccess\n\n setAlarms\n\n # Unless we're planning on associating a different IP later, set up a\n # DNS entry for this thing and let it sync in the background. We'll come\n # back to it later.\n if @config['static_ip'].nil? and !@named\n MU::MommaCat.nameKitten(self)\n @named = true\n end\n\n if !@config['src_dst_check'] and !@config[\"vpc\"].nil?\n MU.log \"Disabling source_dest_check #{@mu_name} (making it NAT-worthy)\"\n MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).modify_instance_attribute(\n instance_id: @cloud_id,\n source_dest_check: { value: false }\n )\n end\n\n # Set console termination protection. Autoscale nodes won't set this\n # by default.\n MU::Cloud::AWS.ec2(region: @region, credentials: @credentials).modify_instance_attribute(\n instance_id: @cloud_id,\n disable_api_termination: { value: true}\n )\n\n tagVolumes\n configureNetworking\n saveCredentials\n\n if !@config['image_then_destroy']\n notify\n end\n\n finish.call(false) if !bootstrapGroomer\n\n # Make sure we got our name written everywhere applicable\n if !@named\n MU::MommaCat.nameKitten(self)\n @named = true\n end\n\n finish.call(true)\n end", "def create\n\t\t\t@subnet_id = Net::HTTP.get(URI.parse(\"#{URL}network/interfaces/macs/#{@macs_arr.first}/subnet-id\"))\n\t\t\tresp = ec2.create_network_interface(subnet_id: \"#{@subnet_id}\")\n\t\t\t@network_interface_id = resp[:network_interface][:network_interface_id]\n\t\t\tputs \"created interface with id #{@network_interface_id}\"\n\t\tend", "def ec2\n @ec2 ||= aws_init { AWS::EC2.new }\n end", "def create_ec2_elastic_ip opts\n Ec2ElasticIp.create opts.merge :account => self\n end", "def initialize(*args)\n super\n @action = :create_ec2\nend", "def allocate(session, template)\n write_task('rvpe.image.allocate', session) do\n image_def = ResourceFile::Parser.load_yaml(template)\n t = Transfer.find_by_name(image_def[ResourceFile::Image::TRANSFER])[0]\n # check fields\n err_msg_suffix = ' in Image file.'\n # check name\n _name = image_def[ResourceFile::Image::NAME]\n unless _name\n t.cleanup\n raise 'Specify ' + ResourceFile::Image::NAME + err_msg_suffix\n end\n unless Image.find_by_name(_name, session, -2).empty?\n t.cleanup\n raise \"Image[#{_name}] already exists. 
Use another name.\"\n end\n # check type\n _type = image_def[ResourceFile::Image::TYPE]\n _type = 'OS' unless _type\n # check attributes\n if image_def[ResourceFile::Image::PUBLIC] &&\n image_def[ResourceFile::Image::PERSISTENT]\n t.cleanup\n raise \"An image can't be public and persistent at the same time.\"\n end\n _public = image_def[ResourceFile::Image::PUBLIC]\n if _public\n _public = 'YES' # yaml automatically converted 'YES' to true\n else\n _public = 'NO'\n end\n _persistent = image_def[ResourceFile::Image::PERSISTENT]\n if _persistent\n _persistent = 'YES' # yaml automatically converted 'YES' to true\n else\n _persistent = 'NO'\n end\n # check bus\n _bus = image_def[ResourceFile::Image::IO_BUS]\n _bus = 'virtio' unless _bus\n case _bus.downcase\n when 'virtio'\n _dev_prefix = 'vd'\n when 'ide'\n _dev_prefix = 'hd'\n else\n _dev_prefix = 'sd'\n end\n # check nic model\n _nic_model = image_def[ResourceFile::Image::NIC_MODEL]\n _nic_model = 'virtio' unless _nic_model\n # check image file\n begin\n image_sanity_check(t.path)\n rescue => e\n t.cleanup\n raise e\n end\n\n one_template = <<EOT\nNAME = \"#{_name}\"\nDESCRIPTION = \"#{image_def[ResourceFile::Image::DESCRIPTION]}\"\nTYPE = \"#{_type}\"\nPUBLIC = \"#{_public}\"\nPERSISTENT = \"#{_persistent}\"\nBUS = \"#{_bus}\"\nDEV_PREFIX = \"#{_dev_prefix}\"\nNIC_MODEL = \"#{_nic_model}\"\nPATH = \"#{image_def[ResourceFile::Image::PATH]}\"\nEOT\n rc = call_one_xmlrpc('one.image.allocate', session, one_template)\n\n if rc[0]\n # move OS image to the image storage directory\n rc2 = call_one_xmlrpc('one.image.info', session, rc[1])\n unless rc2[0]\n t.cleanup\n raise rc2[1]\n end\n doc = REXML::Document.new(rc2[1])\n FileUtils.mv(t.path, doc.get_text('IMAGE/SOURCE').value)\n end\n t.cleanup\n rc\n end\n end", "def create_server(name, image, flavor, networks = nil, keypair = nil, security_group = nil, metadata = nil)\n data = { \n \"server\" => { \n \"name\" => name,\n \"imageRef\" => image,\n \"flavorRef\" => flavor,\n } \n }\n unless networks.nil?\n data[\"server\"][\"networks\"] = networks \n end\n unless keypair.nil?\n data[\"server\"][\"key_name\"] = keypair\n end\n unless security_group.nil?\n data[\"server\"][\"security_group\"] = security_group \n end\n return post_request(address(\"/servers\"), data, @token)\n end", "def convert_image(image)\n Image.new(\n :id => image[\"id\"],\n :name => image[\"name\"],\n :owner_id => image[\"owner\"],\n :description => image[\"description\"],\n :architecture => \"i386\",\t# TODO: parse this from supportedInstanceType IDs w/ HW profile lookup\n :state => @@IMAGE_STATE_MAP[image[\"state\"]]\n )\n end", "def run\n super\n\n # Get the AWS Credentials\n aws_keys = get_aws_keys_from_entity_type(_get_entity_type_string)\n return unless aws_keys.access_key && aws_keys.secret_key\n\n return unless aws_keys_valid?(aws_keys.access_key, aws_keys.secret_key, aws_keys.session_token)\n\n regions = retrieve_region_list\n instance_collection = regions.map do |r|\n retrieve_instances(r, aws_keys.access_key, aws_keys.secret_key, aws_keys.session_token)\n end\n\n instance_collection.compact!\n return if instance_collection.size.zero?\n\n create_ec2_instances(instance_collection)\n end", "def postBoot(instance_id = nil)\n if !instance_id.nil?\n @cloud_id = instance_id\n end\n node, config, deploydata = describe(cloud_id: @cloud_id)\n instance = cloud_desc\n raise MuError, \"Couldn't find instance #{@mu_name} (#{@cloud_id})\" if !instance\n @cloud_id = instance.instance_id\n return false if 
!MU::MommaCat.lock(instance.instance_id+\"-orchestrate\", true)\n return false if !MU::MommaCat.lock(instance.instance_id+\"-groom\", true)\n\n MU::MommaCat.createStandardTags(instance.instance_id, region: @config['region'], credentials: @config['credentials'])\n MU::MommaCat.createTag(instance.instance_id, \"Name\", node, region: @config['region'], credentials: @config['credentials'])\n\n if @config['optional_tags']\n MU::MommaCat.listOptionalTags.each { |key, value|\n MU::MommaCat.createTag(instance.instance_id, key, value, region: @config['region'], credentials: @config['credentials'])\n }\n end\n\n if !@config['tags'].nil?\n @config['tags'].each { |tag|\n MU::MommaCat.createTag(instance.instance_id, tag['key'], tag['value'], region: @config['region'], credentials: @config['credentials'])\n }\n end\n MU.log \"Tagged #{node} (#{instance.instance_id}) with MU-ID=#{MU.deploy_id}\", MU::DEBUG\n\n # Make double sure we don't lose a cached mu_windows_name value.\n if windows? or !@config['active_directory'].nil?\n if @mu_windows_name.nil?\n @mu_windows_name = deploydata['mu_windows_name']\n end\n end\n\n retries = -1\n max_retries = 30\n begin\n if instance.nil? or instance.state.name != \"running\"\n retries = retries + 1\n if !instance.nil? and instance.state.name == \"terminated\"\n raise MuError, \"#{@cloud_id} appears to have been terminated mid-bootstrap!\"\n end\n if retries % 3 == 0\n MU.log \"Waiting for EC2 instance #{node} (#{@cloud_id}) to be ready...\", MU::NOTICE\n end\n sleep 40\n # Get a fresh AWS descriptor\n instance = MU::Cloud::Server.find(cloud_id: @cloud_id, region: @config['region'], credentials: @config['credentials']).values.first\n if instance and instance.state.name == \"terminated\"\n raise MuError, \"EC2 instance #{node} (#{@cloud_id}) terminating during bootstrap!\"\n end\n end\n rescue Aws::EC2::Errors::ServiceError => e\n if retries < max_retries\n MU.log \"Got #{e.inspect} during initial instance creation of #{@cloud_id}, retrying...\", MU::NOTICE, details: instance\n retries = retries + 1\n retry\n else\n raise MuError, \"Too many retries creating #{node} (#{e.inspect})\"\n end\n end while instance.nil? or (instance.state.name != \"running\" and retries < max_retries)\n\n punchAdminNAT\n\n\n # If we came up via AutoScale, the Alarm module won't have had our\n # instance ID to associate us with itself. So invoke that here.\n # XXX might be possible to do this with regular alarm resources and\n # dependencies now\n if !@config['basis'].nil? and @config[\"alarms\"] and !@config[\"alarms\"].empty?\n @config[\"alarms\"].each { |alarm|\n alarm_obj = MU::MommaCat.findStray(\n \"AWS\",\n \"alarms\",\n region: @config[\"region\"],\n deploy_id: @deploy.deploy_id,\n name: alarm['name']\n ).first\n alarm[\"dimensions\"] = [{:name => \"InstanceId\", :value => @cloud_id}]\n\n if alarm[\"enable_notifications\"]\n topic_arn = MU::Cloud::AWS::Notification.createTopic(alarm[\"notification_group\"], region: @config[\"region\"], credentials: @config['credentials'])\n MU::Cloud::AWS::Notification.subscribe(arn: topic_arn, protocol: alarm[\"notification_type\"], endpoint: alarm[\"notification_endpoint\"], region: @config[\"region\"], credentials: @config[\"credentials\"])\n alarm[\"alarm_actions\"] = [topic_arn]\n alarm[\"ok_actions\"] = [topic_arn]\n end\n\n alarm_name = alarm_obj ? 
alarm_obj.cloud_id : \"#{node}-#{alarm['name']}\".upcase\n\n MU::Cloud::AWS::Alarm.setAlarm(\n name: alarm_name,\n ok_actions: alarm[\"ok_actions\"],\n alarm_actions: alarm[\"alarm_actions\"],\n insufficient_data_actions: alarm[\"no_data_actions\"],\n metric_name: alarm[\"metric_name\"],\n namespace: alarm[\"namespace\"],\n statistic: alarm[\"statistic\"],\n dimensions: alarm[\"dimensions\"],\n period: alarm[\"period\"],\n unit: alarm[\"unit\"],\n evaluation_periods: alarm[\"evaluation_periods\"],\n threshold: alarm[\"threshold\"],\n comparison_operator: alarm[\"comparison_operator\"],\n region: @config[\"region\"],\n credentials: @config['credentials']\n )\n }\n end\n\n # We have issues sometimes where our dns_records are pointing at the wrong node name and IP address.\n # Make sure that doesn't happen. Happens with server pools only\n if @config['dns_records'] && !@config['dns_records'].empty?\n @config['dns_records'].each { |dnsrec|\n if dnsrec.has_key?(\"name\")\n if dnsrec['name'].start_with?(MU.deploy_id.downcase) && !dnsrec['name'].start_with?(node.downcase)\n MU.log \"DNS records for #{node} seem to be wrong, deleting from current config\", MU::WARN, details: dnsrec\n dnsrec.delete('name')\n dnsrec.delete('target')\n end\n end\n }\n end\n\n # Unless we're planning on associating a different IP later, set up a\n # DNS entry for this thing and let it sync in the background. We'll come\n # back to it later.\n if @config['static_ip'].nil? && !@named\n MU::MommaCat.nameKitten(self)\n @named = true\n end\n\n if !@config['src_dst_check'] and !@config[\"vpc\"].nil?\n MU.log \"Disabling source_dest_check #{node} (making it NAT-worthy)\"\n MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).modify_instance_attribute(\n instance_id: @cloud_id,\n source_dest_check: {:value => false}\n )\n end\n\n # Set console termination protection. 
Autoscale nodes won't set this\n # by default.\n MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).modify_instance_attribute(\n instance_id: @cloud_id,\n disable_api_termination: {:value => true}\n )\n\n has_elastic_ip = false\n if !instance.public_ip_address.nil?\n begin\n resp = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_addresses(public_ips: [instance.public_ip_address])\n if resp.addresses.size > 0 and resp.addresses.first.instance_id == @cloud_id\n has_elastic_ip = true\n end\n rescue Aws::EC2::Errors::InvalidAddressNotFound => e\n # XXX this is ok to ignore, it means the public IP isn't Elastic\n end\n end\n\n win_admin_password = nil\n ec2config_password = nil\n sshd_password = nil\n if windows?\n ssh_keydir = \"#{Etc.getpwuid(Process.uid).dir}/.ssh\"\n ssh_key_name = @deploy.ssh_key_name\n\n if @config['use_cloud_provider_windows_password']\n win_admin_password = getWindowsAdminPassword\n elsif @config['windows_auth_vault'] && !@config['windows_auth_vault'].empty?\n if @config[\"windows_auth_vault\"].has_key?(\"password_field\")\n win_admin_password = @groomer.getSecret(\n vault: @config['windows_auth_vault']['vault'],\n item: @config['windows_auth_vault']['item'],\n field: @config[\"windows_auth_vault\"][\"password_field\"]\n )\n else\n win_admin_password = getWindowsAdminPassword\n end\n\n if @config[\"windows_auth_vault\"].has_key?(\"ec2config_password_field\")\n ec2config_password = @groomer.getSecret(\n vault: @config['windows_auth_vault']['vault'],\n item: @config['windows_auth_vault']['item'],\n field: @config[\"windows_auth_vault\"][\"ec2config_password_field\"]\n )\n end\n\n if @config[\"windows_auth_vault\"].has_key?(\"sshd_password_field\")\n sshd_password = @groomer.getSecret(\n vault: @config['windows_auth_vault']['vault'],\n item: @config['windows_auth_vault']['item'],\n field: @config[\"windows_auth_vault\"][\"sshd_password_field\"]\n )\n end\n end\n\n win_admin_password = MU.generateWindowsPassword if win_admin_password.nil?\n ec2config_password = MU.generateWindowsPassword if ec2config_password.nil?\n sshd_password = MU.generateWindowsPassword if sshd_password.nil?\n\n # We're creating the vault here so when we run\n # MU::Cloud::Server.initialSSHTasks and we need to set the Windows\n # Admin password we can grab it from said vault.\n creds = {\n \"username\" => @config['windows_admin_username'],\n \"password\" => win_admin_password,\n \"ec2config_username\" => \"ec2config\",\n \"ec2config_password\" => ec2config_password,\n \"sshd_username\" => \"sshd_service\",\n \"sshd_password\" => sshd_password\n }\n @groomer.saveSecret(vault: @mu_name, item: \"windows_credentials\", data: creds, permissions: \"name:#{@mu_name}\")\n end\n\n subnet = nil\n if [email protected]? and @config.has_key?(\"vpc\") and !instance.subnet_id.nil?\n subnet = @vpc.getSubnet(\n cloud_id: instance.subnet_id\n )\n if subnet.nil?\n raise MuError, \"Got null subnet id out of #{@config['vpc']} when asking for #{instance.subnet_id}\"\n end\n end\n\n if !subnet.nil?\n if !subnet.private? or (!@config['static_ip'].nil? 
and !@config['static_ip']['assign_ip'].nil?)\n if !@config['static_ip'].nil?\n if !@config['static_ip']['ip'].nil?\n public_ip = MU::Cloud::AWS::Server.associateElasticIp(instance.instance_id, classic: false, ip: @config['static_ip']['ip'])\n elsif !has_elastic_ip\n public_ip = MU::Cloud::AWS::Server.associateElasticIp(instance.instance_id)\n end\n end\n end\n\n nat_ssh_key, nat_ssh_user, nat_ssh_host, canonical_ip, ssh_user, ssh_key_name = getSSHConfig\n if subnet.private? and !nat_ssh_host and !MU::Cloud::AWS::VPC.haveRouteToInstance?(cloud_desc, region: @config['region'], credentials: @config['credentials'])\n raise MuError, \"#{node} is in a private subnet (#{subnet}), but has no NAT host configured, and I have no other route to it\"\n end\n\n # If we've asked for additional subnets (and this @config is not a\n # member of a Server Pool, which has different semantics), create\n # extra interfaces to accomodate.\n if !@config['vpc']['subnets'].nil? and @config['basis'].nil?\n device_index = 1\n @vpc.subnets { |subnet|\n subnet_id = subnet.cloud_id\n MU.log \"Adding network interface on subnet #{subnet_id} for #{node}\"\n iface = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).create_network_interface(subnet_id: subnet_id).network_interface\n MU::MommaCat.createStandardTags(iface.network_interface_id, region: @config['region'], credentials: @config['credentials'])\n MU::MommaCat.createTag(iface.network_interface_id, \"Name\", node+\"-ETH\"+device_index.to_s, region: @config['region'], credentials: @config['credentials'])\n\n if @config['optional_tags']\n MU::MommaCat.listOptionalTags.each { |key, value|\n MU::MommaCat.createTag(iface.network_interface_id, key, value, region: @config['region'], credentials: @config['credentials'])\n }\n end\n\n if !@config['tags'].nil?\n @config['tags'].each { |tag|\n MU::MommaCat.createTag(iface.network_interface_id, tag['key'], tag['value'], region: @config['region'], credentials: @config['credentials'])\n }\n end\n\n MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).attach_network_interface(\n network_interface_id: iface.network_interface_id,\n instance_id: instance.instance_id,\n device_index: device_index\n )\n device_index = device_index + 1\n }\n end\n elsif !@config['static_ip'].nil?\n if !@config['static_ip']['ip'].nil?\n public_ip = MU::Cloud::AWS::Server.associateElasticIp(instance.instance_id, classic: true, ip: @config['static_ip']['ip'])\n elsif !has_elastic_ip\n public_ip = MU::Cloud::AWS::Server.associateElasticIp(instance.instance_id, classic: true)\n end\n end\n\n\n if !@config['image_then_destroy']\n notify\n end\n\n MU.log \"EC2 instance #{node} has id #{instance.instance_id}\", MU::DEBUG\n\n @config[\"private_dns_name\"] = instance.private_dns_name\n @config[\"public_dns_name\"] = instance.public_dns_name\n @config[\"private_ip_address\"] = instance.private_ip_address\n @config[\"public_ip_address\"] = instance.public_ip_address\n\n ext_mappings = MU.structToHash(instance.block_device_mappings)\n\n # Root disk on standard CentOS AMI\n # tagVolumes(instance.instance_id, \"/dev/sda\", \"Name\", \"ROOT-\"+MU.deploy_id+\"-\"+@config[\"name\"].upcase)\n # Root disk on standard Ubuntu AMI\n # tagVolumes(instance.instance_id, \"/dev/sda1\", \"Name\", \"ROOT-\"+MU.deploy_id+\"-\"+@config[\"name\"].upcase)\n\n # Generic deploy ID tag\n # tagVolumes(instance.instance_id)\n\n # Tag volumes with all our standard tags.\n # Maybe replace tagVolumes with this? 
There is one more place tagVolumes is called from\n volumes = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_volumes(filters: [name: \"attachment.instance-id\", values: [instance.instance_id]])\n volumes.each { |vol|\n vol.volumes.each { |volume|\n volume.attachments.each { |attachment|\n MU::MommaCat.listStandardTags.each_pair { |key, value|\n MU::MommaCat.createTag(attachment.volume_id, key, value, region: @config['region'], credentials: @config['credentials'])\n\n if attachment.device == \"/dev/sda\" or attachment.device == \"/dev/sda1\"\n MU::MommaCat.createTag(attachment.volume_id, \"Name\", \"ROOT-#{MU.deploy_id}-#{@config[\"name\"].upcase}\", region: @config['region'], credentials: @config['credentials'])\n else\n MU::MommaCat.createTag(attachment.volume_id, \"Name\", \"#{MU.deploy_id}-#{@config[\"name\"].upcase}-#{attachment.device.upcase}\", region: @config['region'], credentials: @config['credentials'])\n end\n }\n\n if @config['optional_tags']\n MU::MommaCat.listOptionalTags.each { |key, value|\n MU::MommaCat.createTag(attachment.volume_id, key, value, region: @config['region'], credentials: @config['credentials'])\n }\n end\n\n if @config['tags']\n @config['tags'].each { |tag|\n MU::MommaCat.createTag(attachment.volume_id, tag['key'], tag['value'], region: @config['region'], credentials: @config['credentials'])\n }\n end\n }\n }\n }\n\n canonical_name = instance.public_dns_name\n canonical_name = instance.private_dns_name if !canonical_name or nat_ssh_host != nil\n @config['canonical_name'] = canonical_name\n\n if !@config['add_private_ips'].nil?\n instance.network_interfaces.each { |int|\n if int.private_ip_address == instance.private_ip_address and int.private_ip_addresses.size < (@config['add_private_ips'] + 1)\n MU.log \"Adding #{@config['add_private_ips']} extra private IP addresses to #{instance.instance_id}\"\n MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).assign_private_ip_addresses(\n network_interface_id: int.network_interface_id,\n secondary_private_ip_address_count: @config['add_private_ips'],\n allow_reassignment: false\n )\n end\n }\n notify\n end\n\n begin\n if @config['groom'].nil? or @config['groom']\n if windows?\n # kick off certificate generation early; WinRM will need it\n cert, key = @deploy.nodeSSLCerts(self)\n if @config.has_key?(\"basis\")\n @deploy.nodeSSLCerts(self, true)\n end\n if [email protected]?\n session = getWinRMSession(50, 60, reboot_on_problems: true)\n initialWinRMTasks(session)\n begin\n session.close\n rescue Exception\n # this is allowed to fail- we're probably rebooting anyway\n end\n else # for an existing Windows node: WinRM, then SSH if it fails\n begin\n session = getWinRMSession(1, 60)\n rescue Exception # yeah, yeah\n session = getSSHSession(1, 60)\n # XXX maybe loop at least once if this also fails?\n end\n end\n else\n session = getSSHSession(40, 30)\n initialSSHTasks(session)\n end\n end\n rescue BootstrapTempFail\n sleep 45\n retry\n ensure\n session.close if !session.nil? 
and !windows?\n end\n\n if @config[\"existing_deploys\"] && !@config[\"existing_deploys\"].empty?\n @config[\"existing_deploys\"].each { |ext_deploy|\n if ext_deploy[\"cloud_id\"]\n found = MU::MommaCat.findStray(\n @config['cloud'],\n ext_deploy[\"cloud_type\"],\n cloud_id: ext_deploy[\"cloud_id\"],\n region: @config['region'],\n dummy_ok: false\n ).first\n\n MU.log \"Couldn't find existing resource #{ext_deploy[\"cloud_id\"]}, #{ext_deploy[\"cloud_type\"]}\", MU::ERR if found.nil?\n @deploy.notify(ext_deploy[\"cloud_type\"], found.config[\"name\"], found.deploydata, mu_name: found.mu_name, triggering_node: @mu_name)\n elsif ext_deploy[\"mu_name\"] && ext_deploy[\"deploy_id\"]\n MU.log \"#{ext_deploy[\"mu_name\"]} / #{ext_deploy[\"deploy_id\"]}\"\n found = MU::MommaCat.findStray(\n @config['cloud'],\n ext_deploy[\"cloud_type\"],\n deploy_id: ext_deploy[\"deploy_id\"],\n mu_name: ext_deploy[\"mu_name\"],\n region: @config['region'],\n dummy_ok: false\n ).first\n\n MU.log \"Couldn't find existing resource #{ext_deploy[\"mu_name\"]}/#{ext_deploy[\"deploy_id\"]}, #{ext_deploy[\"cloud_type\"]}\", MU::ERR if found.nil?\n @deploy.notify(ext_deploy[\"cloud_type\"], found.config[\"name\"], found.deploydata, mu_name: ext_deploy[\"mu_name\"], triggering_node: @mu_name)\n else\n MU.log \"Trying to find existing deploy, but either the cloud_id is not valid or no mu_name and deploy_id where provided\", MU::ERR\n end\n }\n end\n\n # See if this node already exists in our config management. If it does,\n # we're done.\n if @groomer.haveBootstrapped?\n MU.log \"Node #{node} has already been bootstrapped, skipping groomer setup.\", MU::NOTICE\n if @config['groom'].nil? or @config['groom']\n @groomer.saveDeployData\n end\n MU::MommaCat.unlock(instance.instance_id+\"-orchestrate\")\n MU::MommaCat.unlock(instance.instance_id+\"-groom\")\n return true\n end\n\n begin\n @groomer.bootstrap if @config['groom'].nil? 
or @config['groom']\n rescue MU::Groomer::RunError\n MU::MommaCat.unlock(instance.instance_id+\"-groom\")\n MU::MommaCat.unlock(instance.instance_id+\"-orchestrate\")\n return false\n end\n\n # Make sure we got our name written everywhere applicable\n if !@named\n MU::MommaCat.nameKitten(self)\n @named = true\n end\n\n MU::MommaCat.unlock(instance.instance_id+\"-groom\")\n MU::MommaCat.unlock(instance.instance_id+\"-orchestrate\")\n return true\n end", "def convert_image(catalog_vapp_template, account_name)\n name = catalog_vapp_template['name']\n #much fudging ensues\n #arch = name.scan(/(36|24).bit/).first\n #k enuf o'that now!\n arch = \"n/a\" #Leaving out entirely as we don't get one from terremark (could parse but its a fudge)\n Image.new( {\n :id => catalog_vapp_template['href'].split('/').last,\n :name => catalog_vapp_template['name'],\n :architecture => arch,\n :owner_id => account_name,\n :description => catalog_vapp_template['name']\n })\n end", "def create_kvm(kvm_name, template, options)\n ensure_name_availability!(kvm_name)\n clone_kvm(kvm_name, template)\n device = mount_kvm_volume(kvm_name)\n address = update_kvm_ip(kvm_name)\n update_hostname(kvm_name)\n make_console_accessible(kvm_name)\n unmount_kvm_volume(kvm_name, device)\n create_kvm_instance(kvm_name, options)\n puts \"KVM Node #{kvm_name} available at: #{address}\"\nend", "def create_target_mock\n ref = self.service_name.downcase.gsub(/ /, '-')\n create_row = {\n ref: ref,\n display_name: self.service_name,\n type: 'instance',\n iaas_type: 'ec2',\n iaas_properties: {},\n project_id: self.project.id\n }\n Model.create_from_row(self.project.model_handle(:target), create_row, convert: true, ret_obj: { model_name: :target_instance })\n end", "def run_instance options = {}\n instances = InstanceCollection.new(:config => config)\n instances.create(options.merge(:image => self))\n end", "def run\n ngen_auth\n @validate = Validator.new\n stack = EcoSystem.new\n @instanceparameters = stack.yaml_reader(config[:yaml])\n stack.validate = @validate\n config[:action] = 'create'\n stack.options = config\n stack.supress_output ='1'\n stack.instanceparameters = @instanceparameters\n inst_result = stack.opt_parse\n ho_hum\n \n inst_result.each do |server|\n config[:inst] = server['server']['display_name']\n instance = {}\n # puts chef_attrs = server['server']['userdata'].at(0)['chef']\n chef_attrs = server['server']['userdata'].at(0)['chef'] if !server['server']['userdata'].at(0)['chef'].nil?\n chef_attrs.each do |attr, value|\n instance[attr] = value\n end\n chef_node_configuration(instance)\n config[:chef_node_name] = config[:inst]\n inst_details = AttrFinder.new(server)\n inst_details.options = config\n inst_details.validate = @validate\n inst_details.function = 'server' \n inst = InstanceClient.new\n inst.validate = @validate\n inst.options = config\n inst.supress_output ='1'\n inst.instanceparameters = @instanceparameters\n ssh_host = inst.list_instance_ip(inst_details.compartment, inst_details.instance).at(1)\n bootstrap_for_linux_node(ssh_host).run\n node_attributes(ssh_host, 'IaaS')\n end\n end", "def postBoot(instance_id = nil)\n if !instance_id.nil?\n @cloud_id = instance_id\n end\n\n node, _config, deploydata = describe(cloud_id: @cloud_id)\n instance = cloud_desc\n raise MuError, \"Couldn't find instance of #{@mu_name} (#{@cloud_id})\" if !instance\n return false if !MU::MommaCat.lock(@cloud_id+\"-orchestrate\", true)\n return false if !MU::MommaCat.lock(@cloud_id+\"-groom\", true)\n\n# 
MU::Cloud::AWS.createStandardTags(@cloud_id, region: @config['region'])\n# MU::Cloud::AWS.createTag(@cloud_id, \"Name\", node, region: @config['region'])\n#\n# if @config['optional_tags']\n# MU::MommaCat.listOptionalTags.each { |key, value|\n# MU::Cloud::AWS.createTag(@cloud_id, key, value, region: @config['region'])\n# }\n# end\n#\n# if !@config['tags'].nil?\n# @config['tags'].each { |tag|\n# MU::Cloud::AWS.createTag(@cloud_id, tag['key'], tag['value'], region: @config['region'])\n# }\n# end\n# MU.log \"Tagged #{node} (#{@cloud_id}) with MU-ID=#{MU.deploy_id}\", MU::DEBUG\n#\n # Make double sure we don't lose a cached mu_windows_name value.\n if windows? or !@config['active_directory'].nil?\n if @mu_windows_name.nil?\n @mu_windows_name = deploydata['mu_windows_name']\n end\n end\n\n# punchAdminNAT\n#\n#\n# # If we came up via AutoScale, the Alarm module won't have had our\n# # instance ID to associate us with itself. So invoke that here.\n# if !@config['basis'].nil? and @config[\"alarms\"] and !@config[\"alarms\"].empty?\n# @config[\"alarms\"].each { |alarm|\n# alarm_obj = MU::MommaCat.findStray(\n# \"AWS\",\n# \"alarms\",\n# region: @config[\"region\"],\n# deploy_id: @deploy.deploy_id,\n# name: alarm['name']\n# ).first\n# alarm[\"dimensions\"] = [{:name => \"InstanceId\", :value => @cloud_id}]\n#\n# if alarm[\"enable_notifications\"]\n# topic_arn = MU::Cloud::AWS::Notification.createTopic(alarm[\"notification_group\"], region: @config[\"region\"])\n# MU::Cloud::AWS::Notification.subscribe(arn: topic_arn, protocol: alarm[\"notification_type\"], endpoint: alarm[\"notification_endpoint\"], region: @config[\"region\"])\n# alarm[\"alarm_actions\"] = [topic_arn]\n# alarm[\"ok_actions\"] = [topic_arn]\n# end\n#\n# alarm_name = alarm_obj ? alarm_obj.cloud_id : \"#{node}-#{alarm['name']}\".upcase\n#\n# MU::Cloud::AWS::Alarm.setAlarm(\n# name: alarm_name,\n# ok_actions: alarm[\"ok_actions\"],\n# alarm_actions: alarm[\"alarm_actions\"],\n# insufficient_data_actions: alarm[\"no_data_actions\"],\n# metric_name: alarm[\"metric_name\"],\n# namespace: alarm[\"namespace\"],\n# statistic: alarm[\"statistic\"],\n# dimensions: alarm[\"dimensions\"],\n# period: alarm[\"period\"],\n# unit: alarm[\"unit\"],\n# evaluation_periods: alarm[\"evaluation_periods\"],\n# threshold: alarm[\"threshold\"],\n# comparison_operator: alarm[\"comparison_operator\"],\n# region: @config[\"region\"]\n# )\n# }\n# end\n#\n# # We have issues sometimes where our dns_records are pointing at the wrong node name and IP address.\n# # Make sure that doesn't happen. Happens with server pools only\n# if @config['dns_records'] && !@config['dns_records'].empty?\n# @config['dns_records'].each { |dnsrec|\n# if dnsrec.has_key?(\"name\")\n# if dnsrec['name'].start_with?(MU.deploy_id.downcase) && !dnsrec['name'].start_with?(node.downcase)\n# MU.log \"DNS records for #{node} seem to be wrong, deleting from current config\", MU::WARN, details: dnsrec\n# dnsrec.delete('name')\n# dnsrec.delete('target')\n# end\n# end\n# }\n# end\n\n # Unless we're planning on associating a different IP later, set up a\n # DNS entry for this thing and let it sync in the background. We'll\n # come back to it later.\n if @config['static_ip'].nil? 
&& !@named\n MU::MommaCat.nameKitten(self)\n @named = true\n end\n\n _nat_ssh_key, _nat_ssh_user, nat_ssh_host, _canonical_ip, _ssh_user, _ssh_key_name = getSSHConfig\n if !nat_ssh_host and !MU::Cloud.resourceClass(\"Google\", \"VPC\").haveRouteToInstance?(cloud_desc, credentials: @config['credentials'])\n# XXX check if canonical_ip is in the private ranges\n# raise MuError, \"#{node} has no NAT host configured, and I have no other route to it\"\n end\n\n # See if this node already exists in our config management. If it does,\n # we're done.\n if @groomer.haveBootstrapped?\n MU.log \"Node #{node} has already been bootstrapped, skipping groomer setup.\", MU::NOTICE\n @groomer.saveDeployData\n MU::MommaCat.unlock(@cloud_id+\"-orchestrate\")\n MU::MommaCat.unlock(@cloud_id+\"-groom\")\n return true\n end\n\n @groomer.bootstrap\n\n # Make sure we got our name written everywhere applicable\n if !@named\n MU::MommaCat.nameKitten(self)\n @named = true\n end\n\n MU::MommaCat.unlock(@cloud_id+\"-groom\")\n MU::MommaCat.unlock(@cloud_id+\"-orchestrate\")\n return true\n end", "def new_auto_scaling \n AWS::AutoScaling.new(:auto_scaling_endpoint => \"autoscaling.#{AMI_REGION}.amazonaws.com\")\nend", "def set_up_server\n node = Chef::Node.new\n node.name 'nothing'\n node.automatic[:platform] = 'kitchen_metal'\n node.automatic[:platform_version] = 'kitchen_metal'\n Chef::Config.local_mode = true\n run_context = Chef::RunContext.new(node, {},\n Chef::EventDispatch::Dispatcher.new(Chef::Formatters::Doc.new(STDOUT,STDERR)))\n recipe_exec = Chef::Recipe.new('kitchen_vagrant_metal',\n 'kitchen_vagrant_metal', run_context)\n\n # We require a platform, but layout in driver is optional\n recipe_exec.instance_eval get_platform_recipe\n recipe = get_driver_recipe\n recipe_exec.instance_eval recipe if recipe\n return run_context\n end", "def createECSService\n puts \"Creating ECS service #{$CLUSTER_NAME}...\"\n puts `ecs-cli compose \\\n --project-name #{$CONTAINER_NAME} \\\n service up \\\n --region #{$REGION} \\\n --cluster #{$CLUSTER_NAME} \\\n --launch-type EC2 \\\n --target-group-arn #{$TARGET_GROUP_ARN} \\\n --container-name #{$CONTAINER_NAME} \\\n --container-port #{$CONTAINER_PORT} \\\n --role ecsServiceRole`\nend", "def post_storage(request)\n # --- Check OCCI XML from POST ---\n if request.params['occixml'] == nil\n error_msg = \"OCCI XML representation of Image\" +\n \" not present in the request\"\n error = OpenNebula::Error.new(error_msg)\n return error, 400\n end\n\n # --- Create and Add the new Image ---\n occixml = request.params['occixml']\n occixml = occixml[:tempfile].read if occixml.class == Hash\n\n image = ImageOCCI.new(\n Image.build_xml,\n @client,\n occixml,\n request.params['file'])\n\n # --- Generate the template and Allocate the new Instance ---\n template = image.to_one_template\n return template, 500 if OpenNebula.is_error?(template)\n\n rc = image.allocate(template, @config[:datastore_id]||1)\n if OpenNebula.is_error?(rc)\n return rc, CloudServer::HTTP_ERROR_CODE[rc.errno]\n end\n\n image.info\n #wait until image is ready to return\n while (image.state_str == 'LOCKED') && (image['RUNNING_VMS'] == '0') do\n sleep IMAGE_POLL_SLEEP_TIME\n image.info\n end\n\n # --- Prepare XML Response ---\n return to_occi_xml(image, :code=>201)\n end", "def create_new_asg config\n delete_launch_configs\n\n auto_scaling = new_auto_scaling\n\n #\n # 1. 
create the launh configuration\n #\n options = {\n :security_groups => [AMI_SECURITY_GROUP],\n :key_pair => AMI_KEY_PAIR,\n :user_data => user_data\n }\n\n launch_config = auto_scaling.launch_configurations.create(\n launch_config_name, \n config[\"ami\"],\n AMI_INSTANCE_TYPE,\n options\n )\n\n #\n # now create the asg\n #\n\n tags = [\n {:key => \"server\", :value => APP_NAME},\n {:key => \"build\", :value => VERSION},\n {:key => \"env\", :value => APP_ENV}\n ]\n\n options = {\n :load_balancers => [AMI_ELB],\n :launch_configuration => launch_config,\n :availability_zones => [AMI_AZ],\n :min_size => 1,\n :max_size => 1,\n :tags => tags\n }\n\n puts \"creating asg\"\n puts \"\\toptions => #{options}\"\n puts \"\\ttags => #{tags}\"\n auto_scaling.groups.create(launch_config_name, options)\nend", "def create_resource\n # response = ec2.run_instances(\n # node_config(max_count: 1, self.to_h)\n # ).instances.first\n\n instance_attr_accessor response\n # id = @response[:instance_id]\n begin\n ec2.wait_until(:instance_running, instance_ids: [id]) do\n logger.info \"waiting for #{ids.count} Neurons to start...\"\n end\n rescue Aws::Waiters::Errors::WaiterFailed => e\n # TODO: retry stuff\n # redo unless (count += 1 <=3 )\n end\n\n yield self if block_given?\n self\n end", "def createEc2SSHKey\n\t\t\treturn [@keypairname, @ssh_private_key, @ssh_public_key] if [email protected]?\n\t\t keyname=\"deploy-#{MU.mu_id}\"\n\t\t\tkeypair = MU.ec2(MU.myRegion).create_key_pair(key_name: keyname)\n\t\t\t@keypairname = keyname\n\t\t @ssh_private_key = keypair.key_material\n\t\t\tMU.log \"SSH Key Pair '#{keyname}' fingerprint is #{keypair.key_fingerprint}\"\n\t\t\n\t\t if !File.directory?(\"#{@myhome}/.ssh\") then\n\t\t\t\tMU.log \"Creating #{@myhome}/.ssh\", MU::DEBUG\n\t\t Dir.mkdir(\"#{@myhome}/.ssh\", 0700)\n\t\t end\n\t\t\n\t\t # Plop this private key into our local SSH key stash\n\t\t\tMU.log \"Depositing key '#{keyname}' into #{@myhome}/.ssh/#{keyname}\", MU::DEBUG\n\t\t ssh_keyfile = File.new(\"#{@myhome}/.ssh/#{keyname}\", File::CREAT|File::TRUNC|File::RDWR, 0600)\n\t\t ssh_keyfile.puts @ssh_private_key\n\t\t ssh_keyfile.close\n\n\t\t\t# Drag out the public key half of this\n\t\t\t@ssh_public_key = %x{/usr/bin/ssh-keygen -y -f #{@myhome}/.ssh/#{keyname}}\n\t\t\t@ssh_public_key.chomp!\n\n\t\t\t# Replicate this key in all regions\n\t\t\tMU::Config.listRegions.each { |region|\n\t\t\t\tnext if region == MU.myRegion\n\t\t\t\tMU.log \"Replicating #{keyname} to #{region}\", MU::DEBUG, details: @ssh_public_key\n\t\t\t\tMU.ec2(region).import_key_pair(\n\t\t\t\t\tkey_name: @keypairname,\n\t\t\t\t\tpublic_key_material: @ssh_public_key\n\t\t\t\t)\n\t\t\t}\n\n# XXX This library code would be nicer... except it can't do PKCS8.\n#\t\t\tfoo = OpenSSL::PKey::RSA.new(@ssh_private_key)\n#\t\t\tbar = foo.public_key\n\n\t\t\tsleep 3\n\t\t return [keyname, keypair.key_material, @ssh_public_key]\n\t\tend" ]
[ "0.7089384", "0.6847267", "0.6640451", "0.6573715", "0.6484274", "0.63905066", "0.63649774", "0.6353124", "0.62630993", "0.625121", "0.62494844", "0.62214583", "0.6199846", "0.6118713", "0.6085152", "0.6058307", "0.6039063", "0.6016051", "0.5984712", "0.5980602", "0.59506595", "0.59456", "0.5864854", "0.5854807", "0.57622296", "0.5737816", "0.5712299", "0.5621053", "0.5617455", "0.5580339", "0.55753475", "0.55641973", "0.5553557", "0.5547984", "0.55449766", "0.55331975", "0.5505163", "0.55005103", "0.5422686", "0.53835696", "0.53528285", "0.5349192", "0.5337183", "0.5300102", "0.528684", "0.5275684", "0.52735466", "0.5224473", "0.5224473", "0.52230114", "0.52144057", "0.51807237", "0.51600677", "0.5153663", "0.5153378", "0.5153378", "0.5144363", "0.5128438", "0.512747", "0.51209986", "0.5115049", "0.5085596", "0.50782", "0.507625", "0.50631654", "0.5057754", "0.5057086", "0.5046085", "0.5041852", "0.5025332", "0.50241196", "0.49996674", "0.4992163", "0.4990497", "0.49896428", "0.49811772", "0.49729356", "0.4955336", "0.4942877", "0.49395815", "0.493554", "0.49260154", "0.4924222", "0.4923931", "0.49185523", "0.4906455", "0.49060133", "0.49030614", "0.48991486", "0.4892993", "0.48879373", "0.4877515", "0.48747912", "0.4869662", "0.48567653", "0.48565364", "0.48544306", "0.48501474", "0.4849844", "0.4847673" ]
0.6965684
1
Delete a stemcell and the accompanying snapshots
def delete_stemcell(stemcell_id) with_thread_name("delete_stemcell(#{stemcell_id})") do stemcell = StemcellFinder.find_by_id(@ec2_client, stemcell_id) stemcell.delete end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_stemcell(stemcell_id)\n with_thread_name(\"delete_stemcell(#{stemcell_id})\") do\n stemcell = StemcellFinder.find_by_id(@ec2_resource, stemcell_id)\n stemcell.delete\n end\n end", "def delete_stemcell(stemcell_id)\n with_thread_name(\"delete_stemcell(#{stemcell_id})\") do\n stemcell = StemcellFinder.find_by_id(@ec2_resource, stemcell_id)\n stemcell.delete\n end\n end", "def delete_stemcell(stemcell_cid)\n @telemetry_manager.monitor('initialize') do\n _init_azure\n end\n with_thread_name(\"delete_stemcell(#{stemcell_cid})\") do\n @telemetry_manager.monitor('delete_stemcell', id: stemcell_cid) do\n if is_light_stemcell_cid?(stemcell_cid)\n @light_stemcell_manager.delete_stemcell(stemcell_cid)\n elsif @use_managed_disks\n @stemcell_manager2.delete_stemcell(stemcell_cid)\n else\n @stemcell_manager.delete_stemcell(stemcell_cid)\n end\n end\n end\n end", "def delete_from_disk; end", "def delete_fusion_vm_snapshot(options)\n clone_list = []\n if options['clone'].to_s.match(/\\*/) or options['clone'].to_s.match(/all/)\n clone_list = get_fusion_vm_snapshots(options)\n clone_list = clone_list.split(\"\\n\")[1..-1]\n else\n clone_list[0] = options['clone']\n end\n clone_list.each do |clone|\n fusion_vmx_file = get_fusion_vm_vmx_file(options)\n message = \"Information:\\tDeleting snapshot \"+clone+\" for #{options['vmapp']} VM \"+options['name']\n command = \"'#{options['vmrun']}' -T fusion deleteSnapshot '#{fusion_vmx_file}' '#{clone}'\"\n execute_command(options,message,command)\n end\n return\nend", "def del\n delete\n end", "def delete_snapshot(snapshot_id)\n end", "def wipe_snapshots_data; end", "def wipe_snapshots_data; end", "def destroy\n @document_part.destroy\n end", "def delete\n super do\n @strands.each do |strand|\n Cluster.redis.hdel LOOKUP, strand.id\n end\n Cluster.redis.hdel TOPICS, @id\n end\n end", "def delete\n \n end", "def destroy\n @cell_automaton.destroy\n file_path = Rails.root.join(\"public\", \"cell_automatons\", \"#{current_user.id}\")\n file_name = file_path.to_s + \"/\" + @cell_automaton.id.to_s\n File.delete(\"#{file_name}.rb\")\n \n redirect_to cell_automatons_url, notice: 'Cell automaton was successfully destroyed.'\n end", "def destroy\n super do\n graph.delete [source.to_term, nil, nil]\n parent.delete [parent, nil, source.to_term]\n end\n end", "def destroy\n # http://api.rubyonrails.org/classes/ActiveRecord/Associations/ClassMethods.html#module-ActiveRecord::Associations::ClassMethods-label-Delete+or+destroy-3F\n # @transect.transect_admin_editor.each(&:destroy)\n @transect.tree_plots.each(&:destroy)\n @transect.destroy\n respond_to do |format|\n format.html { redirect_to transects_url, notice: 'Transect was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy; delete end", "def delete\n if options.master?\n delete_master(options)\n elsif options.slave?\n delete_slave(options)\n else\n invoke :help, [:delete]\n end\n end", "def deleteEssence\n \n begin\n \n # Gets @filename and @path\n getPathAndFilename \n name = @filename \n filepath = @path\n puts \"filepath: \" + filepath \n puts \"filename: \" + name\n \n # Find the device \n device = User.find_by_username(params[:username]).devices.find_by_dev_name(params[:devicename])\n \n # Essence can't be deleted from a virtual container\n if device.dev_type == \"virtual_container\"\n render :text => \"Error: Essence can't be deleted from a virtual container'\", :status => 409\n return \n end\n \n file = nil\n if device != nil\n file = device.devfiles.find(:first, :conditions => [\"name = ? and path = ?\", name, filepath])\n if file == nil\n puts \"FILE NOT FOUND: \" + name\n render :text => \"Error. File's metadata can not be found.\", :status => 404\n return\n end\n \n if params[:blob_hash]\n blob = file.blobs.find(:first, :conditions => [\"blob_hash = ?\", params[:blob_hash]])\n else\n blob = file.blobs.find_by_follower_id(nil)\n end\n end\n \n # Checks that the file's and it's version's metadata can be found in database.\n if file == nil or blob == nil\n puts \"Blob not found for file: \" + name\n render :text => \"Error. File's metadata can not be found.\", :status => 404\n return\n end\n \n if blob.uploaded == false\n render :text => \"Error: Essence of the file was not on the server.\", :status => 409\n return\n end\n \n # Remove the fileupload entry\n fup = Fileupload.find_by_blob_id(blob.id)\n if fup != nil\n fup.destroy\n end\n \n # Update blob not to be uploaded and upload_requested to nil\n blob.update_attribute(:uploaded, false)\n blob.update_attribute(:upload_requested, nil)\n \n \n # Remove the actual essence\n deletepath = \"public/devfiles/\" + file.device_id.to_s + \"/\" + blob.blob_hash + \"_\" + file.name\n \n if File.exists?(deletepath)\n FileUtils.rm_f(deletepath)\n puts \"deleted the essence...\"\n else\n puts \"Essence not found and could not be deleted...\"\n end\n \n rescue => exp\n putsE(exp)\n render :text => \"There was an error when trying to delete the essence from the server\", :status => 409\n return\n end\n \n render :text => \"Essence of the file deleted from the server\", :status => 200\n return\n end", "def destroy\n result = nil\n obj = self.inst_strip_braces(self.object)\n if obj\n # first delete the record from viewable list\n result = Rhom::RhomDbAdapter::delete_from_table(Rhom::TABLE_NAME,\n {\"object\"=>obj})\n # now add delete operation\n result = Rhom::RhomDbAdapter::insert_into_table(Rhom::TABLE_NAME,\n {\"source_id\"=>self.get_inst_source_id,\n \"object\"=>obj,\n \"update_type\"=>'delete'})\n end\n result\n end", "def destroy\n @shelter.destroy\n\n head :no_content\n end", "def delete(params = {})\n response = client.delete \"/_snapshot/{repository}/{snapshot}\", update_params(params, action: \"snapshot.delete\", rest_api: \"snapshot.delete\")\n response.body\n end", "def destroy\n Rails.logger.debug {\"destroying gridfs file #{@id}\"}\n if persisted?\n Photo.mongo_client.database.fs.find(:_id=>BSON::ObjectId.from_string(@id)).delete_one\n end\n end", "def delete_snapshot snapshot\n subscriber.delete_snapshot snapshot: snapshot_path(snapshot)\n end", "def delete_cell_index_arrays(study)\n cluster_file_ids = study.study_files.where(file_type: 'Cluster').pluck(:id)\n cluster_ids = 
study.cluster_groups.pluck(:id)\n cursor = DataArray.where(\n name: 'index', array_type: 'cells', linear_data_type: 'ClusterGroup', :linear_data_id.in => cluster_ids,\n study_id: study.id, :study_file_ids.in => cluster_file_ids\n )\n cursor.delete_all if cursor.exists?\n study.cluster_groups.update_all(indexed: false)\n end", "def destroy\n self.littles.each do | little|\n little.big_id = nil;\n end\n self.positions.each do | pos |\n pos.dke_info_id = nil;\n end\n return super\n end", "def destroy\n @cell.destroy\n respond_to do |format|\n format.html { redirect_to cells_url, notice: 'Cell was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @cell.destroy\n respond_to do |format|\n format.html { redirect_to cells_url, notice: 'Cell was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n save_graph = RDF::Graph.new\n fill_save_graph save_graph\n save_graph.each do |s|\n puts s.inspect\n end\n Db.delete_data( save_graph, :graph => klass.object_graph )\n end", "def delete_tables\n delete_characters\n delete_kiosks\n delete_traps\n delete_buttons\n end", "def destroy\n @snapshot.destroy\n\n respond_to do |format|\n format.html { redirect_to(snapshots_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @expectation_stem = RiGse::ExpectationStem.find(params[:id])\n @expectation_stem.destroy\n\n respond_to do |format|\n format.html { redirect_to(expectation_stems_url) }\n format.xml { head :ok }\n end\n end", "def remove\n conf = {:path=>\"#{RAILS_ROOT}/index/#{RAILS_ENV}/rip\"}\n index = Ferret::Index::Index.new(conf)\n\n mrokhashs = params[:parts][:part].collect {|p| p[:mrokhash]}\n parts = Part.find_all_by_mrokhash(mrokhashs)\n logged_in_user.parts.delete(parts)\n parts.each do |part|\n update_user_in_field(:index => index, :part => part, :remove => true)\n remove_cache_pages(part.rip_id) if part.rip_id\n end\n head :ok\n end", "def delete_all\n Neo.db.execute_query(\"#{initial_match} OPTIONAL MATCH (n0)-[r]-() DELETE n0,r\")\n end", "def delete_ontologies_and_submissions\n LinkedData::SampleData::Ontology.delete_ontologies_and_submissions\n end", "def delete!\n clear!\n delete\n end", "def delete_checksums\n # puts \"Del: #{@basename}\"\n @pkgdb.query(\"delete from checksums where basename = '#{@basename}'\")\n end", "def delete\n \n end", "def delete\n CMark.node_unlink(@pointer)\n end", "def do_remove_from_termination (term)\r\n term.aln_path_id = nil\r\n term.aln_path = nil\r\n term.save\r\n end", "def destroy\n delete ''\n end", "def delete(atom)\n form_data = { atom_id: atom.id }\n @hive_party.post \"/atoms/#{atom.id}/deletions\", form_data\n end", "def destroy\n @action_runner.run(:box_remove, { :box_name => @name, :box_directory => @directory })\n end", "def destroy\n @norma.destroy\n end", "def destroy\n @cell_set.destroy\n respond_to do |format|\n format.html { redirect_to cell_sets_url, notice: 'Cell set was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n PhotoInSlayder.where(slyder: @slayder.id).each do |trash|\n puts \"we delete \" + trash.to_s\n trash.destroy\n trash.save\n puts \"trash need be destroyed\"\n puts trash.to_s\n end\n @slayder.destroy\n respond_to do |format|\n format.html { redirect_to action: \"index\", notice: 'Slayder was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def snap_delete(volume,snapshot)\n output = @filer.invoke(\"snapshot-delete\",\"volume\",volume,\"snapshot\",snapshot)\n if (output.results_status() == \"failed\")\n raise \"Error #{output.results_errno} deleting snapshot #{snapshot} on #{volume}: #{output.results_reason()}\\n\"\n end\n end", "def __remove_cluster_data\n FileUtils.rm_rf arguments[:path_data]\n end", "def destroy\n @dna.destroy\n\n head :no_content\n end", "def destroy\n @snap = Snap.find(params[:id])\n @snap.destroy\n\n respond_to do |format|\n format.html { redirect_to(snaps_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n if @summary.destroy\n \n end\n end", "def trash\n self.attributes = {\n :page_id => nil,\n :container_id => nil,\n :cell_id => nil,\n :folded => true,\n :public => false\n }\n self.remove_from_list\n end", "def destroy\n \n end", "def destroy\n \n end", "def delete! snaps\n if snaps.count > 0\n VfSnapshots.verbose \"\\n#{snaps.count} to delete, here we go.\"\n else\n VfSnapshots.verbose \"\\nNothing to delete.\"\n end\n begin\n snaps.each_with_index do |snapshot,idx|\n VfSnapshots.verbose \"[#{idx+1} of #{snaps.length}] Deleting #{account.name} #{snapshot.description}\"\n snapshot.delete\n # puts \"Sleeping...\"\n # sleep 1\n # puts \"Woke!\"\n end\n rescue Aws::EC2::Errors::ResourceLimitExceeded\n VfSnapshots.verbose \"\\nThrottled!\"\n exit\n end\n end", "def delete_snapshot(name)\n Fission::Action::Snapshot::Deleter.new(self).delete_snapshot(name)\n end", "def remove_data!\n connector.remove_torrent @ids, true\n @deleted = true\n end", "def delete_from_dor\n CocinaObjectStore.destroy(druid)\n AdministrativeTags.destroy_all(identifier: druid)\n ObjectVersion.where(druid:).destroy_all\n Event.where(druid:).destroy_all\n end", "def destroy\n @storyboard_cell = StoryboardCell.find(params[:id])\n @storyboard_cell.destroy\n\n respond_to do |format|\n format.html { redirect_to storyboard_cells_url }\n format.json { head :ok }\n end\n end", "def destroy\n @cellar = Cellar.find(params[:id])\n @cellar.destroy\n\n respond_to do |format|\n format.html { redirect_to(cellars_url) }\n format.xml { head :ok }\n end\n end", "def purge\n self.files.each do |f|\n f.destroy\n end\n self.commits.each do |c|\n c.destroy\n end\n end", "def destroy\n @sneaker.destroy\n Sneaker.reindex\n respond_to do |format|\n format.html { redirect_to sneaker_url, notice: 'Pin was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def delete\n @text.slice!(@cursor, 1)\n end", "def destroy\n @term.destroy!\n\n head :ok\n end", "def run_on_deletion(paths)\n end", "def run_on_deletion(paths)\n end", "def delete_snapshot(vm, name)\n snapshot = enumerate_snapshots(vm).find { |s| s.name == name }\n\n # No snapshot matching \"name\"\n return nil if snapshot.nil?\n\n task = snapshot.snapshot.RemoveSnapshot_Task(removeChildren: false)\n\n if block_given?\n task.wait_for_progress do |progress|\n yield progress unless progress.nil?\n end\n else\n task.wait_for_completion\n end\n end", "def destroy\n @leaf_spot_imm_search.destroy\n respond_to do |format|\n format.html { redirect_to leaf_spot_imm_searches_url, notice: 'Leaf spot imm search was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def eject!\n #removes the first node\n node = @store.first\n @store.remove(node.key)\n\n #get rid of the map's reference to the deleted node\n @map.delete(node.key)\n end", "def destroy\n packet_id = @cell.packet_id\n @cell.destroy\n respond_to do |format|\n format.html { redirect_to cells_url }\n format.json { head :no_content }\n end\n PacketManager.instance.update_packet( Packet.find(packet_id) )\n end", "def destroy_instance_docs!\n doc_list = instance_design_doc.view(:all)\n destroy_count = doc_list['total_rows']\n return destroy_count if destroy_count < 1\n \n docs = instance_database.get_bulk(doc_list['rows'].map {|rh| rh['id']})\n docs['rows'].each {|rh| instance_database.delete_doc(rh['doc'], false)}\n instance_database.bulk_delete\n\n destroy_count\n end", "def destroy_instance_docs!\n doc_list = instance_design_doc.view(:all)\n destroy_count = doc_list['total_rows']\n return destroy_count if destroy_count < 1\n \n docs = instance_database.get_bulk(doc_list['rows'].map {|rh| rh['id']})\n docs['rows'].each {|rh| instance_database.delete_doc(rh['doc'], false)}\n instance_database.bulk_delete\n\n destroy_count\n end", "def destroy\n\t\t@cell = Cell.find(params[:id])\n\t\[email protected]\n\n\t\trespond_to do |format|\n\t\t\tformat.html { redirect_to cells_url }\n\t\t\tformat.json { head :no_content }\n\t\tend\n\tend", "def snap_delete(volume,snapshot)\n output = @filer.invoke(\"snapshot-create\",\"volume\",volume,\"snapshot\",snapshot)\n if (output.results_status() == \"failed\")\n raise \"Error #{output.results_errno} creating snapshot #{snapshot} on #{volume}: #{output.results_reason()}\\n\"\n end\n end", "def remove!\n self.results.each{ |r| r.remove! }\n self.metadata.remove!\n end", "def destroy\n @late_mark_master.destroy\n @late_mark_masters = LateMarkMaster.all\n end", "def destroy!; end", "def delete(path)\n self.stage { |idx| idx.delete(path) }\n end", "def delete(command)\n pp @client.files.delete(clean_up(command[1]))\n end", "def delete_children(id, flowcell)\n @session.flowcell_lane.dataset.filter(:flowcell_id => id).delete\n end", "def delete_snapshot(name)\n result = get_snapshot(name)\n response = @client.rest_delete(result['uri'], { 'If-Match' => result['eTag'] }, @api_version)\n @client.response_handler(response)\n true\n end", "def delete(*arguments, &block)\n objects = q(*arguments, &block).map { |o| o.removed_from_bibliography(self) }\n @data = @data - objects\n objects.length == 1 ? objects[0] : objects\n end", "def delete_snat\n super\n end", "def teardown\n @executor.start_section \"Vundle\"\n @executor.execute \"Removing Vim Folder\", \"rm -r #{vim_folder}\" do |output|\n raise TaskSkipped.new(\"Folder not found\") if output.include? 
\"No such file or directory\"\n end\n end", "def destroy\n @snap.update(viewed: true)\n end", "def rm_part\n FileUtils.rm part if File.exist?(part)\n end", "def destroy\n saber_segment = find_saber_segment\n saber_segment.destroy\n head :no_content\n end", "def test_f11_Delete_missing_view\n W('f11a');\n\n Metakit::Storage.open(\"f11a\", 1) {|s1|\n v1 = s1.get_as(\"a\");\n v1.set_size(10);\n\n s1.commit();\n }\n # D(f11a);\n R('f11a');\n end", "def delete\n Song.transaction do\n self.status = :deleted\n self.save!\n\n # sets deleted = 0\n self.mixes.destroy_all\n\n # actually deletes\n self.mlabs.destroy_all\n self.featurings.destroy_all\n end\n end", "def destroy\n @term.destroy\n head :no_content\n end", "def delete(sector)\n @sectors.delete(sector) \n end", "def destroy\n # workaround\n @manifestation.identifiers.destroy_all\n @manifestation.creators.destroy_all\n @manifestation.contributors.destroy_all\n @manifestation.publishers.destroy_all\n @manifestation.bookmarks.destroy_all if defined?(EnjuBookmark)\n @manifestation.reload\n @manifestation.destroy\n\n respond_to do |format|\n format.html { redirect_to manifestations_url, notice: t('controller.successfully_deleted', model: t('activerecord.models.manifestation')) }\n format.json { head :no_content }\n end\n end", "def destroy\n @medium_map_cell = MediumMapCell.find(params[:id])\n @medium_map_cell.destroy\n\n respond_to do |format|\n format.html { redirect_to medium_map_cells_url }\n format.json { head :no_content }\n end\n end", "def destroy\n ValidNetwork.find_by_guid(segment_config_network.guid).destroy if segment_config_network &&ValidNetwork.find_by_guid(segment_config_network.guid)\n meas_locations.each {|mloc| MeasLocation.find_by_guid(mloc.guid).destroy if mloc && MeasLocation.find_by_guid(mloc.guid)}\n super\n end", "def destroy\n @thesis_supervision.destroy\n respond_to do |format|\n format.html { redirect_to thesis_supervisions_url }\n format.json { head :no_content }\n end\n end", "def delete()\n @ole.Delete()\n end", "def delete()\n @ole.Delete()\n end", "def delete()\n @ole.Delete()\n end", "def delete()\n @ole.Delete()\n end", "def nuke\n self.open_graph_event.destroy unless self.open_graph_event.nil?\n self.teamsheet_entries.destroy_all\n self.messages.destroy_all\n self.activity_items.destroy_all\n self.result.destroy unless self.result.nil?\n self.destroy\n end", "def delete(s)\n \n key = @sections.keys.grep(/#{s.downcase}/i).first\n old_value = @sections[key].flatten.join\n heading = last_heading(key)\n old_section = heading + old_value\n \n @s.sub!(old_section, '') \n load_sections(@s) \n save()\n \n :deleted\n end" ]
[ "0.65500957", "0.65500957", "0.6464295", "0.61432683", "0.59583855", "0.5821966", "0.58048797", "0.5801757", "0.5801757", "0.5780022", "0.5767575", "0.57407224", "0.57278293", "0.5714026", "0.5684943", "0.5684763", "0.5663709", "0.5648932", "0.5641919", "0.56105095", "0.560388", "0.55793345", "0.5577721", "0.5576162", "0.55721635", "0.5566122", "0.5566122", "0.55655444", "0.55623263", "0.5559423", "0.5556944", "0.5539776", "0.5539143", "0.5520377", "0.5514484", "0.551169", "0.55036414", "0.5502325", "0.5501949", "0.55019045", "0.54984885", "0.54946345", "0.5488978", "0.54797405", "0.5475413", "0.5473276", "0.5469314", "0.546046", "0.54583794", "0.5456794", "0.5455327", "0.5450603", "0.5450603", "0.54498297", "0.54489404", "0.54484016", "0.54419345", "0.5437466", "0.5434536", "0.5428841", "0.5427051", "0.5423176", "0.5418107", "0.5415113", "0.5415113", "0.5412624", "0.54007936", "0.5393827", "0.53896683", "0.5389308", "0.5389308", "0.53860426", "0.5384378", "0.5380734", "0.5377866", "0.5375149", "0.5372587", "0.537004", "0.53690875", "0.5367392", "0.53663427", "0.53658307", "0.535784", "0.5356917", "0.53548276", "0.53543556", "0.535193", "0.5351486", "0.53502756", "0.53493285", "0.5344893", "0.53447384", "0.53349835", "0.53345585", "0.53332055", "0.53332055", "0.53332055", "0.53332055", "0.532714", "0.53242" ]
0.6589975
0
Map a set of cloud agnostic VM properties (cpu, ram, ephemeral_disk_size) to a set of AWS specific cloud_properties
def calculate_vm_cloud_properties(vm_properties) required_keys = ['cpu', 'ram', 'ephemeral_disk_size'] missing_keys = required_keys.reject { |key| vm_properties[key] } unless missing_keys.empty? missing_keys.map! { |k| "'#{k}'" } raise "Missing VM cloud properties: #{missing_keys.join(', ')}" end instance_type = @instance_type_mapper.map(vm_properties) { 'instance_type' => instance_type, 'ephemeral_disk' => { 'size' => vm_properties['ephemeral_disk_size'], } } end
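For illustration only (not part of the dataset row above): a minimal, self-contained Ruby sketch of how the positive document's calculate_vm_cloud_properties turns cloud-agnostic requirements into AWS-specific cloud_properties. SketchMapper is a stand-in for the CPI's @instance_type_mapper, the instance type names it returns are assumptions chosen only to show the output shape, and treating ram and ephemeral_disk_size as megabytes is likewise an assumption.

require 'json'

# Stand-in for the CPI's @instance_type_mapper; the real mapper selects from the
# AWS instance type catalogue. The two types below are illustrative assumptions.
class SketchMapper
  def map(req)
    req['cpu'] <= 2 && req['ram'] <= 4096 ? 't3.medium' : 'm5.xlarge'
  end
end

# Mirrors the documented method: validate the cloud-agnostic keys, then build
# the AWS-specific cloud_properties hash.
def calculate_vm_cloud_properties(vm_properties, mapper)
  required_keys = ['cpu', 'ram', 'ephemeral_disk_size']
  missing_keys = required_keys.reject { |key| vm_properties[key] }
  unless missing_keys.empty?
    raise "Missing VM cloud properties: #{missing_keys.map { |k| "'#{k}'" }.join(', ')}"
  end

  {
    'instance_type' => mapper.map(vm_properties),
    'ephemeral_disk' => { 'size' => vm_properties['ephemeral_disk_size'] }
  }
end

requirements = { 'cpu' => 2, 'ram' => 4096, 'ephemeral_disk_size' => 10_240 }
puts JSON.generate(calculate_vm_cloud_properties(requirements, SketchMapper.new))
# => {"instance_type":"t3.medium","ephemeral_disk":{"size":10240}}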
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calculate_vm_cloud_properties(vm_properties)\n required_keys = ['cpu', 'ram', 'ephemeral_disk_size']\n missing_keys = required_keys.reject { |key| vm_properties[key] }\n unless missing_keys.empty?\n missing_keys.map! { |k| \"'#{k}'\" }\n raise \"Missing VM cloud properties: #{missing_keys.join(', ')}\"\n end\n\n instance_type = @instance_type_mapper.map(vm_properties)\n {\n 'instance_type' => instance_type,\n 'ephemeral_disk' => {\n 'size' => vm_properties['ephemeral_disk_size']\n }\n }\n end", "def calculate_vm_cloud_properties(requirements)\n required_keys = %w[cpu ram ephemeral_disk_size]\n missing_keys = required_keys.reject { |key| requirements[key] }\n unless missing_keys.empty?\n missing_keys.map! { |k| \"'#{k}'\" }\n raise \"Missing VM cloud properties: #{missing_keys.join(', ')}\"\n end\n\n @instance_type_mapper.map(\n requirements: requirements,\n flavors: compute.flavors,\n boot_from_volume: @boot_from_volume,\n )\n end", "def calculate_vm_cloud_properties(desired_instance_size)\n @telemetry_manager.monitor('initialize') do\n _init_azure\n end\n with_thread_name(\"calculate_vm_cloud_properties(#{desired_instance_size})\") do\n @telemetry_manager.monitor('calculate_vm_cloud_properties') do\n @logger.info(\"calculate_vm_cloud_properties(#{desired_instance_size})\")\n location = _azure_config.location\n cloud_error(\"Missing the property 'location' in the global configuration\") if location.nil?\n\n required_keys = %w[cpu ram ephemeral_disk_size]\n missing_keys = required_keys.reject { |key| desired_instance_size[key] }\n unless missing_keys.empty?\n missing_keys.map! { |k| \"'#{k}'\" }\n raise \"Missing VM cloud properties: #{missing_keys.join(', ')}\"\n end\n\n available_vm_sizes = @azure_client.list_available_virtual_machine_sizes_by_location(location)\n instance_types = @instance_type_mapper.map(desired_instance_size, available_vm_sizes)\n {\n 'instance_types' => instance_types,\n 'ephemeral_disk' => {\n 'size' => (desired_instance_size['ephemeral_disk_size'] / 1024.0).ceil * 1024\n }\n }\n end\n end\n end", "def cloud_properties_for_server_flavor(server_flavor)\n if aws?\n { \"instance_type\" => server_flavor }\n elsif openstack?\n { \"instance_type\" => server_flavor }\n else\n raise 'Please implement #{self.class}#cloud_properties_for_server_flavor'\n end\n end", "def logical_properties(disks)\n properties = Mash.new\n disks.each do |disk|\n property = Mash.new\n drive = disk[\"deviceid\"]\n property[:kb_size] = disk[\"size\"].to_i / 1000\n property[:kb_available] = disk[\"freespace\"].to_i / 1000\n property[:kb_used] = property[:kb_size] - property[:kb_available]\n property[:percent_used] = (property[:kb_size] == 0 ? 
0 : (property[:kb_used] * 100 / property[:kb_size]))\n property[:mount] = disk[\"name\"]\n property[:fs_type] = disk[\"filesystem\"].to_s.downcase\n property[:volume_name] = disk[\"volumename\"].to_s.downcase\n properties[drive] = property\n end\n properties\n end", "def customize_cloud_config(cloud_init_yaml, vm_i)\n case vm_i\n when 1 then cloud_init_yaml['coreos']['fleet']['metadata'] = 'role=head'\n when 2 then cloud_init_yaml['coreos']['fleet']['metadata'] = 'role=proxy'\n when 3 then cloud_init_yaml['coreos']['fleet']['metadata'] = 'role=web'\n when 4 then cloud_init_yaml['coreos']['fleet']['metadata'] = 'role=web'\n end\nend", "def compute_attributes\n\t\t\t\t\tcp_attributes = self.vm_attrs\t\t\t\n\t\t\t\t\tunless (vm_attributes || {}).empty?\n\t\t\t\t\t\tcp_attributes[\"cpus\"] = vm_attributes[\"cpus\"]\n\t\t\t\t\t\tcp_attributes[\"memory_mb\"] = vm_attributes[\"memory_mb\"].to_i\n\t\t\t\t\t\tvm_volumes_attributes = cp_attributes[\"volumes_attributes\"]\n\t\t\t\t\t\tvm_volumes_attributes.each{|k, v| k != \"new_volumes\" ? v[\"size_gb\"] = vm_attributes[\"size_gb\"] : \"\"}\n\t\t\t\t\t\tcp_attributes[\"volumes_attributes\"] = vm_volumes_attributes\n\t\t\t\t\tend\n\t\t\t\t\treturn {\"compute_attributes\" => cp_attributes.merge({\"start\" => \"1\"})}.merge!(self.build_interface)\n\t\t\t\tend", "def to_configure_vapp_hash\n {\n :name => name,\n :cpus => cpus,\n :memory => memory,\n :disks => disks.map {|d| { :number => d.address.to_s, :size => d.vcloud_size, :resource => d.vcloud_size.to_s } }\n }\n end", "def to_configure_vapp_hash\n {\n :name => name,\n :cpus => cpus,\n :memory => memory,\n :disks => disks.map {|d| { :number => d.address.to_s, :size => d.vcloud_size, :resource => d.vcloud_size.to_s } }\n }\n end", "def set_default_properties\n @properties = {\n :InstanceType => \"t2.micro\",\n :ImageId => \"ami-d05e75b8\"\n }\n end", "def get_ec2_values \n cloud[:public_ip][0] = ec2['public_ipv4']\n cloud[:private_ip][0] = ec2['local_ipv4']\n cloud[:provider] = \"ec2\"\nend", "def get_ec2_values\n cloud[:public_ips] << ec2[\"public_ipv4\"]\n cloud[:private_ips] << ec2[\"local_ipv4\"]\n cloud[:public_ipv4] = ec2[\"public_ipv4\"]\n cloud[:public_hostname] = ec2[\"public_hostname\"]\n cloud[:local_ipv4] = ec2[\"local_ipv4\"]\n cloud[:local_hostname] = ec2[\"local_hostname\"]\n cloud[:provider] = \"ec2\"\n end", "def create_objects\n cloud Mash.new\n cloud[:public_ips] = Array.new\n cloud[:private_ips] = Array.new\n end", "def get_gce_values\n cloud[:public_ipv4] = []\n cloud[:local_ipv4] = []\n\n public_ips = gce[\"instance\"][\"networkInterfaces\"].collect do |interface|\n if interface.has_key?(\"accessConfigs\")\n interface[\"accessConfigs\"].collect { |ac| ac[\"externalIp\"] }\n end\n end.flatten.compact\n\n private_ips = gce[\"instance\"][\"networkInterfaces\"].collect do |interface|\n interface[\"ip\"]\n end.compact\n\n cloud[:public_ips] += public_ips\n cloud[:private_ips] += private_ips\n cloud[:public_ipv4] += public_ips\n cloud[:public_hostname] = nil\n cloud[:local_ipv4] += private_ips\n cloud[:local_hostname] = gce[\"instance\"][\"hostname\"]\n cloud[:provider] = \"gce\"\n end", "def attributes_mapping\n common = {\n :hourly_billing_flag => :hourlyBillingFlag,\n :os_code => :operatingSystemReferenceCode,\n :vlan => :primaryNetworkComponent,\n :private_vlan => :primaryBackendNetworkComponent,\n :key_pairs => :sshKeys,\n :private_network_only => :privateNetworkOnlyFlag,\n :user_data => :userData,\n :provision_script => :postInstallScriptUri,\n :network_components => 
:networkComponents,\n }\n\n conditional = if bare_metal?\n {\n :cpu => :processorCoreAmount,\n :ram => :memoryCapacity,\n :disk => :hardDrives,\n :bare_metal => :bareMetalInstanceFlag\n }\n else\n {\n :cpu => :startCpus,\n :ram => :maxMemory,\n :disk => :blockDevices,\n :image_id => :blockDeviceTemplateGroup,\n :ephemeral_storage => :localDiskFlag,\n }\n end\n common.merge(conditional)\n end", "def set_properties\n @cli.system_call \"gcloud config set compute/zone #{self.compute_zone}\"\n @cli.system_call \"gcloud config set container/cluster #{self.container_cluster}\"\n @cli.system_call \"gcloud config set project #{self.project_id}\"\n end", "def get_ec2_values\n @cloud_attr_obj.add_ipv4_addr(ec2[\"public_ipv4\"], :public)\n @cloud_attr_obj.add_ipv4_addr(ec2[\"local_ipv4\"], :private)\n @cloud_attr_obj.public_hostname = ec2[\"public_hostname\"]\n @cloud_attr_obj.local_hostname = ec2[\"local_hostname\"]\n @cloud_attr_obj.provider = \"ec2\"\n end", "def create_hashes\n cloud Mash.new\n cloud[:public_ip] = Hash.new\n cloud[:private_ip] = Hash.new\nend", "def set_resources(vm, options)\n vm.provider :virtualbox do |v|\n v.cpus = options[:cpu]\n v.memory = options[:ram]\n end\n\n # VMware Fusion\n vm.provider :vmware_fusion do |v|\n v.vmx[\"numvcpus\"] = options[:cpu]\n v.vmx[\"memsize\"] = options[:ram]\n end\nend", "def get_azure_values\n cloud[:vm_name] = azure[\"vm_name\"]\n cloud[:public_ips] << azure[\"public_ip\"]\n cloud[:public_ipv4] = azure[\"public_ip\"]\n cloud[:public_fqdn] = azure[\"public_fqdn\"]\n cloud[:public_hostname] = azure[\"public_fqdn\"]\n cloud[:public_ssh_port] = azure[\"public_ssh_port\"] if azure[\"public_ssh_port\"]\n cloud[:public_winrm_port] = azure[\"public_winrm_port\"] if azure[\"public_winrm_port\"]\n cloud[:provider] = \"azure\"\n end", "def get_azure_values\n azure[\"metadata\"][\"network\"][\"public_ipv4\"].each { |ipaddr| @cloud_attr_obj.add_ipv4_addr(ipaddr, :public) }\n azure[\"metadata\"][\"network\"][\"public_ipv6\"].each { |ipaddr| @cloud_attr_obj.add_ipv6_addr(ipaddr, :public) }\n azure[\"metadata\"][\"network\"][\"local_ipv4\"].each { |ipaddr| @cloud_attr_obj.add_ipv4_addr(ipaddr, :private) }\n azure[\"metadata\"][\"network\"][\"local_ipv6\"].each { |ipaddr| @cloud_attr_obj.add_ipv6_addr(ipaddr, :private) }\n @cloud_attr_obj.public_hostname = azure[\"public_fqdn\"]\n @cloud_attr_obj.provider = \"azure\"\n end", "def set_resources(vm, options)\n vm.provider :virtualbox do |v|\n v.cpus = options[:cpu]\n v.memory = options[:ram]\n end\nend", "def used_ips\n existing_kvms.map do |name|\n kvm_ip(name)\n end\nend", "def get_openstack_values\n @cloud_attr_obj.add_ipv4_addr(openstack[\"public_ipv4\"], :public)\n @cloud_attr_obj.add_ipv4_addr(openstack[\"local_ipv4\"], :private)\n @cloud_attr_obj.public_hostname = openstack[\"public_hostname\"]\n @cloud_attr_obj.local_hostname = openstack[\"local_hostname\"]\n @cloud_attr_obj.provider = openstack[\"provider\"]\n end", "def get_rackspace_values\n cloud[:public_ip][0] = rackspace['public_ip']\n cloud[:private_ip][0] = rackspace['private_ip']\n cloud[:provider] = \"rackspace\"\nend", "def give_vm_resources(vm, options = {})\n # scale VM memory/CPUs based on the host's resources\n unless RUBY_PLATFORM.downcase.include?(\"mswin\")\n vm.provider :virtualbox do |virtualbox|\n if options[:memory_scale]\n host_memory_bytes = `\n if command -v free > /dev/null 2>&1\n then\n free -b | awk '/^Mem/ {print $2}'\n else\n sysctl hw.memsize | awk '{print $2}'\n fi\n `.to_i\n host_memory_mb = host_memory_bytes / 1024 / 
1024\n vm_memory_mb = (host_memory_mb * options[:memory_scale]).floor\n if vm_memory_mb > 0\n virtualbox.customize [\"modifyvm\", :id, \"--memory\", vm_memory_mb.to_s]\n end\n end\n\n if options[:cpu_scale]\n host_cpus = `\n if [ -f /proc/cpuinfo ]\n then\n awk '/^processor/ {++n} END {print n}' /proc/cpuinfo\n else\n sysctl hw.logicalcpu | awk '{print $2}'\n fi\n `.to_i\n vm_cpus = (host_cpus * options[:cpu_scale]).floor\n if vm_cpus > 1\n # I/O APIC has to be enabled for the VM to use more than one cpu\n virtualbox.customize [\"modifyvm\", :id, \"--ioapic\", \"on\"]\n virtualbox.customize [\"modifyvm\", :id, \"--cpus\", vm_cpus.to_s]\n end\n end\n end\n end\nend", "def get_rackspace_values\n cloud[:public_ips] << rackspace[\"public_ipv4\"] if rackspace[\"public_ipv4\"]\n cloud[:private_ips] << rackspace[\"local_ipv4\"] if rackspace[\"local_ipv4\"]\n cloud[:public_ipv4] = rackspace[\"public_ipv4\"]\n cloud[:public_ipv6] = rackspace[\"public_ipv6\"]\n cloud[:public_hostname] = rackspace[\"public_hostname\"]\n cloud[:local_ipv4] = rackspace[\"local_ipv4\"]\n cloud[:local_ipv6] = rackspace[\"local_ipv6\"]\n cloud[:local_hostname] = rackspace[\"local_hostname\"]\n cloud[:provider] = \"rackspace\"\n end", "def property_map\n map = {}\n\n current_resource.class.state_properties.each do |property|\n name = property.options[:name]\n\n map[name] = current_resource.send(name)\n end\n\n map[id_property] = current_resource.send(id_property)\n\n map\n end", "def small(config)\n config.vm.provider \"virtualbox\" do |v|\n v.memory = 512 \n v.cpus = 1\n end\nend", "def to_aws(include_min_max_desired)\n {\n auto_scaling_group_name: @name,\n min_size: if include_min_max_desired then @min end,\n max_size: if include_min_max_desired then @max end,\n desired_capacity: if include_min_max_desired then @desired end,\n default_cooldown: @cooldown,\n health_check_type: @check_type,\n health_check_grace_period: @check_grace,\n vpc_zone_identifier: if [email protected]? then subnets_to_aws.map(&:subnet_id).join(\",\") end,\n termination_policies: @termination,\n launch_configuration_name: @launch\n }\n end", "def new_vm_memory_specs(vm, memory)\n # Calculate the adjusted virtual and guaranteed memory:\n virtual = calculate_adjusted_virtual_memory(vm, memory)\n guaranteed = calculate_adjusted_guaranteed_memory(vm, memory)\n\n # The required memory cannot exceed the max configured memory of the VM. 
Therefore, we'll increase the max\n # memory up to 1TB or to the required limit, to allow a successful update for the VM.\n # Once 'max' memory attribute will be introduced, this code should be replaced with the specified max memory.\n supports_max = ext_management_system.version_at_least?('4.1')\n max = calculate_max_memory(vm, memory) if supports_max\n\n {\n :memory => virtual,\n :memory_policy => {\n :guaranteed => guaranteed,\n :max => (max if supports_max)\n }.compact\n }\n end", "def get_digital_ocean_values\n public_ipv4 = digital_ocean[\"networks\"][\"v4\"].select { |address| address[\"type\"] == \"public\" }\n private_ipv4 = digital_ocean[\"networks\"][\"v4\"].select { |address| address[\"type\"] == \"private\" }\n public_ipv6 = digital_ocean[\"networks\"][\"v6\"].select { |address| address[\"type\"] == \"public\" }\n private_ipv6 = digital_ocean[\"networks\"][\"v6\"].select { |address| address[\"type\"] == \"private\" }\n cloud[:public_ips].concat public_ipv4 + public_ipv6\n cloud[:private_ips].concat private_ipv4 + private_ipv6\n cloud[:public_ipv4] = public_ipv4.first\n cloud[:public_ipv6] = public_ipv6.first\n cloud[:local_ipv4] = private_ipv4.first\n cloud[:local_ipv6] = private_ipv6.first\n cloud[:public_hostname] = digital_ocean[\"name\"]\n cloud[:provider] = \"digital_ocean\"\n end", "def get_rackspace_values\n @cloud_attr_obj.add_ipv4_addr(rackspace[\"public_ipv4\"], :public)\n @cloud_attr_obj.add_ipv4_addr(rackspace[\"local_ipv4\"], :private)\n @cloud_attr_obj.add_ipv6_addr(rackspace[\"public_ipv6\"], :public)\n @cloud_attr_obj.add_ipv6_addr(rackspace[\"local_ipv6\"], :private)\n @cloud_attr_obj.public_hostname = rackspace[\"public_hostname\"]\n @cloud_attr_obj.local_hostname = rackspace[\"local_hostname\"]\n @cloud_attr_obj.provider = \"rackspace\"\n end", "def get_openstack_values\n cloud[:public_ips] << openstack[\"public_ipv4\"]\n cloud[:private_ips] << openstack[\"local_ipv4\"]\n cloud[:public_ipv4] = openstack[\"public_ipv4\"]\n cloud[:public_hostname] = openstack[\"public_hostname\"]\n cloud[:local_ipv4] = openstack[\"local_ipv4\"]\n cloud[:local_hostname] = openstack[\"local_hostname\"]\n cloud[:provider] = openstack[\"provider\"]\n end", "def get_digital_ocean_values\n @cloud_attr_obj.add_ipv4_addr(digital_ocean[\"interfaces\"][\"public\"][0][\"ipv4\"][\"ip_address\"], :public) rescue NoMethodError\n @cloud_attr_obj.add_ipv4_addr(digital_ocean[\"interfaces\"][\"private\"][0][\"ipv4\"][\"ip_address\"], :private) rescue NoMethodError\n @cloud_attr_obj.add_ipv6_addr(digital_ocean[\"interfaces\"][\"public\"][0][\"ipv6\"][\"ip_address\"], :public) rescue NoMethodError\n @cloud_attr_obj.add_ipv6_addr(digital_ocean[\"interfaces\"][\"private\"][0][\"ipv6\"][\"ip_address\"], :private) rescue NoMethodError\n @cloud_attr_obj.provider = \"digital_ocean\"\n end", "def set_aws_connections\n\n @rs_to_aws_cloud_map = {\n 1 => AWS::EC2.new(region: 'us-east-1'),\n 3 => AWS::EC2.new(region: 'us-west-1'),\n 6 => AWS::EC2.new(region: 'us-west-2'),\n 4 => AWS::EC2.new(region: 'ap-southeast-1'),\n 8 => AWS::EC2.new(region: 'ap-southeast-2'),\n 5 => AWS::EC2.new(region: 'ap-northeast-1'),\n 7 => AWS::EC2.new(region: 'sa-east-1'),\n 2 => AWS::EC2.new(region: 'eu-west-1')\n }\nend", "def set_aws_connections\n\n @rs_to_aws_cloud_map = {\n 1 => AWS::EC2.new(region: 'us-east-1'),\n 3 => AWS::EC2.new(region: 'us-west-1'),\n 6 => AWS::EC2.new(region: 'us-west-2'),\n 4 => AWS::EC2.new(region: 'ap-southeast-1'),\n 8 => AWS::EC2.new(region: 'ap-southeast-2'),\n 5 => AWS::EC2.new(region: 
'ap-northeast-1'),\n 7 => AWS::EC2.new(region: 'sa-east-1'),\n 2 => AWS::EC2.new(region: 'eu-west-1')\n }\nend", "def couchbase_ami_mapping\n {\n 'us-west-2': { paravirtual: 'ami-c398c6f3' },\n 'us-west-1': { paravirtual: 'ami-1a554c5f' },\n 'us-east-1': { paravirtual: 'ami-403b4328' },\n 'sa-east-1': { paravirtual: 'ami-59229f44' },\n 'eu-west-1': { paravirtual: 'ami-8129aaf6' },\n 'ap-southeast-1': { paravirtual: 'ami-88745fda' },\n 'ap-northeast-1': { paravirtual: 'ami-6a7b676b' }\n }.with_indifferent_access\n end", "def fetch_mapping(storage, arch)\n versions = fetch_versions\n versions.select { |r| r.root_storage == storage && r.arch == arch }\n .group_by(&:region)\n .map { |k, v| [k, v.map { |i| [i.virtualization, i.ami] }.to_h] }.to_h\n .with_indifferent_access\n end", "def build_flavor_maps\n self.class.const_set(:FLAVOR_LIST, [\n {id: 1, mem: 256, hdd: 10},\n {id: 2, mem: 512, hdd: 20},\n {id: 3, mem: 1024, hdd: 40},\n {id: 4, mem: 2048, hdd: 80},\n {id: 5, mem: 4096, hdd: 160},\n {id: 6, mem: 8192, hdd: 320},\n {id: 7, mem: 15872, hdd: 620},\n {id: 8, mem: 30720, hdd: 1200}\n ].reduce({}) {|list, flavor| list[flavor[:id]] = flavor; list })\n end", "def all\n data = []\n if @ec2_main.settings.openstack \n conn = @ec2_main.environment.connection\n if conn != nil\n begin \n x = conn.flavors.all\n x.each do |y|\n vcpu = nil\n begin \n vcpu = y.vcpus\n rescue\n vcpu = nil \n end\n if vcpu != nil \n data.push(\"#{y.id} (#{y.name} Mem: #{y.ram}MB Disk: #{y.disk}GB VCPU: #{y.vcpus}VCPUs)\")\n else\n data.push(\"#{y.id} (#{y.name} Mem: #{y.ram}MB Disk: #{y.disk}GB)\") \n end\n end\n rescue\n puts \"ERROR: getting all flavors #{$!}\"\n end\n else \n raise \"Connection Error\" \n end \n elsif @ec2_main.settings.google \n conn = @ec2_main.environment.connection\n if conn != nil\n begin \n response = conn.list_machine_types($google_zone)\n\t\t\t if response.status == 200\n\t x = response.body['items']\n\t x.each do |r|\n\t\t\t\t data.push(\"#{r['name']} ( Mem: #{r['memoryMb']}MB Disks: #{r['maximumPersistentDisks']} Disk Size: #{r['maximumPersistentDisksSizeGb']}GB CPUs: #{r['guestCpus']})\")\n \t end\n\t else\n\t \t data = []\n end\n rescue\n puts \"ERROR: getting all flavors #{$!}\"\n end\n else \n raise \"Connection Error\" \n end \t\t\n\t else \n data.push('t1.micro (EBS only Micro 32 or 64-bit, 613 MB, up to 2 compute unit)') \n data.push('m1.small (Small 32 or 64-bit, 1.7 GB, 1 compute unit)')\n data.push('m1.medium (Medium 32 or 64-bit, 3.75 GB, 2 compute unit)')\n data.push('m1.large (Large 64-bit, 7.5 GB, 4 compute unit)')\n data.push('m1.xlarge (Extra Large 64-bit, 15 GB, 8 compute unit)')\n data.push('m3.xlarge (EBS Only Extra Large 64-bit, 15 GB, 13 compute unit)')\n data.push('m3.2xlarge (EBS Only Extra Double Large 64-bit, 30 GB, 26 compute unit)')\n data.push('m2.xlarge (High Memory Extra Large 64-bit, 17.1 GB, 6.5 compute unit)')\n data.push('m2.2xlarge (High Memory Double Extra Large 64-bit, 34.2 GB, 13 compute unit)')\n data.push('m2.4xlarge (High Memory Quadruple Large 64-bit, 68.4 GB, 26 compute unit)')\n data.push('c1.medium (Compute optimized CPU Medium 32 or 64-bit, 1.7 GB, 5 compute unit)')\n data.push('c1.xlarge (Compute optimized CPU Extra Large 64-bit, 7 GB, 20 compute unit)')\n data.push('c3.xlarge (Compute optimized Extra Large 64-bit, 3.75 GB, 7 compute unit)')\n data.push('c3.2xlarge (Compute optimized Double Extra Large 64-bit, 7 GB, 14 compute unit)')\n data.push('c3.4xlarge (Compute optimized Quadruple Large 64-bit, 15 GB, 28 compute unit)')\t\n 
data.push('c3.8xlarge (Compute optimized Eight Large 64-bit, 30 GB, 55 compute unit)')\n data.push('i2.xlarge\t\t (High I/O 1x800 GB SSD, 30.5 GB, 14 compute unit)')\n data.push('i2.2xlarge\t\t (High I/O 2x800 GB SSD, 61 GB, 27 compute unit)')\n data.push('i2.4xlarge\t\t (High I/O 4x800 GB SSD, 122 GB, 53 compute unit)')\n data.push('i2.8xlarge\t \t (High I/O 8x800 GB SSD, 244 GB, 104 compute unit)')\t\t \n data.push('cc1.4xlarge (Cluster Compute Quadruple Extra Large 64-bit, 23 GB, 33.5 compute unit. 10GBit network)')\n data.push('cc2.8xlarge (Cluster Compute Eight Extra Large 64-bit, 60.5 GB, 88 compute unit. 10GBit network)')\n\t\t data.push('g2.2xlarge (Cluster GPU Quadruple Extra Large 64-bit, 15 GB, 26compute unit.)') \n data.push('cg1.4xlarge (Cluster GPU Quadruple Extra Large 64-bit, 22 GB, 33.5 compute unit. 10GBit network)') \n data.push('hi1.4xlarge (High I/O Quadruple Extra Large 64-bit, 60.5 GB, 2x1024GB SSD, 35 compute unit. 10GBit network)')\n\t\t data.push('hs1.8xlarge (High I/O Quadruple Extra Large 64-bit, 117 GB, 24x2048GB SSD, 35 compute unit. 10GBit network)')\n \t\t\n end \n return data\n end", "def encryption_properties(disks)\n properties = Mash.new\n disks.each do |disk|\n drive = disk[\"driveletter\"]\n property = Mash.new\n property[:encryption_status] = disk[\"conversionstatus\"] ? CONVERSION_STATUS[disk[\"conversionstatus\"]] : \"\"\n properties[drive] = property\n end\n properties\n end", "def physical_memory_info\n\n if PlatformInfo.linux?\n\n {\n :total => proc_meminfo['MemTotal'],\n :used => proc_meminfo['MemTotal'] - proc_meminfo['MemFree'],\n :cached => proc_meminfo['Cached'],\n :free => proc_meminfo['MemFree'] \n }\n\n elsif PlatformInfo.osx?\n\n hw_memsize = capture_command_output('sysctl', 'hw.memsize')[0]\n total_memory = hw_memsize.split(':')[1].strip.to_i\n\n # Parse the header information produced by top -l 1 to figure out the\n # physical memory stats.\n top = capture_command_output('top', '-l', '1')\n top_phys_mem = top.select { |t| t =~ /^PhysMem\\:/ }.first.strip.gsub(/^PhysMem\\:\\s+/, '')\n top_phys_mem_pairs = top_phys_mem.split(',')\n\n phys_mem = {}\n top_phys_mem_pairs.each do |top_phys_mem_pair|\n items = top_phys_mem_pair.strip.split(/\\s+/)\n key = items[1].gsub(/\\W/, '')\n value = items[0].to_i * 1024 * 1024 # Convert MB to bytes\n phys_mem[key] = value\n end\n\n {\n :total => total_memory,\n :used => phys_mem['used'],\n :free => phys_mem['free']\n }\n\n else\n unsupported_platform\n end\n\n end", "def attrs_from_props\n @doc.find('//apps:property').each do |entry|\n prop_name = entry.attributes['name'].to_sym\n if @map.keys.include?(prop_name)\n instance_variable_set \"@#{@map[prop_name]}\", check_value(entry.attributes['value'])\n end\n end\n end", "def process_properties(properties); end", "def property_map\n self.class.properties.map do |name, opts|\n if opts\n yreq = opts[:req] ? 
:req : :opt\n [\"@#{ name }\", yreq] if yreq\n end\n end.compact\n end", "def customize_vm(v)\n mem = `grep 'MemTotal' /proc/meminfo | sed -e 's/MemTotal://' -e 's/ kB//'`.to_i / 1024 / 8\n cpus = 4\n v.customize [\"modifyvm\", :id, \"--memory\", mem]\n v.customize [\"modifyvm\", :id, \"--cpus\", cpus]\nend", "def cost_map\n flavor_os_families.each_with_object({}) do |f, hash|\n hash[f.os_family.name] = f.hourly_cost\n end\n end", "def parse\n physical_storage_families\n physical_storages\n storage_resources\n host_initiators\n san_addresses\n storage_services\n cloud_volumes\n volume_mappings\n wwpn_candidates\n end", "def configure_virtualbox(host,vmcfg)\n host.vm.provider \"virtualbox\" do |pcfg|\n pcfg.memory = vmcfg['vm']['memory'] if vmcfg['vm']['memory']\n pcfg.customize [\"modifyvm\", :id, \"--cpus\", vmcfg['vm']['cpu']] if vmcfg['vm']['cpu'] \n end\nend", "def ec2_instance_data # rubocop:disable Metrics/MethodLength, Metrics/AbcSize\n i = {\n :placement => {\n :availability_zone => config[:availability_zone]\n },\n :instance_type => config[:instance_type],\n :ebs_optimized => config[:ebs_optimized],\n :image_id => config[:image_id],\n :key_name => config[:aws_ssh_key_id],\n :subnet_id => config[:subnet_id],\n :private_ip_address => config[:private_ip_address]\n }\n i[:block_device_mappings] = block_device_mappings unless block_device_mappings.empty?\n i[:security_group_ids] = config[:security_group_ids] if config[:security_group_ids]\n i[:user_data] = prepared_user_data if prepared_user_data\n if config[:iam_profile_name]\n i[:iam_instance_profile] = { :name => config[:iam_profile_name] }\n end\n if !config.fetch(:associate_public_ip, nil).nil?\n i[:network_interfaces] =\n [{\n :device_index => 0,\n :associate_public_ip_address => config[:associate_public_ip],\n :delete_on_termination => true\n }]\n # If specifying `:network_interfaces` in the request, you must specify\n # network specific configs in the network_interfaces block and not at\n # the top level\n if config[:subnet_id]\n i[:network_interfaces][0][:subnet_id] = i.delete(:subnet_id)\n end\n if config[:private_ip_address]\n i[:network_interfaces][0][:private_ip_address] = i.delete(:private_ip_address)\n end\n if config[:security_group_ids]\n i[:network_interfaces][0][:groups] = i.delete(:security_group_ids)\n end\n end\n i\n end", "def property_map\n @property_map ||= properties.index_by(&:ticket_property_type)\n end", "def tf_vars_aws\n {\n aws_region: provider.region,\n route53_zone_main_name: infra.dns.domain,\n route53_zone_this_name: infra.dns.subdomain,\n ec2_instance_type: provider.instance.type,\n ec2_key_pair: provider.instance.key_pair,\n ec2_tags: provider.instance.tags,\n ec2_ami_distro: provider.instance.ami_distro\n # lambda_filename: infra.lambda_filename\n }\n end", "def set_attributes\n self.web_name = AppConfig.cloud[:name]\n if self.custom_image_id\n ci = CustomImage.find_by_id!(custom_image_id)\n self.cost = ci.price\n self.region = ci.region\n self.image_id = ci.remote_image_id\n\n #Map product_type or size_type since that is being used across the app.\n if ci.hosting == \"AWS\"\n pt = ProductType.find_by_memory!(ci.ram)\n self.product_type = pt.name\n self.size_type = nil\n elsif ci.hosting == \"DigitalOcean\"\n st = SizeType.find_by_memory!(ci.ram)\n self.size_type = st.size_id\n self.product_type = nil\n end\n else\n if type == \"AWS\"\n self.cost = ProductType.find_by_name(params[:product][:product_type]).cost_per_month\n elsif type == \"DigitalOcean\"\n self.cost = 
SizeType.find_by_size_id(params[:product][:size_type]).cost_per_month\n end\n end\n\n self.status = 'pending'\n end", "def image_sizes\n { :small => 'x110',\n :medium => 'x208',\n :large => 'x413',\n :big => 'x654',\n :storage => 'x867'\n }\n end", "def load_vms!\n\t\t\tresult = {}\n\n\t\t\t# Load the VM UUIDs from the local data store\n\t\t\t(local_data[:active] || {}).each do |name, desc|\n\t\t\t\tresult[name.to_sym] = VagrantAWS::VM.find(desc, self, name.to_sym)\n\t\t\tend\n\n\t\t\t# For any VMs which aren't created, create a blank VM instance for\n\t\t\t# them\n\t\t\tall_keys = config.vms\n\t\t\tall_keys = [DEFAULT_VM] if all_keys.empty?\n\t\t\tall_keys.each do |name|\n\t\t\t\tresult[name] = VagrantAWS::VM.new(name, self, config.for_vm(name)) if !result.has_key?(name)\n\t\t\tend\n\n\t\t\tresult\n\t\tend", "def list_kvms\n info = Hash[\n *Dir.glob(File.join(KVM_HOME, 'storage', '*.qcow2')).map{|dir|\n key = File.basename(dir).sub('.qcow2', '')\n [key, {:address => kvm_ip(key), :type => kvm_type(key)}]\n }.sort{|a,b|\n a.first <=> b.first\n }.flatten\n ]\n info.each do |name, info|\n puts \"#{name}\"\n puts \" Type: #{info[:type]}\"\n puts \" Address: #{info[:address]}\"\n end\nend", "def field_mapping_for_cloud_exposures\n {\n 'application-server-software' => {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"application_server_software_#{x[\"firstObservation\"][\"configuration\"][\"applicationServerSoftware\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Exposed App Server Software: #{x[\"firstObservation\"][\"configuration\"][\"applicationServerSoftware\"]}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"application_server_software_#{x[\"firstObservation\"][\"configuration\"][\"applicationServerSoftware\"]}\".to_string_identifier }\n }\n ]\n },\n 'bacnet-servers' => {}, \n '-certificate-advertisements' => {}, \n 'development-environments' => {},\n 'dns-servers' => {}, \n '-domain-control-validated-certificate-advertisements' => {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"comain_control_validated_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Domain Control Validated Certificate: #{JSON.pretty_generate(x[\"certificate\"])}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"comain_control_validated_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n }\n ]\n },\n 'ethernet-ip-servers' => {}, \n 'expired-when-scanned-certificate-advertisements' => {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"expired_when_scanned_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Expired Certificate: #{JSON.pretty_generate(x[\"certificate\"])}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"expired_when_scanned_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n }\n ]\n },\n 'ftp-servers' => {}, \n 'ftps-servers' => {}, \n '-healthy-certificate-advertisements' => {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: 
\"scanner_identifier\", proc: lambda{|x| \n \"healthy_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Healthy Certificate Advertisement: #{JSON.pretty_generate(x[\"certificate\"])}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"healthy_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n }\n ]\n },\n 'insecure-signature-certificate-advertisements' => {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"insecure_signature_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Insecure Signature Certificate: #{JSON.pretty_generate(x[\"certificate\"])}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"insecure_signature_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n }\n ]\n },\n 'internal-ip-address-advertisements'=> {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"internal_ip_address_advertisements_#{x[\"cloudAssetId\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Detected Internal IP advertisement with configuration: #{JSON.pretty_generate(x[\"firstObservation\"][\"configuration\"])}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"internal_ip_address_advertisements_#{x[\"cloudAssetId\"]}\".to_string_identifier }\n }\n ]\n },\n 'load-balancers' => {},\n 'long-expiration-certificate-advertisements' => {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"long_expiration_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Long Expiration Certificate: #{JSON.pretty_generate(x[\"certificate\"])}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"long_expiration_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n }\n ]\n },\n 'memcached-servers' => {}, \n 'modbus-servers' => {}, \n 'ms-sql-servers' => {}, \n 'my-sql-servers' => {}, \n 'net-bios-name-servers' => {},\n 'pop3-servers' => {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"detected_server_pop3_#{x[\"cloudAssetId\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Detected Pop3 Server with configuration: #{JSON.pretty_generate(x[\"firstObservation\"][\"configuration\"])}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"detected_server_pop3_#{x[\"cloudAssetId\"]}\".to_string_identifier }\n }\n ]\n }, \n 'rdp-servers' => {},\n 'self-signed-certificate-advertisements' => {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"self_signed_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Self Signed Certificate: 
#{JSON.pretty_generate(x[\"certificate\"])}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"self_signed_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n }\n ]\n },\n 'server-software' => {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"server_software_#{x[\"firstObservation\"][\"configuration\"][\"serverSoftware\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Exposed Server Software: #{x[\"firstObservation\"][\"configuration\"][\"serverSoftware\"]}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"server_software_#{x[\"firstObservation\"][\"configuration\"][\"serverSoftware\"]}\".to_string_identifier }\n }\n ]\n },\n 'short-key-certificate-advertisements' => {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"shert_key_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Short Key Certificate: #{JSON.pretty_generate(x[\"certificate\"])}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"shert_key_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n }\n ]\n },\n 'sip-servers' => {},\n 'smb-servers' => {},\n 'smtp-servers' => {},\n 'snmp-servers' => {},\n 'ssh-servers' => {},\n 'telnet-servers' => {},\n 'upnp-servers' => {},\n 'unencrypted-logins' => {},\n 'unencrypted-ftp-servers' => {},\n 'web-servers' => {},\n 'wildcard-certificate-advertisements' => {\n 'asset' => [],\n 'vuln' => [\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"wildcard_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n },\n ],\n 'vuln_def' => [ \n { action: \"proc\", target: \"description\", proc: lambda{|x| \n \"Wildcard Certificate: #{JSON.pretty_generate(x[\"certificate\"])}\" } },\n { action: \"proc\", target: \"scanner_identifier\", proc: lambda{|x| \n \"wildcard_certificate_advertisement_#{x[\"certificate\"][\"id\"]}\".to_string_identifier }\n }\n ]\n },\n 'vnc-servers' => {},\n 'vx-works-servers' => {}\n }\n end", "def specific_machine_options(component, _count = nil)\n return [] unless @node['provisioning'][component]\n options = []\n if @node['provisioning'][component]['flavor']\n options << {\n bootstrap_options: {\n instance_type: @node['provisioning'][component]['flavor']\n }\n }\n end\n # if @node['provisioning'][component]['security_group_ids']\n if Server::Helpers::Component.security_group_ids(component)\n options << {\n bootstrap_options: {\n security_group_ids: Server::Helpers::Component.security_group_ids(component)\n }\n }\n end\n if @node['provisioning'][component]['image_id']\n options << { image_id: @node['provisioning'][component]['image_id'] }\n end\n if @node['provisioning'][component]['aws_tags']\n options << { aws_tags: @node['provisioning'][component]['aws_tags'] }\n end\n # Specify more specific machine_options to add\n options\n end", "def zfs_properties(name)\n properties || parse_zfs_properties(zfs_get_properties(name))\nend", "def jvm_property(name,value)\n cmd = <<-END\nAdminTask.setJVMProperties('[-nodeName #{resource[:nodename]} -serverName #{resource[:name]} -#{name} #{value}]')\n END\n cmd\n end", "def build_properties\n properties.each do 
|key,val|\n prop = listing_properties.find_or_initialize_by(key:key)\n prop.value = val\n\n end\n end", "def cpus(num)\n Vagrant.configure(2) do |config|\n config.vm.define @name do |m|\n m.vm.provider :virtualbox do |vb|\n vb.cpus = num\n end\n end\n end\n end", "def to_hash\n hash = {}\n StoragePhysicalDiskAllOf.attribute_map.each_pair do |attr, param|\n value = self.send(attr)\n if value.nil?\n is_nullable = StoragePhysicalDiskAllOf.openapi_nullable.include?(attr)\n next if !is_nullable || (is_nullable && !instance_variable_defined?(:\"@#{attr}\"))\n end\n\n hash[param] = _to_hash(value)\n end\n hash\n end", "def assign_properties\n self.properties ||= {}\n listing_properties.each do |prop|\n self.properties[prop.key] ||= prop.value\n end\n end", "def configure_vmware_fusion(host,vmcfg)\n host.vm.provider \"vmware_fusion\" do |pcfg|\n pcfg.vmx['memsize'] = vmcfg['vm']['memory'] if vmcfg['vm']['memory']\n pcfg.vmx['numvcpus'] = vmcfg['vm']['cpu'] if vmcfg['vm']['cpu']\n end\nend", "def oci_values\n oci[\"metadata\"][\"network\"][\"interface\"].each { |vnic| @cloud_attr_obj.add_ipv4_addr(vnic[\"privateIp\"], :private) }\n @cloud_attr_obj.local_hostname = oci[\"metadata\"][\"compute\"][\"hostname\"]\n @cloud_attr_obj.provider = \"oci\"\n end", "def prepare_vm_config\n inh_tags = %i(CLUSTER_ID CLUSTER_NAME HOST_ID HOST_NAME)\n @@storage[:vm][:inh_tags] = @@storage[:vm][:tags].clone\n\n # Get inherited tags into special hash for VMs only\n @@storage[:vm][:inh_tags].keep_if do |key, value|\n inh_tags.include? key\n end\n # Remove them from original hash\n @@storage[:vm][:inh_tags].each do |key, value|\n @@storage[:vm][:tags].delete(key)\n end\n end", "def get_kvm_guest_info(guest)\n info = {}\n result = `virsh dumpxml #{guest}`\n result.split(\"\\n\").each do |line|\n if line =~ /source file='(.+)'/\n img_path = $1\n if File.exists?(img_path)\n # nVentory expects the value to be in KB\n info['vmimg_size'] = File.stat(img_path).size.to_i / 1024\n # how to calculate this?\n # info['vmspace_used'] = ???\n end\n end\n end\n return info.clone\n end", "def get_old_azure_mapping(size)\n\n old_size_map = '{\"XS\":\"Standard_A0\",\"S\":\"Standard_A1\",\"M\":\"Standard_A2\",\"L\":\"Standard_A3\",\"XL\":\"Standard_A4\",\"XXL\":\"Standard_A5\",\"3XL\":\"Standard_A6\",\"4XL\":\"Standard_A7\",\"S-CPU\":\"Standard_D1\",\"M-CPU\":\"Standard_D2\",\"L-CPU\":\"Standard_D3\",\"XL-CPU\":\"Standard_D4\",\"8XL-CPU\":\"Standard_D11\",\"9XL-CPU\":\"Standard_D12\",\"10XL-CPU\":\"Standard_D13\",\"11XL-CPU\":\"Standard_D14\",\"S-MEM\":\"Standard_DS1\",\"M-MEM\":\"Standard_DS2\",\"L-MEM\":\"Standard_DS3\",\"XL-MEM\":\"Standard_DS4\",\"8XL-MEM\":\"Standard_DS11\",\"9XL-MEM\":\"Standard_DS12\",\"10XL-MEM\":\"Standard_DS13\",\"11XL-MEM\":\"Standard_DS14\"}'\n\n jold_size_map = JSON.parse(old_size_map)\n\n return(jold_size_map[\"#{size}\"])\n\n end", "def config_options\n {\n 'datacenter' => new_resource.datacenter,\n 'template_path' => new_resource.template_path,\n 'power_on' => true,\n 'datastore' => new_resource.datastore,\n 'wait' => true,\n 'hostname' => new_resource.hostname,\n 'name' => new_resource.name,\n 'customization_spec' => {\n 'domain' => new_resource.domain,\n 'ipsettings' => {\n 'ip' => new_resource.ip || node['vcac_vm']['ip'],\n 'gateway' => new_resource.gateway,\n 'subnetMask' => new_resource.subnet_mask,\n },\n }\n }\nend", "def add_properties(source)\n p = []\n prop_list = {\n \"maxbw\" => source[:maxbw],\n \"priority\" => source[:priority],\n \"protection\" => source[:protection],\n \"l2-type\" => 
source[:l2_type],\n \"vlanid\" => source[:vlanid],\n \"vni\" => source[:vni],\n \"uuid\" => source[:uuid]\n }\n prop_list.each do |key, value|\n next if (value == nil) || (value == \"\")\n p << \"#{key}=#{value}\"\n end\n return [] if p.empty?\n properties = Array[\"-p\", p.join(\",\")]\n end", "def build_maps(fog)\n if fog.respond_to?(:images) && fog.respond_to?(:flavors)\n @image_map = fog.images.reduce({}) do |c,e|\n name, ver = map_image_id(e)\n c[name] ||= []\n c[name] << ver\n c\n end\n @flavor_map = fog.flavors.reduce({}) do |c,e|\n c.merge({e.id => { ram: e.ram, disk: e.disk }})\n end\n else\n @image_map = {}\n @flavor_map = {}\n end\n end", "def xen_capacity\n return nil unless self.is_xen_dom0? and !self.model.nil?\n\n cap = {}\n cap[:cpu_cap] = self.model.cpu_cores\n cap[:mem_cap] = self.model.megabytes_memory\n \n cap[:cpu_use] = 0\n cap[:mem_use] = 0\n\n self.xen_guests.each do |n|\n unless n.guest.model.nil?\n cap[:cpu_use] += n.guest.model.cpu_cores\n cap[:mem_use] += n.guest.model.megabytes_memory\n else\n cap[:cpu_use] += 2\n cap[:mem_use] += 4096\n end\n end\n\n cap\n end", "def vm_size(size)\n if parent.properties.nil? or parent.properties.hardware_profile.nil?\n hardware_profile vm_size: size\n else\n parent.properties.hardware_profile.vm_size = size\n end\n end", "def optimize_puppetdb_settings(resources, with_external_postgresql)\n output_minimum_system_requirements_error_and_exit unless meets_minimum_system_requirements?(resources)\n\n percent_cpu_threads = 75\n minimum_cpu_threads = 1\n maximum_cpu_threads = resources['cpu'] - 1\n percent_mb_puppetdb = with_external_postgresql ? 50 : 25\n percent_mb_buffers = with_external_postgresql ? 0 : 25\n minimum_mb_puppetdb = fit_to_memory(resources['ram'], 512, 1024, 2048)\n maximum_mb_puppetdb = 8192\n minimum_mb_buffers = fit_to_memory(resources['ram'], 2048, 3072, 4096)\n maximum_mb_buffers = 16384\n minimum_mb_os = reserved_memory_os\n # minimum_mb_g1gc = 2048\n\n minimum_mb_buffers = with_external_postgresql ? 0 : minimum_mb_buffers\n\n settings = {}\n totals = {}\n\n if with_external_postgresql\n mb_buffers = 0\n else\n available_mb_for_buffers = resources['ram'] - minimum_mb_os\n if available_mb_for_buffers < minimum_mb_buffers\n Puppet.debug(\"Error: available_mb_for_buffers: #{available_mb_for_buffers} < minimum_mb_buffers: #{minimum_mb_buffers}\")\n output_minimum_system_requirements_error_and_exit\n end\n mb_buffers = clamp_percent_of_resource(resources['ram'], percent_mb_buffers, minimum_mb_buffers, maximum_mb_buffers)\n settings['puppet_enterprise::profile::database::shared_buffers'] = \"#{mb_buffers}MB\"\n end\n\n command_processing_threads = clamp_percent_of_resource(resources['cpu'], percent_cpu_threads, minimum_cpu_threads, maximum_cpu_threads)\n settings['puppet_enterprise::puppetdb::command_processing_threads'] = command_processing_threads\n\n available_mb_for_puppetdb = resources['ram'] - minimum_mb_os - mb_buffers\n\n if available_mb_for_puppetdb < minimum_mb_puppetdb\n Puppet.debug(\"Error: available_mb_for_puppetdb: #{available_mb_for_puppetdb} < minimum_mb_puppetdb: #{minimum_mb_puppetdb}\")\n output_minimum_system_requirements_error_and_exit\n end\n\n mb_puppetdb = clamp_percent_of_resource(resources['ram'], percent_mb_puppetdb, minimum_mb_puppetdb, maximum_mb_puppetdb)\n java_args_for_puppetdb = { 'Xms' => \"#{mb_puppetdb}m\", 'Xmx' => \"#{mb_puppetdb}m\" }\n # java_args_for_puppetdb['XX:+UseG1GC'] = '' if (jruby_9k_enabled? 
== false) && (mb_puppetdb >= minimum_mb_g1gc)\n settings['puppet_enterprise::profile::puppetdb::java_args'] = java_args_for_puppetdb\n\n cpu_used = command_processing_threads\n ram_used = mb_buffers + mb_puppetdb\n totals['CPU'] = { 'total' => resources['cpu'], 'used' => cpu_used }\n totals['RAM'] = { 'total' => resources['ram'], 'used' => ram_used }\n\n [settings, totals]\n end", "def to_hash\n hash = super\n VirtualizationVmwareVirtualDisk.attribute_map.each_pair do |attr, param|\n value = self.send(attr)\n if value.nil?\n is_nullable = VirtualizationVmwareVirtualDisk.openapi_nullable.include?(attr)\n next if !is_nullable || (is_nullable && !instance_variable_defined?(:\"@#{attr}\"))\n end\n\n hash[param] = _to_hash(value)\n end\n hash\n end", "def load_attrs\n self.availability_zone = cluster_config[:availability_zone] || Settings.availability_zone\n self.image_id = cluster_config[:image_id] if cluster_config[:image_id]\n self.instance_type = cluster_config[:instance_type] if cluster_config[:instance_type]\n # self.deletes_on_termination = cluster_config[:deletes_on_termination] if cluster_config[:deletes_on_termination]\n end", "def required_properties\n {\n \"cc\" => {\n \"internal_service_hostname\" => \"cc.service.cf.internal\"\n },\n \"loggregator\" => {\n \"etcd\" => {\n \"machines\" => []\n },\n \"uaa\" => {\n \"client_secret\" => \"secret\"\n }\n },\n \"system_domain\" => \"bosh-lite.com\",\n }\n end", "def add_resource_pools_to_manifest(manifest)\n if any_service_nodes?\n config.each do |cluster|\n server_count = cluster[\"count\"]\n server_flavor = cluster[\"flavor\"]\n resource_pool = {\n \"name\" => cluster_name(cluster),\n \"network\" => \"default\",\n \"size\" => server_count,\n \"stemcell\" => {\n \"name\" => system_config.stemcell_name,\n \"version\" => system_config.stemcell_version\n },\n # TODO how to create \"cloud_properties\" per-provider?\n \"cloud_properties\" => {\n \"instance_type\" => server_flavor\n },\n \"persistent_disk\" => system_config.common_persistent_disk\n }\n manifest[\"resource_pools\"] << resource_pool\n end\n end\n end", "def optimize_external_postgresql_settings(resources)\n output_minimum_system_requirements_error_and_exit unless meets_minimum_system_requirements?(resources)\n\n percent_mb_buffers = 25\n minimum_mb_buffers = fit_to_memory(resources['ram'], 2048, 3072, 4096)\n maximum_mb_buffers = 16384\n minimum_mb_os = reserved_memory_os\n\n settings = {}\n totals = {}\n\n available_mb_for_buffers = resources['ram'] - minimum_mb_os\n if available_mb_for_buffers < minimum_mb_buffers\n Puppet.debug(\"Error: available_mb_for_buffers: #{available_mb_for_buffers} < minimum_mb_buffers: #{minimum_mb_buffers}\")\n output_minimum_system_requirements_error_and_exit\n end\n\n mb_buffers = clamp_percent_of_resource(resources['ram'], percent_mb_buffers, minimum_mb_buffers, maximum_mb_buffers)\n settings['puppet_enterprise::profile::database::shared_buffers'] = \"#{mb_buffers}MB\"\n\n ram_used = mb_buffers\n totals['RAM'] = { 'total' => resources['ram'], 'used' => ram_used }\n\n [settings, totals]\n end", "def cloud_config\n microbosh_config[\"cloud\"]\n end", "def parse\n physical_storage_families\n physical_storages\n storage_resources\n storage_services\n cloud_volumes\n end", "def remove_vpc_properties\n properties = []\n properties << :InstanceTenancy\n properties << :Tags\n add_patch Patches::RemoveProperty.new 'AWS::EC2::VPC', properties\n end", "def get_all_vpcs(ec2)\n all_vpcs = {}\n ec2.vpcs.each do |vpc|\n vpc_name=\"\"\n vpc.tags.each do |tag| 
\n vpc_name = tag.value if tag.key == \"Name\" \n end\n ## hash of hashes\n all_vpcs[vpc_name] = {\n 'cidr_block' => vpc.data['cidr_block'], \n 'vpc_id' => vpc.data['vpc_id'], \n }\n end\n all_vpcs\nend", "def create_instances\n min_count = max_count = @bs.number_of_nodes\n puts \"\\nCreating #{max_count} on-demand instance(s)\"\n options = {\n 'ClientToken' => generate_token,\n 'KeyName' => Chef::Config[:knife][:aws_ssh_key_id],\n 'InstanceType' => @bs.flavor,\n 'SubnetId' => @bs[:novpc] ? nil : @bs.subnet_id,\n 'Placement.AvailabilityZone' => @bs.mixins.az.data,\n 'SecurityGroupId' => @bs.mixins.sg.data\n }\n options['EbsOptimized'] = !! @bs[:ebs_optimized]\n\n ## REVIEW\n if ami.root_device_type == \"ebs\"\n ami_map = ami.block_device_mapping.first\n block_device_mapping = {\n 'DeviceName' => ami_map['deviceName'],\n 'Ebs.VolumeSize' => ami_map['volumeSize'].to_s,\n 'Ebs.DeleteOnTermination' => ami_map['deleteOnTermination']\n }\n options['BlockDeviceMapping'] = [block_device_mapping]\n end\n\n ## Optionally only include mapped devices\n ## This way we get all of the ephemeral drives, some unmapped however\n if @bs.mixins.volume.data[:ephemeral_available]\n ephmap = @bs.mixins.volume.data.ephemeral_available.each_with_index.map do |d,i|\n {\n 'VirtualName' => \"ephemeral#{i}\",\n 'DeviceName' => d\n }\n end\n options['BlockDeviceMapping'].concat( ephmap )\n end\n\n if (max_count == 1) and @bs[:private_ip_address]\n options['PrivateIpAddress'] = @bs.private_ip_address\n puts \"Assigning IP ADDRESS : #{options['PrivateIpAddress']}\"\n end\n\n if Chef::Config[:knife][:aws_user_data]\n begin\n options['UserData']= File.read(Chef::Config[:knife][:aws_user_data])\n rescue\n ui.warn(\"Cannot read #{Chef::Config[:knife][:aws_user_data]}:\"\\\n \" #{$!.inspect}. Ignoring option.\")\n end\n end\n\n # -----------------------------------------------------------------\n tries = 5\n print_table(options, 'Launch Config')\n begin\n puts \"\\nSending request...\"\n response = connection.run_instances(@bs.image, min_count,\n max_count, options)\n ui.msg(response.inspect)\n rescue Exception => e\n ui.warn(\"#{e.message}\\nException creating instances\")\n if (tries -= 1) <= 0\n ui.warn(\"\\n\\nMax tries reached. 
Exiting.\\n\\n\")\n exit 1\n else\n ui.msg(\"Trying again.\\n\")\n retry\n end\n end\n # now we have our servers\n instances = response.body['instancesSet']\n # select only instances that have instanceId key and collect those ids\n # into an array\n @bs[:instance_ids] =\n instances.select {|i| i.has_key?('instanceId')}.collect do |i|\n i['instanceId']\n end\n\n puts \"\\nNumber of instances started: #{@bs.instance_ids.size}\\n\"\n sleep 10\n puts \"Getting servers..\"\n # collect an array of servers retrieved based on the instance ids we\n # obtained above\n @bs[:servers] = @bs.instance_ids.collect do |id|\n begin\n server = connection.servers.get(id)\n rescue Exception => e\n sleep 7\n retry\n end\n raise Ec2Error.new(\"server #{id} was nil\") if server.nil?\n server\n end\n end", "def new_ec2_instance_attrs(ami_id, security_group_ids)\n attrs = { image_id: ami_id, key_name: self.ssh_identity, security_group_ids: security_group_ids,\n instance_type: self.instance_type, subnet: self.vpc_subnet_id }\n attrs[:availability_zone] = self.zone if self.zone\n attrs[:associate_public_ip_address] = true if self.vpc_subnet_id\n attrs\n end", "def remove_instance_properties\n properties = []\n properties << :DisableApiTermination\n properties << :KernelId\n properties << :Monitoring\n properties << :PlacementGroupName\n properties << :PrivateIpAddress\n properties << :RamDiskId\n properties << :SourceDestCheck\n properties << :Tenancy\n add_patch Patches::RemoveProperty.new 'AWS::EC2::Instance', properties\n end", "def add_physical_disks\n add_collection(physical_infra, :physical_disks) do |builder|\n builder.add_properties(\n :manager_ref => %i(physical_storage ems_ref),\n :manager_ref_allowed_nil => %i(ems_ref)\n )\n end\n end", "def network_configuration\n dns = settings.provider.network.dns\n dns = dns.split(\",\") if dns.is_a?(String)\n {\n \"ip\"=>public_ip,\n \"netmask\"=>settings.provider.network.netmask,\n \"gateway\"=>settings.provider.network.gateway,\n \"dns\"=>dns,\n \"cloud_properties\"=>{\n \"name\"=>settings.provider.network.name\n }\n }\n end", "def get_transfer_properties(keyword_filter = DEFAULT_PARAMS_FILTER, strip_filter = false)\n result = {}\n STANDARD_PROPERTIES.each{|prop| result[prop.gsub(\"SS_\",\"RPM_\")] = @params[prop] }\n @params.each{|k,v| result[strip_filter ? k.gsub(keyword_filter,\"\") : k] = v if k.include?(keyword_filter) }\n result\nend", "def get_hash\n requires :size, :virtual_disk_type\n {\n :Size => size,\n :VirtualDiskType => virtual_disk_type\n }\n end", "def monitor_all_vms\n totalmemory = 0\n totalcpu = 0\n\n # Get last cloudwatch monitoring time\n host_obj = OpenNebula::Host.new_with_id(@host_id,\n OpenNebula::Client.new)\n host_obj.info\n cw_mon_time = host_obj[\"/HOST/TEMPLATE/CWMONTIME\"]\n capacity = host_obj.to_hash[\"HOST\"][\"TEMPLATE\"][\"CAPACITY\"]\n if !capacity.nil? && Hash === capacity\n capacity.each{ |name, value|\n name = parse_inst_type(name)\n cpu, mem = instance_type_capacity(name)\n totalmemory += mem * value.to_i\n totalcpu += cpu * value.to_i\n }\n else\n raise \"you must define CAPACITY section properly! 
check the template\"\n end\n\n host_info = \"HYPERVISOR=ec2\\n\"\n host_info << \"PUBLIC_CLOUD=YES\\n\"\n host_info << \"PRIORITY=-1\\n\"\n host_info << \"TOTALMEMORY=#{totalmemory.round}\\n\"\n host_info << \"TOTALCPU=#{totalcpu}\\n\"\n host_info << \"CPUSPEED=1000\\n\"\n host_info << \"HOSTNAME=\\\"#{@host}\\\"\\n\"\n\n vms_info = \"VM_POLL=YES\\n\"\n\n #\n # Add information for running VMs (running and pending).\n #\n usedcpu = 0\n usedmemory = 0\n\n # Build an array of VMs and last_polls for monitoring\n vpool = OpenNebula::VirtualMachinePool.new(OpenNebula::Client.new,\n OpenNebula::VirtualMachinePool::INFO_ALL_VM)\n vpool.info\n onevm_info = {}\n\n\n if !cw_mon_time\n cw_mon_time = Time.now.to_i\n else\n cw_mon_time = cw_mon_time.to_i\n end\n\n do_cw = (Time.now.to_i - cw_mon_time) >= 360\n vpool.each{\n |vm| onevm_info[vm.deploy_id] = vm\n }\n\n\n work_q = Queue.new\n @ec2.instances.each{|i| work_q.push i }\n\t\tworkers = (0...20).map do\n Thread.new do\n begin\n while i = work_q.pop(true)\n next if i.state.name != 'pending' && i.state.name != 'running'\n one_id = i.tags.find {|t| t.key == 'ONE_ID' }\n one_id = one_id.value if one_id\n poll_data=parse_poll(i, onevm_info[i.id], do_cw, cw_mon_time)\n vm_template_to_one = vm_to_one(i)\n vm_template_to_one = Base64.encode64(vm_template_to_one).gsub(\"\\n\",\"\")\n vms_info << \"VM=[\\n\"\n vms_info << \" ID=#{one_id || -1},\\n\"\n vms_info << \" DEPLOY_ID=#{i.instance_id},\\n\"\n vms_info << \" VM_NAME=#{i.instance_id},\\n\"\n vms_info << \" IMPORT_TEMPLATE=\\\"#{vm_template_to_one}\\\",\\n\"\n vms_info << \" POLL=\\\"#{poll_data}\\\" ]\\n\"\n if one_id\n name = i.instance_type\n cpu, mem = instance_type_capacity(name)\n usedcpu += cpu\n usedmemory += mem\n end\n end\n rescue Exception => e\n end\n end\n end; \"ok\"\n workers.map(&:join); \"ok\"\n\n host_info << \"USEDMEMORY=#{usedmemory.round}\\n\"\n host_info << \"USEDCPU=#{usedcpu.round}\\n\"\n host_info << \"FREEMEMORY=#{(totalmemory - usedmemory).round}\\n\"\n host_info << \"FREECPU=#{(totalcpu - usedcpu).round}\\n\"\n\n if do_cw\n host_info << \"CWMONTIME=#{Time.now.to_i}\"\n else\n host_info << \"CWMONTIME=#{cw_mon_time}\"\n end\n\n puts host_info\n puts vms_info\n end", "def get_softlayer_values\n cloud[:public_ipv4] = softlayer[\"public_ipv4\"]\n cloud[:local_ipv4] = softlayer[\"local_ipv4\"]\n cloud[:public_ips] << softlayer[\"public_ipv4\"] if softlayer[\"public_ipv4\"]\n cloud[:private_ips] << softlayer[\"local_ipv4\"] if softlayer[\"local_ipv4\"]\n cloud[:public_hostname] = softlayer[\"public_fqdn\"]\n cloud[:provider] = \"softlayer\"\n end", "def inferred_properties(hash)\n service_type = self.service_type\n case service_type\n when PrismeService::TOMCAT\n self.service_properties.each do |sp|\n if (sp.key.eql?(PrismeService::CARGO_REMOTE_URL))\n url = sp.value\n hash[PrismeService::CARGO_HOSTNAME] = (URI url).host\n hash[PrismeService::CARGO_SERVLET_PORT] = ((URI url).port).to_s\n end\n end\n end\n end", "def properties\n # vendor = Vendor.find(params[:vendor_id])\n search_params = { vendor_id: params[:vendor_id].to_i, results_per_page: 150 }\n search_params[:p] = params[:p].to_i if params[:p]\n pd = PropertySearchApi.new(filtered_params: search_params )\n pd.query[:size] = 1000\n results, status = pd.filter\n results[:results].each { |e| e[:address] = PropertyDetails.address(e) }\n response = results[:results].map { |e| e.slice(:udprn, :address) }\n response = response.sort_by{ |t| t[:address] }\n #Rails.logger.info \"sending response for vendor properties -> 
#{response.inspect}\"\n render json: response, status: status\n end", "def cpus\n flavor[2]\n end", "def optimize_console_settings(resources)\n output_minimum_system_requirements_error_and_exit unless meets_minimum_system_requirements?(resources)\n\n percent_mb_console = 75\n minimum_mb_console = fit_to_memory(resources['ram'], 512, 768, 1024)\n maximum_mb_console = 4096\n minimum_mb_os = reserved_memory_os\n # minimum_mb_g1gc = 2048\n\n settings = {}\n totals = {}\n\n available_mb_for_console = resources['ram'] - minimum_mb_os\n if available_mb_for_console < minimum_mb_console\n Puppet.debug(\"Error: available_mb_for_console: #{available_mb_for_console} < minimum_mb_console: #{minimum_mb_console}\")\n output_minimum_system_requirements_error_and_exit\n end\n\n mb_console = clamp_percent_of_resource(resources['ram'], percent_mb_console, minimum_mb_console, maximum_mb_console)\n java_args_for_console = { 'Xms' => \"#{mb_console}m\", 'Xmx' => \"#{mb_console}m\" }\n # java_args_for_console['XX:+UseG1GC'] = '' if (jruby_9k_enabled? == false) && (mb_console >= minimum_mb_g1gc)\n settings['puppet_enterprise::profile::console::java_args'] = java_args_for_console\n\n ram_used = mb_console\n totals['RAM'] = { 'total' => resources['ram'], 'used' => ram_used }\n\n [settings, totals]\n end", "def to_hash\n hash = {}\n VirtualizationVmwareVirtualMachineAllOf.attribute_map.each_pair do |attr, param|\n value = self.send(attr)\n if value.nil?\n is_nullable = VirtualizationVmwareVirtualMachineAllOf.openapi_nullable.include?(attr)\n next if !is_nullable || (is_nullable && !instance_variable_defined?(:\"@#{attr}\"))\n end\n\n hash[param] = _to_hash(value)\n end\n hash\n end" ]
[ "0.8066609", "0.7835972", "0.6794016", "0.624362", "0.61670685", "0.6039891", "0.58982915", "0.5856219", "0.5856219", "0.57928544", "0.57892734", "0.57689637", "0.57237", "0.5660547", "0.5595816", "0.5595334", "0.5577137", "0.55624706", "0.5524775", "0.5453265", "0.5409062", "0.5374561", "0.537172", "0.53446656", "0.5322438", "0.5288049", "0.52854896", "0.5278039", "0.5272348", "0.52595985", "0.5244059", "0.5239131", "0.52260965", "0.5214658", "0.5202491", "0.52017826", "0.52017826", "0.51998353", "0.5199563", "0.51910526", "0.5189152", "0.5184577", "0.51770055", "0.5164101", "0.5150113", "0.5148755", "0.5145041", "0.51357365", "0.51236635", "0.5108028", "0.5098008", "0.50934136", "0.50690216", "0.5068203", "0.5065226", "0.5063693", "0.5062774", "0.50563914", "0.50506973", "0.5046804", "0.5034324", "0.501337", "0.50127065", "0.49999306", "0.49943182", "0.49762386", "0.49757043", "0.49755782", "0.49702495", "0.49581155", "0.4956235", "0.49475643", "0.49397036", "0.4938313", "0.49189267", "0.49174473", "0.4913116", "0.4899055", "0.48977956", "0.48952293", "0.48909646", "0.4889632", "0.48826548", "0.4879092", "0.48758566", "0.48750332", "0.48627615", "0.48606294", "0.4857314", "0.48554754", "0.48336133", "0.48308843", "0.4828159", "0.4818546", "0.48145306", "0.4811824", "0.47956795", "0.4793604", "0.47933713" ]
0.80607426
2
Checks if options passed to CPI are valid and can actually be used to create all required data structures etc.
def validate_options
  required_keys = {
    "aws" => ["default_key_name", "max_retries"],
    "registry" => ["endpoint", "user", "password"],
  }

  missing_keys = []

  required_keys.each_pair do |key, values|
    values.each do |value|
      if (!options.has_key?(key) || !options[key].has_key?(value))
        missing_keys << "#{key}:#{value}"
      end
    end
  end

  raise ArgumentError, "missing configuration parameters > #{missing_keys.join(', ')}" unless missing_keys.empty?

  if !options['aws'].has_key?('region') && !(options['aws'].has_key?('ec2_endpoint') && options['aws'].has_key?('elb_endpoint'))
    raise ArgumentError, "missing configuration parameters > aws:region, or aws:ec2_endpoint and aws:elb_endpoint"
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_options(options)\n raise 'Invalid output directory.' unless options.config.respond_to?(:output_dir)\n raise 'Invalid template.' unless options.config.respond_to?(:template)\n raise 'Invalid template directory.' unless options.config.respond_to?(:template_dir)\n raise 'Invalid author.' unless options.config.respond_to?(:author)\n raise 'Invalid affiliation.' unless options.config.respond_to?(:affiliation)\n raise 'Invalid department.' unless options.config.respond_to?(:department)\n raise 'Invalid institution.' unless options.config.respond_to?(:institution)\n raise 'Invalid email.' unless options.config.respond_to?(:email)\n raise 'Invalid project name.' unless options.config.respond_to?(:project_name)\n raise 'Invalid project title.' unless options.config.respond_to?(:project_title)\n end", "def valid?(options)\n (@required_options - options.keys).size == 0\n end", "def validate_arguments\n if([email protected] || !@@sections.include?(@options.parse))\n @log.error \"select one of the following to parse: #{@@sections.join(\"|\")}\"\n exit!\n end\n \n if([email protected] && [email protected])\n @log.error \"Select either to download the file remotely or supply the given file\"\n exit!\n end\n \n if([email protected])\n @log.error \"supply an output directory with -o\"\n exit!\n end\n \n return true\n end", "def arguments_valid?\n # check the parameters have values\n return false unless @options.has_key?(:hcdFile)\n return false unless @options.has_key?(:etdFile) \n return false if @options[:mzArray].empty?\n return false unless (@options[:mzTolerance] > 0.0 || @options[:ppmTolerance] > 0.0 )\n # check the file exists\n return false unless File.file?(@options[:hcdFile])\n return false unless File.file?(@options[:etdFile])\n true\n end", "def validate_opts\n if @options[:file].nil?\n puts \"Pass me some filename (-f FILE)\"\n return false\n elsif !File.exists?(@options[:file])\n puts \"File: #{@options[:file]} does not exist\"\n return false\n elsif !File.readable?(@options[:file])\n puts \"File: #{@options[:file]} is not readable\"\n return false\n elsif File.directory?(@options[:file])\n puts \"#{@options[:file]} is a directory!\"\n return false\n elsif File.zero?(@options[:file])\n puts \"File: #{@options[:file]} is empty\"\n return false\n end\n\n if @options[:mode].nil?\n puts \"Pass me indexing mode (-m index|noindex )\"\n return false\n end\n return true\n end", "def validate_options\n required_keys = %w(endpoint user password\n container_name agent ntp blobstore)\n missing_keys = []\n required_keys.each do |key|\n unless @options.has_key?(key)\n missing_keys << key\n end\n end\n message = \"Missing configuration parameters: #{missing_keys}\"\n raise ArgumentError, message unless missing_keys.empty?\n end", "def valid_opts\n true\n end", "def validate_options!(options)\n return if options[:help] || options[:version]\n validate_date(options)\n raise MissingArgument, \"--repository or --config\" unless\n options[:repository] || options[:config]\n end", "def validate_options\n errors.add(:base, 'pipeline_name is required') unless options['pipeline_name'].present?\n if options['data'].blank?\n errors.add(:base, 'data is required')\n else\n errors.add(:base, 'file_name is required') unless options['data']['file_name'].present?\n end\n end", "def validate_opts\n validate_opts_file if options[:file]\n end", "def options_valid?\n missing = MANDATORY_OPTIONS.select { |arg| @options[arg].nil? 
}\n missing.empty?\n end", "def check_configuration_options\r\n if ($project_name == \"\")\r\n\t raise 'Project name must be set' \r\n\tend\r\n\tif ($repo_key == \"\")\r\n raise 'Repository is required'\r\n end\r\n\tif ($target_repo_key == \"\")\r\n raise 'Target repository is required'\r\n\tend\r\n\tif ($username == \"\")\r\n raise 'Codeporting username must be provided'\r\n\tend\r\n\tif ($password == \"\")\r\n raise 'Codeporting password must be provided'\r\n\tend\r\n end", "def valid?\n errors, options = {}, {}\n @results = {:errors => errors, :options => options}\n \n remaining_args = args.dup\n valid_options.each do |long_name, details|\n short_name, default = *details\n key = long_name.gsub(/^--/, '').gsub('-', '_').to_sym\n index = remaining_args.index(long_name) ||\n remaining_args.index(short_name)\n if index\n remaining_args.delete_at index\n if self.class.option?(remaining_args[index])\n options[key] = true\n else\n options[key] = remaining_args.delete_at(index)\n end\n else\n options[key] = default\n end\n end\n remaining_args.each do |arg|\n arg_type = self.class.option?(arg) ? 'option' : 'argument'\n errors[arg] = \"is not a valid #{arg_type}\"\n end\n \n errors.empty?\n end", "def validate_options\n true\n end", "def arguments_valid?\n begin\n @validoptions = BoilermakerOptions.new(options)\n @validoptions.validate\n # pp @validoptions.args\n return @validoptions.args\n rescue => error\n # pp x.args\n puts error.message + \"\\n\"\n exit\n end\n end", "def validate_options!(options)\n unless @config.environments.include?(options[:environment].downcase.to_sym)\n say \"Invalid environment. Run `#{$0} environments` for a list of available environments.\", :red\n abort\n end\n\n unless @config.browsers.include?(options[:browser].downcase.to_sym)\n say \"Invalid browser. Run `#{$0} browsers` for a list of available browsers.\", :red\n abort\n end\n end", "def validate_options!\n if incomplete_policyfile_options?\n ui.error(\"Policy group and name must be specified together\")\n exit 1\n end\n true\n end", "def options_ok?\n end", "def options_valid?\n loaded_config? &&\n output_base_valid? &&\n have_sample_ids? 
&&\n sample_ids_in_config?\nend", "def validate_options; end", "def validate_public_options(options)\n options.each do |k, _v|\n case k\n when :controller_info,\n /enabled_for_/,\n :enabled,\n :whodunnit\n next\n else\n raise InvalidOption, \"Invalid option: #{k}\"\n end\n end\n end", "def check_sanity\n errors = []\n [:site, :uri, :user, :password, :confdir].each do |sym|\n if @config[sym].nil?# or @config[sym].empty?\n errors << \"Option '#{sym}' is required\"\n end\n end\n unless errors.empty?\n $stderr.puts 'ERROR: The following problems were detected:'\n errors.map { |e| $stderr.puts \" * #{e}\" }\n $stderr.puts \"\\nConfiguration options:\\n\"\n ap @config\n exit 1\n end\n end", "def valid_arguments?\n begin\n if(@options.file)\n raise LoadError,\"The file you specified doesn't exist: #{@options.file}\" if File.exist?(@options.file) == false\n else\n @log.error \"Select a file using -f or --file FILE\"\n exit!\n end\n \n if(@options.output)\n # not going to worry about this one.\n else\n @log.error \"No output was specified select using -o or --output\"\n exit!\n end\n rescue LoadError => bam\n @log.error bam\n exit!\n end\n \n return true\n end", "def validate_options\n required_keys = {\n # 'azure' => %w(cert_path subscription_id region default_key_name logical_name),\n 'azure' => %w(),\n 'registry' => []\n }\n\n missing_keys = []\n\n required_keys.each_pair do |key, values|\n values.each do |value|\n if !options.has_key?(key) || !options[key].has_key?(value)\n missing_keys << \"#{key}:#{value}\"\n end\n end\n end\n\n raise ArgumentError, \"missing configuration parameters > #{missing_keys.join(', ')}\" unless missing_keys.empty?\n end", "def verify_options_hook; end", "def validate(options); end", "def validate_options!(options)\n if options[:chef_api] == :config\n return true\n end\n\n missing_options = [:node_name, :client_key] - options.keys\n\n unless missing_options.empty?\n missing_options.collect! { |opt| \"'#{opt}'\" }\n msg = \"Source '#{name}' is a 'chef_api' location with a URL for it's value\"\n msg << \" but is missing options: #{missing_options.join(', ')}.\"\n\n raise Berkshelf::InvalidChefAPILocation, msg\n end\n\n self.class.validate_node_name!(options[:node_name])\n self.class.validate_client_key!(options[:client_key])\n self.class.validate_uri!(options[:chef_api])\n end", "def arguments_valid?\n valid_args = true\n valid_args = false if @options.min > @options.max\n valid_args = false if @options.user && [email protected]\n valid_args = false if @options.password && [email protected]\n valid_args\n end", "def validate_options(options)\n options.assert_valid_keys(:visibility, :null, :enum, :limit, :json)\n\n case options[:visibility]\n when nil then nil\n when :public then nil\n when :private then nil\n else raise ArgumentError, \"Invalid value for :visibility: #{options[:visibility].inspect}\"\n end\n\n case options[:json]\n when nil, String, Symbol then nil\n else raise ArgumentError, \"Invalid value for :json: #{options[:json].inspect}\"\n end\n\n unless [ nil, true, false ].include?(options[:null])\n raise ArgumentError, \"Invalid value for :null: #{options[:null].inspect}\"\n end\n end", "def valid_options\n valid = true\n has_one_checked = false\n #logger.debug \"Q#{self.id} OV: #{self.id}: #{self.options ? self.options.size : nil}, #{self._options ? self._options.size : nil}, #{self.json_options ? 
self.json_options.size : nil}\"\n \n self.options.each do |option|\n valid = false if option.invalid?\n has_one_checked = true if option.correct?\n end\n \n errors.add(:options, 'Question must have at least 1 option!') if self.options.empty?\n errors.add(:options, 'Question must have at least 1 correct option!') if !self.options.empty? && !has_one_checked\n errors.add(:options, 'All must have text!') if !valid\n \n #logger.debug \" ERROR #{errors.full_messages}\"\n end", "def check_option_support\n assert_option_supported(:foodcritic) &&\n assert_option_supported(:scmversion, 'thor-scmversion') &&\n assert_default_supported(:no_bundler, 'bundler')\n end", "def assert_valid_options\n valid_options = {\n :skip_gemfile => false,\n :skip_bundle => false,\n :skip_git => false,\n :skip_test_unit => false,\n :edge => false\n }\n valid_options.each do |key, expected|\n next unless options.key?(key)\n actual = options[key]\n unless actual == expected\n fail Rails::Generators::Error, \"Unsupported option: #{key}=#{actual}\"\n end\n end\nend", "def check_options\n unless @options[:stub]\n STDERR.puts \"Please specify a host to connect to using --host\" unless @options[:host]\n STDERR.puts \"Please specify a model to check using --model\" unless @options[:model]\n return false unless @options[:host] && @options[:model]\n end\n\n true\n end", "def valid_arguments?\n begin\n if(@options.file)\n raise LoadError,\"The file you specified doesn't exist: #{@options.file}\" if File.exist?(@options.file) == false\n else\n @log.error \"Select a file using -f or --file FILE\"\n end\n \n if(@options.output)\n # not going to worry about this one.\n else\n @log.error \"No output was specified select using -o or --output\"\n end\n rescue LoadError => bam\n @log.error bam\n exit\n end\n \n return true\n end", "def valid_arguments?\n begin\n if(@options.file)\n raise LoadError,\"The file you specified doesn't exist: #{@options.file}\" if File.exist?(@options.file) == false\n else\n @log.error \"Select a file using -f or --file FILE\"\n end\n \n if(@options.output)\n # not going to worry about this one.\n else\n @log.error \"No output was specified select using -o or --output\"\n end\n rescue LoadError => bam\n @log.error bam\n exit\n end\n \n return true\n end", "def valid_arguments?\n begin\n if(@options.file)\n raise LoadError,\"The file you specified doesn't exist: #{@options.file}\" if File.exist?(@options.file) == false\n else\n @log.error \"Select a file using -f or --file FILE\"\n end\n \n if(@options.output)\n # not going to worry about this one.\n else\n @log.error \"No output was specified select using -o or --output\"\n end\n rescue LoadError => bam\n @log.error bam\n exit\n end\n \n return true\n end", "def validate_pdf_options(args, options)\n if options.height < 1\n $stderr.puts(\"Error in --height flag. You must provide a positive height >= 1 in inch.\\n\")\n exit!\n end\n\n if options.width < 1\n $stderr.puts(\"Error in --width flag. 
You must provide a positive width >= 1 in inch.\\n\")\n exit!\n end\nend", "def validate_options(options)\n unless options.kind_of?(Hash)\n raise ArgumentError, \"You must pass a Hash, not: #{options.inspect}\"\n end\n\n options.assert_valid_keys(:visibility, :prefix, :delegate, :unknown_fields, :compress, :header)\n\n unless [ nil, :private, :public ].include?(options[:visibility])\n raise ArgumentError, \"Invalid value for :visibility: #{options[:visibility.inspect]}\"\n end\n\n unless [ :delete, :preserve, nil ].include?(options[:unknown_fields])\n raise ArgumentError, \"Invalid value for :unknown_fields: #{options[:unknown_fields].inspect}\"\n end\n\n unless [ true, false, nil ].include?(options[:compress]) || options[:compress].kind_of?(Integer)\n raise ArgumentError, \"Invalid value for :compress: #{options[:compress].inspect}\"\n end\n\n unless [ true, false, nil ].include?(options[:header])\n raise ArgumentError, \"Invalid value for :header: #{options[:header].inspect}\"\n end\n\n case options[:prefix]\n when nil then nil\n when String, Symbol then nil\n else raise ArgumentError, \"Invalid value for :prefix: #{options[:prefix].inspect}\"\n end\n\n unless [ nil, true, false, :private, :public ].include?(options[:delegate])\n raise ArgumentError, \"Invalid value for :delegate: #{options[:delegate].inspect}\"\n end\n\n if options[:visibility] == :private && options[:delegate] == :public\n raise ArgumentError, \"You can't have public delegation if methods in the flex column are private; this makes no sense, as methods in the model class would have *greater* visibility than methods on the flex column itself\"\n end\n end", "def assert_valid_options\n valid_options = {\n skip_gemfile: false,\n skip_bundle: false,\n skip_git: false,\n skip_system_test: false,\n # skip_test: false,\n # skip_test_unit: false,\n edge: false\n }\n valid_options.each do |key, expected|\n next unless options.key?(key)\n actual = options[key]\n unless actual == expected\n fail Rails::Generators::Error, \"Unsupported option: #{key}=#{actual}\"\n end\n end\nend", "def validate_options(options = {})\n base_unity_url = options.fetch(:base_unity_url, @base_unity_url)\n username = options.fetch(:username, @username)\n password = options.fetch(:password, @password)\n appname = options.fetch(:appname, @appname)\n\n raise ArgumentError, 'base_unity_url can not be nil' if base_unity_url.nil?\n raise ArgumentError, 'username can not be nil' if username.nil?\n raise ArgumentError, 'password can not be nil' if password.nil?\n raise ArgumentError, 'appname can not be nil' if appname.nil?\n end", "def assert_valid_options\n valid_options = {\n skip_gemfile: false,\n skip_bundle: false,\n skip_git: false,\n skip_system_test: false,\n skip_test: false,\n skip_test_unit: false,\n edge: false\n }\n valid_options.each do |key, expected|\n next unless options.key?(key)\n actual = options[key]\n unless actual == expected\n fail Rails::Generators::Error, \"Unsupported option: #{key}=#{actual}\"\n end\n end\nend", "def check_options_requirements(runtime_options)\n required_options = options[:requires] || options[:require]\n return true unless required_options\n\n ([required_options].flatten - runtime_options.keys).size == 0\n end", "def assert_valid_options\n valid_options = {\n skip_gemfile: false,\n skip_git: false,\n skip_test_unit: false,\n edge: false\n }\n valid_options.each do |key, expected|\n next unless options.key?(key)\n actual = options[key]\n unless actual == expected\n fail Rails::Generators::Error, \"Unsupported option: 
#{key}=#{actual}\"\n end\n end\nend", "def validate_options(opts)\n unless opts.branch_regex || opts.tag_regex\n # If git branches/tags are given, the source path might exist in the branch/tag, thus only check this\n # for non-git executions\n raise OptionParser::InvalidArgument, \"Could not find source path #{opts.srcdir}\" unless opts.srcdir.exist?\n end\n\n if opts.web_path\n raise OptionParser::InvalidArgument, \"The '-w/--web-path' flag is DEPRECATED. Use the --server-css-path and --server--search-path flags instead.\"\n end\n\n if opts.server_css && (opts.resource_dir || opts.style_name)\n raise OptionParser::InvalidArgument, \"The '-w' flag can not be used with either of the '-r' or '-s' flags\"\n end\n\n if opts.server_css && opts.format != \"html\"\n raise OptionParser::InvalidArgument, \"The '-w' flag can only be used for the 'html' format flags\"\n end\n\n if opts.style_name && opts.resource_dir.nil?\n raise OptionParser::InvalidArgument, \"The '-s' flag requires the use of the '-r' flag as well.\"\n end\n\n # if opts.resource_dir.nil? ^ opts.style_name.nil?\n # raise OptionParser::InvalidArgument, \"Either both '-s' and '-r' flags must be given or none of them.\"\n # end\n\n if opts.resource_dir && !opts.resource_dir.exist?\n raise OptionParser::InvalidArgument, \"Could not find resource path #{opts.resource_dir}\"\n end\n\n if opts.make_searchable && opts.format != \"html\"\n raise OptionParser::InvalidArgument, \"Error: The --make-searchable option \" \\\n \"is only supported for html rendering.\"\n end\n\n if opts.search_action_path && !opts.make_searchable\n raise OptionParser::InvalidArgument, \"Error: The --server-search-path \" \\\n \"flag is only supported in combination with the --make-searchable (-m) flag.\"\n end\n end", "def test_option_required\n\n # All options are optional by default\n assert(!Option.new(nil, nil).required)\n assert(!Option.new(\"-h|--help\", nil).required)\n\n # All options may be required\n assert(Option.new(nil, nil, required:true).required)\n assert(Option.new(\"-h|--help\", nil, required:true).required)\n end", "def validate_options(options = {})\n error_messages = []\n\n options.fetch(:ignore_selectors, []).each do | s |\n if s[:pacticipant_name].nil?\n error_messages << \"Please specify the pacticipant name to ignore\"\n else\n if s.key?(:pacticipant_version_number) && s.key?(:latest)\n error_messages << \"A version number and latest flag cannot both be specified for #{s[:pacticipant_name]} to ignore\"\n end\n end\n end\n\n destination_identifiers = [options[:tag], options[:environment_name], options[:main_branch]&.to_s].compact\n\n if destination_identifiers.size > 1\n error_messages << message(\"errors.validation.cannot_specify_more_than_one_destination_identifier\")\n end\n\n if options[:latest] && options[:environment_name]&.not_blank?\n error_messages << message(\"errors.validation.cannot_specify_latest_and_environment\")\n end\n\n if options[:environment_name]&.not_blank? && environment_service.find_by_name(options[:environment_name]).nil?\n error_messages << message(\"errors.validation.environment_with_name_not_found\", name: options[:environment_name])\n end\n\n if options[:limit] && options[:limit].to_i < 1\n error_messages << message(\"errors.validation.invalid_limit\")\n end\n\n error_messages\n end", "def post_processing\n @opts.each do |opt|\n if opt.required? 
&& [email protected]?(opt.to_sym)\n fail \"The option #{opt.to_sym} is required\"\n end\n end\n end", "def validate_global_options(args, options)\n options.default :precision => PRECISION_DEFAULT\n options.default :naming => ''\n options.default :alpha => ALPHA_DEFAULT\n\n if options.precision < MIN_PRECISION_DEFAULT\n $stderr.puts(\"Error in --precision flag: Precision must be >= 20 points.\\n\")\n exit!\n end\n\n if options.precision < 0\n $stderr.puts(\"Error in --precision flag: Precision must be >= 1 point.\\n\")\n exit!\n end\n\n Ppbench::precision = options.precision\n\n if !options.naming.empty? && !File.exist?(options.naming)\n $stderr.puts(\"Error in --naming flag: File '#{options.naming}' does not exist.\")\n exit!\n end\n\n Ppbench::naming = {} if options.naming.empty?\n\n unless options.naming.empty?\n begin\n file = File.read(options.naming)\n Ppbench::naming = JSON.parse(file)\n rescue Exception => ex\n $stderr.puts(\"Error in naming file '#{options.naming}'. Does not seem to be a valid JSON file.\")\n exit!\n end\n end\n\n if options.alpha < 0.0 || options.alpha > 1.0\n $stderr.puts(\"Error in --alpha flag: Alpha must be between 0.0 and 1.0, but alpha was '#{options.alpha}'.\")\n exit!\n end\n\n Ppbench::alpha = options.alpha\nend", "def check_required_options(option_set_name, options = {})\n required_options = REQUIRED_OPTIONS[option_set_name]\n missing = []\n required_options.each{|option| missing << option if options[option].nil?}\n \n unless missing.empty?\n raise MissingInformationError.new(\"Missing #{missing.collect{|m| \":#{m}\"}.join(', ')}\")\n end\n end", "def validate()\n @options.each do |k,v|\n # self.send(k, k)\n self.send(k)\n end\n @args[:projectdir] = File.expand_path(\"./\") unless @args[:projectdir]\n end", "def validate_and_parse_options\n # Checking ARGV validity *before* parse_options because parse_options\n # mangles ARGV in some situations\n if no_command_given?\n print_help_and_exit(1, NO_COMMAND_GIVEN)\n elsif no_subcommand_given?\n if (want_help? || want_version?)\n print_help_and_exit\n else\n print_help_and_exit(2, NO_COMMAND_GIVEN)\n end\n end\n end", "def check(cmd)\n # check requisite options\n list.each do |item|\n if item.requisite and not(cmd.model[item.key])\n raise OptionError.new(cmd, 'option \"%s\" is requisite' % [item.long])\n end\n end\n end", "def arguments_valid?\n return false if (@options[:partition_class] and @options[:rmcls])\n\n if @options[:rmcls]\n return false unless (@options[:cls_file] and @options[:lrn_file])\n elsif @options[:partition_class]\n return false unless (@options[:cls_file] and @options[:name_file] and @options[:fasta_file])\n end\n true\n end", "def check_options_values\n # Check if files specified with -f option exist\n if @options[:files].nil?\n @files = ['lib', 'bin', 'app', 'test', 'spec', 'feature']\n else\n @files = @options[:files].split(',')\n @files.delete_if do |filename|\n unless File.exist?(filename)\n puts \"#{filename} does not exist. Ignore it.\"\n true\n end\n end\n if @files.empty?\n puts 'No file to analyze. Aborted!'\n exit\n end\n end\n # Check if files specified with -e option exist\n unless @options[:exclude].nil?\n @excluded_files = @options[:exclude].split(',')\n @excluded_files.delete_if do |filename|\n unless File.exist?(filename)\n puts \"#{filename} does not exist. 
Ignore it.\"\n true\n end\n end\n end\n end", "def arguement_check(options)\n args = [\"access_key_id\", \"secret_access_key\", \"bucket_name\"] \n args.each do |arg|\n begin\n options.fetch(arg)\n rescue KeyError\n raise ArgumentError, \"Argument #{arg} is required.\"\n end\n end\n end", "def check_inputs(daemon, client, option_template, _foreign_vars)\n # if group, user, and template are nil, throw an exception\n if daemon.nil? && client.nil? && option.nil?\n raise 'You must provide a daemon, client, and option!'\n elsif !user.nil? && !group.nil? && !template.nil?\n raise 'You cannot specify user, group, and template!'\n end\nend", "def invalid_options?\n options[:api_key].nil? || options[:blog].nil?\n end", "def check_input_files(inputfiles)\n inputfiles.each_key do | type |\n inputfiles[type].flatten!\n check = 0\n inputfiles[type].each do | symbol |\n if @options[symbol] == nil or @options[symbol] == ''\n if type == :required\n raise CheripicArgError.new \"Options #{inputfiles}, all must be specified. Try --help for further help.\"\n end\n else\n file = @options[symbol]\n if symbol == :bg_bulk or symbol == :bg_bulk_vcf\n if file.include? ','\n @options[symbol] = []\n file.split(',').each do | infile |\n @options[symbol] << File.expand_path(infile)\n file_exist?(symbol, infile)\n end\n end\n else\n @options[symbol] = File.expand_path(file)\n file_exist?(symbol, file)\n end\n check = 1\n end\n end\n if type == :either and check == 0\n raise CheripicArgError.new \"One of the options #{inputfiles}, must be specified. \" +\n 'Try --help for further help.'\n end\n end\n end", "def valid_options\n []\n end", "def validate_plot_options(args, options)\n\n if args.empty?\n $stderr.puts(\"You have to provide benchmark files (*.csv) to analyze.\")\n exit!\n end\n\n if options.recwindow < 0\n $stderr.puts(\"Error in --recwindow flag: TCP standard receive window must be >= 0 bytes\\n\")\n exit!\n end\n\n if options.confidence < 0 || options.confidence > 100\n $stderr.puts(\"Error in --confidence flag: Confidence interval must be between 0 and 100 %.\\n\")\n exit!\n end\n\n if options.yaxis_max < 0\n $stderr.puts(\"Error in --yaxis_max flag: Maximum value on yaxis must be >= 0.\\n\")\n end\n\n if options.yaxis_steps <= 0\n $stderr.puts(\"Error in --yaxis_steps flag: You must provide a positive step > 0.\\n\")\n exit!\n end\n\n if options.xaxis_max < 0\n $stderr.puts(\"Error in --xaxis_max flag: Maximum value on xaxis must be >= 0.\\n\")\n exit!\n end\n\n if options.xaxis_steps <= 0\n $stderr.puts(\"Error in --xaxis_steps flag: You must provide a positive step > 0.\\n\")\n exit!\n end\n\n if options.nopoints && !options.withbands\n $stderr.puts(\"Error in --nopoints flag. You must use --withbands if applying --nopoints. Otherwise nothing would be plotted.\\n\")\n exit!\n end\n\nend", "def arguments_valid?\n # right now, there is no set of invalid arguments.\n # (can I really just say true here? 
I don't have to return something?)\n true unless (@options.set_status == true and @options.status == nil)\n end", "def _check_options(text)\n ret = true\n if @options\n unless @options.include?(text)\n @last_error_message = \"Invalid option chosen (\\\"#{ text }\\\"); \" \\\n \"valid options are: #{ options }\"\n ret = false\n end\n end\n ret\n end", "def validate_options(options, *known_options)\n\toptions ||= Hash.new\n\topt, unknown = Kernel.filter_options(options.to_hash, *known_options)\n\tunless unknown.empty?\n\t not_valid = unknown.keys.map { |m| \"'#{m}'\" }.join(\" \")\n\t raise ArgumentError, \"unknown options #{not_valid}\", caller(1)\n\tend\n\n\topt\n end", "def check_mandatory!\n if options.work_dir.nil?\n kill \"missing file operand\"\n end\n if options.contents_template.nil?\n kill \"could not find contents template\"\n end\n end", "def opts_is_valid?(opts = {})\n !(opts[:file].nil? && opts[:temp].nil?) || (opts[:file] && opts[:temp])\n end", "def validate_options(options)\n raise TypeError.new(\"provided options must be a Hash\") unless options.is_a?(Hash)\n check_for_illegal_keys!(options)\n options = set_deduct_call_time_based_on_metric(DEFAULT_SETTINGS.merge(options))\n check_for_push_scope_and_metric(options)\n options\n end", "def arguments_valid?\n ret = false\n ret = true unless (@options.action == nil)\n end", "def valid_options!\n @options.keys.uniq.each do |key|\n raise WhmArgumentError.new(\"Not a valid parameter: #{key}\") unless @optional_params.include?(key)\n end\n end", "def check_validity!\n keys = CHECKS.keys\n options.slice(*keys).each do |option, value|\n next if is_time?(value) || value.is_a?(Proc) || value.is_a?(Symbol) || (defined?(ActiveSupport::TimeWithZone) and value.is_a? ActiveSupport::TimeWithZone)\n raise ArgumentError, \":#{option} must be a time, a date, a time_with_zone, a symbol or a proc\"\n end\n end", "def check_options(opts, args)\n\n @options[:models] = opts[:models].reject { |m| m.blank? }\n\n\n if no_options(opts) && @options[:models].empty?\n\n raise AASM_NoModels, AASM_NoModels.error_message(\"(cli-no_options) Missing model or '--all' option: You must either specify a model to diagram or use the --all option.\\n\\n args: #{args}\\n options: #{opts.inspect}\")\n\n # show help\n\n else\n\n if @options[:models].empty?\n\n if !opts[:all] && !version_or_graph_configs_opt?(args, opts)\n\n raise AASM_StateChart::AASM_NoModels, AASM_NoModels.error_message(\"(cli) Missing model or '--all' option: You must either specify a model to diagram or use the --all option.\\n\\n args: #{args}\\n options: #{opts.inspect}\")\n\n else # opts[:all] || version_or_graph_configs_opt?(args, opts)\n\n if opts.fetch(:dump_configs, false)\n error_unless_valid_dump_configs(opts)\n\n else\n unless opts.fetch(:version, false) || opts[:all]\n raise AASM_StateChart::AASM_NoModels, AASM_NoModels.error_message(\"(cli) Missing model or '--all' option: You must either specify a model to diagram or use the --all option.\\n\\n args: #{args}\\n options: #{opts.inspect}\")\n end\n end\n\n end\n\n else\n\n if opts[:all]\n raise AASM_StateChart::CLI_Inputs_ERROR, CLI_Inputs_ERROR.error_message(\"You can't use the '--all' options and specify a model. 
(need more: why)\")\n end\n\n error_unless_valid_dump_configs(opts)\n\n end\n\n end\n\n if opts.has_key?(:config_file) && !File.exist?(opts[:config_file])\n raise AASM_StateChart::NoConfigFile_Error, NoConfigFile_Error.error_message(\"(cli) The configuration file #{opts[:config_file]} cannot be found.\")\n end\n\n true\n\n end", "def valid_options(*opts)\n Array(opts).each do |opt|\n CookbookSource.add_valid_option(opt)\n end\n end", "def validate\n ensure_exclude_option_array_exists\n ensure_linter_section_exists\n ensure_linter_include_exclude_arrays_exist\n end", "def process_options\n options.delete_if { |x,y| y.nil? }\n if options.empty?\n puts @optparse \n exit 0\n end\n options.each do |x,y|\n begin\n if y.to_s.match('^-')\n raise BoilerMakerErr.new(\"Bad args: \\\"#{y}\\\" is not a valid arg to option, \\\"--#{x}\\\". Use the -h flag for syntax help.\")\n end\n rescue => error\n puts error.message + \"\\n\"\n exit 1\n end\n end\n end", "def validate_options!(_options)\n raise(NotImplemetedError, \"subclass responsability\")\n end", "def validate_params\n puts\n puts \"You're about to import data in your '#{Rails.env}' instance.\"\n puts \"You'll use the following source files:\"\n puts \" users: #{options['users'] || '-'} \"\n puts \" projects: #{options['projects'] || '-'}\"\n puts \" issues: #{options['issues'] || '-'}\"\n puts\n puts \"/!\\\\ Make sure to have a backup of your database before continuing.\"\n puts\n print 'Is this ok ? [y/n]: '\n STDOUT.flush\n ok = STDIN.gets.chomp!\n exit 2 if ok != 'y'\n puts\n end", "def validate_options(options, attrs)\n matched_attrs = options.keys & attrs\n if matched_attrs.to_set != attrs.to_set\n raise HelpScoutDocs::Error::OptionsError.new(\"#{(attrs - matched_attrs).join(\", \")} required options are missing\")\n end\n end", "def option_combinations_valid?\n # TO DO - implement your real logic here\n true \n end", "def validate_options(api, options = {})\n options = options.symbolize_keys\n version_args = ARGS[@version.to_sym]\n \n if !version_args\n raise ArgumentError.new(\"Invalid version API call: #{@version}, #{api}\")\n elsif version_args.has_key?(api) && !Set.new(options.keys).subset?(version_args[api])\n raise ArgumentError.new(\"Valid options for #{api} are: #{version_args[api].to_a.join(', ')}, provided: #{options.keys.join(', ')}\")\n end\n end", "def ab_options_valid?(ab_options = {})\n return false if ab_options.empty?\n\n unless ab_options_is_complete?(ab_options)\n logger.info \"**********************************************************************************************************\"\n logger.info \"[Bachanalytics] You need to specify the :a, b: and :goal options for your WebSiteOptimizer tests, aborting\"\n logger.info \"**********************************************************************************************************\"\n return false\n end\n\n unless uniq_goals?(ab_options)\n logger.info \"**************************************************************************************************\"\n logger.info \"[Bachanalytics] You can't specify a :goal as part of the your WebSiteOptimizer tests, not applying\"\n logger.info \"**************************************************************************************************\"\n return false\n end\n\n true\n end", "def validate_comparison_options(args, options)\n\n if args.empty?\n $stderr.puts(\"You have to provide benchmark files (*.csv) to analyze.\")\n exit!\n end\n\n if options.recwindow < 0\n $stderr.puts(\"Error in --recwindow flag: TCP standard receive window 
must be >= 0 bytes.\\n\")\n exit!\n end\n\n if options.yaxis_max < 0\n $stderr.puts(\"Error in --yaxis_max flag: Maximum value on yaxis must be >= 0.\\n\")\n end\n\n if options.xaxis_max < 0\n $stderr.puts(\"Error in --xaxis_max flag: Maximum value on xaxis must be >= 0.\\n\")\n exit!\n end\n\n if options.xaxis_steps <= 0\n $stderr.puts(\"Error in --xaxis_steps flag: You must provide a positive step > 0.\\n\")\n exit!\n end\n\nend", "def valid?(opts)\n errors(opts) == []\n end", "def check_arguments\n convert_boolean_strings\n check_output\n check_log_level\n check_input_entry\n check_input_types\n end", "def checkOptions\n if request.options?\n render nothing: true and return\n end\n end", "def check_options(options, *supported)\n unsupported = options.to_hash.keys - supported.flatten\n raise ArgumentError, \"No such option: #{unsupported.join(' ')}\" unless unsupported.empty?\n end", "def valid?(options = {})\n options = normalize_options(options)\n validate(options)\n return true\n rescue\n false\n end", "def valid_options\n @valid_options ||= defined?(self.class::VALID_OPTIONS) ? self.class::VALID_OPTIONS : {}\n end", "def options_parsed?\n opts = OptionParser.new() do |o|\n o.on('-v','--version') { output_version($stdout); exit(0) }\n o.on('-h','--help') { output_help($stdout); exit(0) }\n o.on('-V', '--verbose') { @options.verbose = true }\n o.on('-D', '--debug') { @options.debug = true }\n o.on('-l', '--local') { @options.run_local = true }\n\n o.on(\"-d\",\"--delay\", \"=REQUIRED\") do |amount|\n @options.delay = amount.to_i\n end\n\n o.on(\"-c\",\"--config\", \"=REQUIRED\") do |conf_file|\n @options.config_file = conf_file\n end\n\n o.on(\"-o\",\"--output\", \"=REQUIRED\") do |output_destination|\n @options.output_base = output_destination\n end\n\n o.on(\"-s\",\"--scheduler\", \"=REQUIRED\") do |qopts|\n @options.scheduler_opts = qopts\n end\n\n o.on(\"-t\",\"--tmp\", \"=REQUIRED\") do |topts|\n @options.tmp_dir_base = topts\n end\n end\n\n opts.parse!(@args) rescue return false\n @options.samples = @args\n return true\nend", "def valid_options\n @@valid_options\n end", "def validate_holding_options!(options)\n unpermitted_params = []\n required_params = {}\n\n case holding_opts[:on_hand_type]\n when :closed\n required_params[:amount] = [Integer]\n when :open\n required_params[:amount] = [Integer]\n when :none\n unpermitted_params << :amount\n end\n\n # Actual validation\n unpermitted_params = unpermitted_params.select { |p| options.key?(p) }\n .map { |p| \"'#{p}'\" }\n\n wrong_types = required_params\n .select { |k, v| options.key?(k) && v.select { |type| options[k].is_a?(type) }.length.zero? 
}\n .map { |k, v| \"'#{k}' must be a '#{v.join(' or ')}' but '#{options[k].class}' found\" }\n\n required_params = required_params.reject { |k, _v| options.key?(k) }\n .map { |k, _v| \"'#{k}'\" }\n\n # Raise OptionsInvalid if some invalid parameters were found\n if (unpermitted_params.length + required_params.length + wrong_types.length).positive?\n message = ''\n message << \" unpermitted parameters: #{unpermitted_params.join(',')}.\" unless unpermitted_params.empty?\n message << \" missing parameters: #{required_params.join(',')}.\" unless required_params.empty?\n message << \" parameters type mismatch: #{wrong_types.join(',')}\" unless wrong_types.empty?\n raise ActsAsHoldable::OptionsInvalid.new(self, message)\n end\n true\n end", "def arguments_valid?\n \n # true if @arguments.length == 1 && (File.directory?(@options.output) || File.exists?(File.dirname(@options.output))) && File.directory?(@options.input)\n case @options.mode\n when \"queue\"\n true if File.directory?(@options.output)\n when \"crawl\"\n true if File.directory?(@options.output) && File.directory?(@options.input?)\n else\n true if @arguments.length == 1\n end\n end", "def actual_options?(options)\n return false if options.nil?\n\n if (options.is_a?(String) || options.is_a?(Hash)) && !options.size.zero?\n true\n else\n false\n end\n end", "def validate_naming_options(args, options)\n if args.empty?\n $stderr.puts(\"Error due to missing files to analyze. You must provide at least one log file (csv).\\n\")\n exit!\n end\n\n if !options.update.empty? && !File.exist?(options.update)\n $stderr.puts(\"Error: File #{options.update} does not exist.\\n\")\n exit!\n end\nend", "def validate_run_options(args, options)\n\n if (options.machine.empty?)\n $stderr.puts(\"You have to tag your benchmark data with the --machine flag.\\n\")\n exit!\n end\n\n if (options.experiment.empty?)\n $stderr.puts(\"You have to tag your benchmark data with the --experiment flag.\\n\")\n exit!\n end\n\n if options.coverage < 0 || options.coverage > 1.0\n $stderr.puts(\"Error in --coverage flag: Coverage must be in [0..1.0]\\n\")\n exit!\n end\n\n if options.repetitions < 1\n $stderr.puts(\"Error in --repetitions flag: Repetitions must be >= 1\\n\")\n exit!\n end\n\n if options.concurrency < 1\n $stderr.puts(\"Error in --concurrency flag: Concurrency must be >= 1\\n\")\n exit!\n end\n\n if options.timeout < 1\n $stderr.puts(\"Error in --timeout flag: Timeout must be >= 1 seconds\\n\")\n exit!\n end\n\n if args.empty?\n $stderr.puts(\"You have to specify a log file.\\n\")\n exit!\n end\n\n if args.length > 1\n $stderr.puts(\"You should only specify one log file. You specified #{args.length} logfiles.\\n\")\n exit!\n end\n\n if File.exist?(args[0])\n $stderr.puts(\"Logfile #{args[0]} already exists. 
You do not want to overwrite collected benchmark data.\\n\")\n exit!\n end\n\nend", "def options\n validate_or_get_results :options\n end", "def check_input_types\n inputfiles = {}\n inputfiles[:required] = %i{assembly mut_bulk}\n inputfiles[:optional] = %i{bg_bulk}\n if @options[:input_format] == 'bam'\n inputfiles[:required] << %i{mut_bulk_vcf}\n inputfiles[:optional] << %i{bg_bulk_vcf}\n end\n if @options[:polyploidy]\n inputfiles[:either] = %i{mut_parent bg_parent}\n end\n check_input_files(inputfiles)\n end", "def arguments_valid?\n num = 0\n num += 1 if @options.stats\n num += 1 if @options.attach\n num += 1 if @options.detach\n return false if num > 1\n return true\n end", "def validate\n opts = options.dup\n opts[0] = \"1\"\n set(options: opts)\n end", "def check_required\n # handle case where no files or text were passed (a tty is connected or STDIN is EOF in the latter case)\n return unless @options.files.to_a.empty? && (@stdin.tty? || @stdin.eof?) # to_a.empty? handles nil or empty case\n\n puts 'Either specify input file(s) or pipe text to STDIN'\n show_help\n end", "def verify_options_hook=(_arg0); end" ]
[ "0.7486789", "0.72412026", "0.7234365", "0.72204375", "0.72187287", "0.71798575", "0.7179178", "0.7087661", "0.7074382", "0.7071132", "0.70461327", "0.7026061", "0.70239943", "0.70226896", "0.6992955", "0.69809914", "0.69570374", "0.6945098", "0.691861", "0.69168293", "0.69042575", "0.6891083", "0.68422955", "0.68416774", "0.6841639", "0.6840957", "0.68147975", "0.68036884", "0.6802439", "0.67838186", "0.67671144", "0.67536753", "0.6747587", "0.6741682", "0.6741682", "0.6741682", "0.6739407", "0.67357033", "0.67024255", "0.6675646", "0.66699487", "0.66686374", "0.66620666", "0.66586274", "0.6651924", "0.66482747", "0.66333157", "0.66213155", "0.6609912", "0.66017246", "0.6589415", "0.6589208", "0.65810925", "0.6573319", "0.6558401", "0.65495175", "0.65417314", "0.6530621", "0.65069044", "0.6499607", "0.64969105", "0.6480707", "0.6479163", "0.64711136", "0.64686865", "0.64579093", "0.6451462", "0.644798", "0.64424026", "0.6441332", "0.6438918", "0.643369", "0.64291203", "0.6405372", "0.63928336", "0.636658", "0.63578576", "0.6357064", "0.63526833", "0.6334541", "0.6332066", "0.6312546", "0.6308827", "0.62850916", "0.62819475", "0.6263461", "0.6259773", "0.62575406", "0.62551725", "0.6216851", "0.6216157", "0.62131435", "0.61954695", "0.619297", "0.61885405", "0.6180838", "0.61807954", "0.6170278", "0.6130158" ]
0.6584273
52
Checks AWS credentials settings to see if the CPI will be able to authenticate to AWS.
def validate_credentials_source
  credentials_source = options['aws']['credentials_source'] || 'static'

  if credentials_source != 'env_or_profile' && credentials_source != 'static'
    raise ArgumentError, "Unknown credentials_source #{credentials_source}"
  end

  if credentials_source == 'static'
    if options['aws']['access_key_id'].nil? || options['aws']['secret_access_key'].nil?
      raise ArgumentError, "Must use access_key_id and secret_access_key with static credentials_source"
    end
  end

  if credentials_source == 'env_or_profile'
    if !options['aws']['access_key_id'].nil? || !options['aws']['secret_access_key'].nil?
      raise ArgumentError, "Can't use access_key_id and secret_access_key with env_or_profile credentials_source"
    end
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def verify_aws_keys()\n if (not (ENV['AWS_ACCESS_KEY'] and ENV['AWS_SECRET_KEY'])) then\n if not ENV['MORTAR_IGNORE_AWS_KEYS']\n return false\n else\n return true\n end\n else\n return true\n end\n end", "def validate_aws_credential_file!\n Chef::Log.debug \"Using AWS credential file at #{aws_cred_file_location}\"\n raise ArgumentError, \"The provided --aws_credential_file (#{aws_cred_file_location}) cannot be found on disk.\" unless File.exist?(aws_cred_file_location)\n\n # File format:\n # AWSAccessKeyId=somethingsomethingdarkside\n # AWSSecretKey=somethingsomethingcomplete\n # OR\n # [default]\n # aws_access_key_id = somethingsomethingdarkside\n # aws_secret_access_key = somethingsomethingdarkside\n aws_creds = ini_parse(File.read(aws_cred_file_location))\n profile = config[:aws_profile]\n Chef::Log.debug \"Using AWS profile #{profile}\"\n entries = if aws_creds.values.first.key?(\"AWSAccessKeyId\")\n aws_creds.values.first\n else\n aws_creds[profile]\n end\n\n if entries\n config[:aws_access_key_id] = entries[\"AWSAccessKeyId\"] || entries[\"aws_access_key_id\"]\n config[:aws_secret_access_key] = entries[\"AWSSecretKey\"] || entries[\"aws_secret_access_key\"]\n config[:aws_session_token] = entries[\"AWSSessionToken\"] || entries[\"aws_session_token\"]\n else\n raise ArgumentError, \"The provided --aws-profile '#{profile}' is invalid. Does the credential file at '#{aws_cred_file_location}' contain this profile?\"\n end\n end", "def validate_aws_config!(keys = %i{aws_access_key_id aws_secret_access_key})\n errors = [] # track all errors so we report on all of them\n\n validate_aws_config_file! if config[:aws_config_file]\n unless config[:use_iam_profile] # skip config file / key validation if we're using iam profile\n # validate the creds file if:\n # aws keys have not been passed in config / CLI and the default cred file location does exist\n # OR\n # the user passed aws_credential_file\n if (config.keys & %i{aws_access_key_id aws_secret_access_key}).empty? && aws_cred_file_location ||\n config[:aws_credential_file]\n\n unless (config.keys & %i{aws_access_key_id aws_secret_access_key}).empty?\n errors << \"Either provide a credentials file or the access key and secret keys but not both.\"\n end\n\n validate_aws_credential_file!\n end\n\n keys.each do |k|\n pretty_key = k.to_s.tr(\"_\", \" \").gsub(/\\w+/) { |w| (w =~ /(ssh)|(aws)/i) ? w.upcase : w.capitalize }\n if config[k].nil?\n errors << \"You did not provide a valid '#{pretty_key}' value.\"\n end\n end\n\n if errors.each { |e| ui.error(e) }.any?\n exit 1\n end\n end\n end", "def valid_credentials?\n return false if Sailpoint.config.username.blank? && Sailpoint.config.password.blank?\n\n !Sailpoint.config.hashed_credentials.blank?\n end", "def credentials?\n if credentials[:access_token]\n true\n elsif credentials[:client_id] && credentials[:client_secret]\n true\n else\n false\n end\n end", "def check_credentials\n raise \"Please set load_configuration with #{RightSignature2013::Connection.api_token_keys.join(',')} or #{RightSignature2013::Connection.oauth_keys.join(',')}\" unless has_api_token? 
|| has_oauth_credentials?\n end", "def valid?\n @s3_region && @aws_bucket_name && @aws_access_key_id && @aws_secret_access_key\n end", "def aws?\n bosh_provider == \"aws\"\n end", "def valid_credentials?\n return false unless api_key?\n !activities.all.nil?\n rescue Rescuetime::Errors::InvalidCredentialsError\n false\n end", "def check_creds()\n debug_msg(\"Checking credentials for profile \\\"#{$profile}\\\"\")\n my_id=\"\"\n begin\n sts = Aws::STS::Client.new(region: \"eu-west-1\", credentials: $credentials)\n my_id= sts.get_caller_identity().account\n rescue => err\n err_msg (\"Unable to login using profile #{$profile}: #{err}\")\n exit(E_PROFILE_ERR)\n end\nend", "def valid_credentials?\n location = self.class.default_location\n find_rates(location,location,Package.new(100, [5,15,30]), :test => test_mode)\n rescue Omniship::ResponseError\n false\n else\n true\n end", "def require_aws_keys()\n unless verify_aws_keys()\n error(NO_AWS_KEYS_ERROR_MESSAGE)\n end\n end", "def required_credentials?(credentials = {})\n credentials_error unless REQUIRED_KEYS.all? { |k| credentials[k] }\n end", "def valid_credentials?(credentials)\n credentials_error if !credentials.is_a?(Hash) || credentials.empty?\n required_credentials?(credentials)\n end", "def use_authentication?\n auth = _get_option('use_authentication')\n if !auth || (_get_entity_detail('source') == \"configuration\")\n _log 'Cannot use authentication if bucket belongs to API key as false positives will occur.'\n auth = false\n end\n auth\n end", "def valid_credentials?\n cred = load_credentials if File.file?(credentials_file)\n cred && cred.has_key?(:uuid) && cred.has_key?(:token)\n end", "def checkS3Connection\n begin\n s3 = createS3Connection\n test_query = s3.buckets.collect(&:name)\n session[:s3connection] = \"Established\"\n rescue Exception => error\n session[:s3connection] = \"Disconnected (Error)\"\n flash.now[:danger] = \"<strong>Error!</strong>\".html_safe + \" Problem connecting to S3 Endpoint: #{error}.\"\n end\n end", "def credentials?\n oauth.values.all? || basic_auth.values.all?\n end", "def credentials_valid?\n zone_load_multi['result'] == 'success' rescue false\n end", "def credentials_valid?\n zone_load_multi['result'] == 'success' rescue false\n end", "def validate_aws_config_file!\n config_file = config[:aws_config_file]\n Chef::Log.debug \"Using AWS config file at #{config_file}\"\n\n raise ArgumentError, \"The provided --aws_config_file (#{config_file}) cannot be found on disk.\" unless File.exist?(config_file)\n\n aws_config = ini_parse(File.read(config_file))\n profile_key = config[:aws_profile]\n profile_key = \"profile #{profile_key}\" if profile_key != \"default\"\n\n unless aws_config.values.empty?\n if aws_config[profile_key]\n config[:region] = aws_config[profile_key][\"region\"]\n else\n raise ArgumentError, \"The provided --aws-profile '#{profile_key}' is invalid.\"\n end\n end\n end", "def valid_credentials?(credentials)\n !credentials[:username].to_s.empty? && !credentials[:password].to_s.empty?\n end", "def catch_aws_errors\n yield\n rescue Aws::Errors::MissingCredentialsError\n # The AWS error here is unhelpful:\n # \"unable to sign request without credentials set\"\n Inspec::Log.error \"It appears that you have not set your AWS credentials. You may set them using environment variables, or using the 'aws://region/aws_credentials_profile' target. 
See https://www.inspec.io/docs/reference/platforms for details.\"\n fail_resource(\"No AWS credentials available\")\n rescue Aws::Errors::ServiceError => e\n fail_resource e.message\n end", "def catch_aws_errors\n yield\n rescue Aws::Errors::MissingCredentialsError\n # The AWS error here is unhelpful:\n # \"unable to sign request without credentials set\"\n Inspec::Log.error \"It appears that you have not set your AWS credentials. You may set them using environment variables, or using the 'aws://region/aws_credentials_profile' target. See https://www.inspec.io/docs/reference/platforms for details.\"\n fail_resource(\"No AWS credentials available\")\n rescue Aws::Errors::ServiceError => e\n fail_resource e.message\n end", "def has_oauth_credentials?\n return false if @configuration.nil?\n RightSignature2013::Connection.oauth_keys.each do |key| \n return false if @configuration[key].nil? || @configuration[key].match(/^\\s*$/)\n end\n\n return true\n end", "def validate_credentials(options = {})\n !self.class.new(options).user.nil?\n rescue Nearmiss::Unauthorized\n false\n end", "def check_environment\n # Also, your VMs require public ssh keys for the root user\n # to exist that match your CLOUD_KEY private key path\n #\n # You must also have the following values set in your environment:\n #\n # ENV[\"API_KEY\"] -- cloudstack api key\n # ENV[\"API_SECRET\"] -- cloudstack api secret\n # ENV[\"API_URL\"] -- cloudstack api endpoint\n # ENV[\"CLOUD_KEY\"] -- local file path to your private SSH key material\n #\n %w{API_KEY API_SECRET API_URL CLOUD_KEY}.each do |var|\n raise \"ERROR: you must set #{var} in you environment\" unless ENV[var]\n end\n end", "def credentials?\n credentials.values.all?\n end", "def application_credentials?\n !!application_credentials\n end", "def connect_s3( access_key, secret_key )\n begin\n $s3_interface = RightAws::S3Interface.new( access_key, secret_key )\n return true\n rescue Exception => e\n $stderr.puts( \"connect_s3: exception in connecting to AWS (exception: \" + e.to_s() + \")\" )\n return false\n end\nend", "def authenticate?(params)\n user = get_user(params['AWSAccessKeyId'])\n return false if !user\n \n signature_params = params.reject { |key,value| \n key=='Signature' or key=='file' }\n\n signature = AWS.encode(\n user[:password], \n AWS.canonical_string(signature_params, @server_host),\n false)\n \n return params['Signature']==signature\n end", "def authenticable?\n Chimps.config[:catalog][:key] && Chimps.config[:catalog][:secret]\n end", "def get_credentials\n # http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html#cli-environment\n credentials_file = ENV.fetch(\"AWS_SHARED_CREDENTIALS_FILE\", ENV[\"AWS_CONFIG_FILE\"])\n shared_creds = ::Aws::SharedCredentials.new(\n profile_name: profile_name,\n path: credentials_file\n )\n instance_profile_creds = ::Aws::InstanceProfileCredentials.new(retries: 1)\n\n if ENV[\"AWS_ACCESS_KEY_ID\"] && ENV[\"AWS_SECRET_ACCESS_KEY\"]\n creds = ::Aws::Credentials.new(\n ENV[\"AWS_ACCESS_KEY_ID\"],\n ENV[\"AWS_SECRET_ACCESS_KEY\"],\n ENV[\"AWS_SESSION_TOKEN\"]\n )\n elsif shared_creds.set?\n creds = shared_creds\n elsif instance_profile_creds.set?\n creds = instance_profile_creds\n else\n raise LoadCredentialsError, \"Could not load credentials from the environment variables, the .aws/credentials file or the metadata service\"\n end\n creds\n end", "def authorized?\n auth_config = settings.config['authentication']\n @auth ||= Rack::Auth::Basic::Request.new(request.env)\n @auth.provided? 
and @auth.basic? and @auth.credentials and @auth.credentials == [auth_config['username'], auth_config['password']]\n end", "def configure_aws_region settings\n settings[:aws_region] ||= settings[:availability_zones].first.gsub(/^(\\w+-\\w+-\\d)[a-z]/, '\\1')\n settings[:ec2_url] ||= \"https://#{settings[:aws_region]}.ec2.amazonaws.com\"\n unless ((ENV['EC2_URL'].to_s == '' && settings[:aws_region] == 'us-east-1') || (ENV['EC2_URL'] == settings[:ec2_url]))\n warn \"******\\nThe EC2_URL environment variable should probably be #{settings[:ec2_url]} (from your availability zone), not #{AWS::EC2::DEFAULT_HOST}. Try invoking 'export EC2_URL=#{settings[:ec2_url]}' and re-run.\\n******\"\n end\nend", "def ask_for_credentials\n puts \"Enter your AWS credentials.\"\n\n print \"Access Key ID: \"\n user = ask\n\n print \"Secret Access Key: \"\n password = ask_for_password\n\n return [user, password]\n end", "def setup_credentials\n unless yes?('Would you like to configure and store your credentials?')\n $stderr.puts \"Unable to proceed without credentials\"\n exit 1\n end\n\n begin\n choice = choose do |menu|\n menu.prompt = 'Which type of credentials would you like to set up? (token is highly recommended) '\n menu.choices(:password, :token, :none)\n end.to_sym\n end until [:password, :token, :none].include? choice\n\n if choice == :password\n setup_password_credentials\n elsif choice == :token\n setup_token_credentials\n else\n return false\n end\n rescue StandardError => e\n options.debug ? warn(e) : raise(e)\n false\n end", "def run_me\r\n if deny_uploads_without_server_side_aws_kms_encryption?(\r\n Aws::S3::Client.new(region: 'us-west-2'),\r\n 'doc-example-bucket'\r\n )\r\n puts 'Policy added.'\r\n else\r\n puts 'Policy not added.'\r\n end\r\nend", "def verify_credentials\n if auth_supplied?\n response = get \"account/verify_credentials\"\n response.ok? ? response : false\n else\n false\n end\n end", "def check_credentials(username, password)\n return false\n end", "def aws_acl?\n ['private', 'public-read', 'public-read-write', 'authenticated-read', 'bucket-owner-read', 'bucket-owner-full-control'].include?(aws_access_control_list)\n end", "def correct_credentials?\n correct_username = 'admin'\n correct_password = 'admin'\n input_username = @request.params['username']\n input_password = @request.params['password']\n correct_username == input_username && correct_password == input_password\n end", "def valid?(env)\n request = Rack::Request.new(env)\n unless (access_id = request.params['AccessKeyID'])\n #puts \"AccessKeyID not found in Params\"\n return false\n end\n unless (secret_key = @accounts[access_id])\n #puts \"No SecretKey found for AccessKeyID #{access_id.inspect}\"\n return false\n end\n AuthenticatedApi::Server.valid_signature?(request, secret_key)\n end", "def verify_credentials\n return false unless has_credentials? 
# make sure we actually have an api token set\n begin\n if defined?(@api)\n valid = twitter_api.client.authorized?\n else\n twitter_api # just fetching it the first time does an authorized check\n valid = true\n end\n rescue InvalidToken => ex\n valid = false\n rescue Exception => ex\n valid = true # non twitter validation error assume ok since could be network issue\n end\n valid\n end", "def allow_auth_failures?\n return true if node.key?('virtualization') && node['virtualization']['system'] == 'vbox'\n return true if ::File.exist?('/.dockerenv')\n return true if ENV['CI']\n false\n end", "def check_settings(*required_values)\n has_all_settings =\n settings.values_at(*required_values).all? do |setting|\n setting && !setting.empty?\n end\n\n unless settings.is_a?(Hash) && has_all_settings\n fail Error::MissingCredentials,\n \"Please provide all credential values. Required: #{required_values}\"\n end\n end", "def loadProfile(profile=nil)\n # CAUTION: We are returning if profile is nil because otherwise AWS will try to use \"Default\" profile\n return nil if profile.nil?\n begin #ruby syntax equivalant to try\n # Read the credentials from the given profile\n credentials = Aws::SharedCredentials.new(profile_name: profile)\n # make sure profile exists before proceeding\n rescue Exception => e # ruby syntax equivalant to catch\n puts \"\\e[31mERROR: #{e.message}\\e[0m\"\n exit 1\n end\n return credentials if credentials.loadable?\n puts \"\\e[31mERROR: Credentials are not loadable. Make sure you have ~/.aws configured correctly.\\e[0m\"\n return nil \nend", "def initialize(options)\n @settings = options\n\n check_settings(:aws_access_key_id, :aws_default_region,\n :aws_secret_access_key)\n end", "def validate(credentials)\n begin\n $LOG.debug(\"Starting Active Resource authentication\")\n result = Helpers::Identity.authenticate(credentials.except(:request))\n extract_extra_attributes(result) if result\n !!result\n rescue ::ActiveResource::ConnectionError => e\n if e.response.blank? # band-aid for ARes 2.3.x -- craps out if to_s is called without a response\n e = e.class.to_s\n end\n $LOG.warn(\"Error during authentication: #{e}\")\n false\n end\n end", "def use_existing_vpc?\n return false unless configured?\n\n @aws_config['use_existing_vpc']\n end", "def authenticate_if_needed\n # Disable this extra authentication in test mode\n return true if Rails.env.test?\n if (is_hidden || is_staging) && !is_api_or_pdf\n authenticate_or_request_with_http_basic do |username, password|\n username == \"samvera\" && password == \"hyku\"\n end\n end\n end", "def authorized?\n @auth ||= Rack::Auth::Basic::Request.new(request.env)\n \n # read credentials from file\n credentials = open(\"credentials.txt\").read.split(\"\\n\").map{|c| c !~ /^#/ ? c : nil}.compact\n \n @auth.provided? && @auth.basic? 
&& @auth.credentials && @auth.credentials == credentials\n end", "def in_aws_account?(*accts)\n return false if self.quiescent?\n return false unless self['ec2']\n\n accts.flatten!\n accts.include?(self['ec2']['account_id'])\n end", "def prereqs_check_aws_config\n exe \"aws ec2 describe-vpcs | jq -e '.Vpcs[0] | select(.VpcId==\\\"#{AWS_DEFAULT_VC}\\\")'\"\nend", "def run_me\r\n bucket_name = 'doc-example-bucket'\r\n permission = 'READ'\r\n owner_id = 'b380d412791d395dbcdc1fb1728b32a7cd07edae6467220ac4b7c0769EXAMPLE'\r\n region = 'us-west-2'\r\n s3_client = Aws::S3::Client.new(region: region)\r\n\r\n if bucket_acl_set_for_owner_id?(\r\n s3_client,\r\n bucket_name,\r\n permission,\r\n owner_id\r\n )\r\n puts 'Bucket ACL set.'\r\n else\r\n puts 'Bucket ACL not set.'\r\n end\r\nend", "def application_authenticated?\n !!(@client_id && @client_secret)\n end", "def aws_credentials(access_key, secret_access_key)\n unless access_key.empty? || secret_access_key.empty?\n credentials = { access_key_id: access_key, secret_access_key: secret_access_key }\n File.write('../.credentials.yml', credentials.to_yaml)\n $preferences_window.destroy\n else \n Tk.messageBox('type' => 'ok',\n 'icon' => 'error',\n 'title' => 'Keys',\n 'message' => 'Access and secret keys must not be empty') \n end \n end", "def aws_credentials\n {}\n end", "def sauce?\n !!(@user && @key && @host && @port)\n end", "def should_authenticate?(env)\n @config['should_authenticate_check'] ? @config['should_authenticate_check'].call(env) : true\n end", "def credentials?(uri, challenges); end", "def verify_credentials!\n raise AuthenticationError.new(\"missing client code\") if Applitrack.client_code.nil? || Applitrack.client_code.empty?\n raise AuthenticationError.new(\"missing username\") if Applitrack.username.nil? || Applitrack.username.empty?\n raise AuthenticationError.new(\"missing password\") if Applitrack.password.nil? 
|| Applitrack.password.empty?\n end", "def use_authentication?\n auth = _get_option('use_authentication')\n if auth && _get_entity_detail('belongs_to_api_key')\n _log 'Cannot use authentication if bucket belongs to API key as false positives will occur.'\n _log 'Defaulting to using unauthenticated techniques.'\n auth = false\n end\n auth\n end", "def authentication_valid?(username,password)\n if EffectiveQbSync.quickbooks_username.present?\n return false unless Array(EffectiveQbSync.quickbooks_username).include?(username)\n end\n\n Array(EffectiveQbSync.quickbooks_password).include?(password)\n end", "def authenticated?\n response = institutions\n case response.code\n when 401\n false\n else\n true\n end\n end", "def old_test_aws_keys (key, secret)\n\n creds = {\n :access_key_id => key,\n :secret_access_key => secret\n }\n\n result = {\n :access_key_id => key,\n :secret_access_key => secret,\n :valid => false,\n :s3_active => false,\n :ec2_active => false,\n :s3_buckets => nil,\n :ec2_instances => nil,\n :s3_error => nil,\n :ec2_error => nil,\n }\n\n begin\n result[:s3_buckets] = AWS::S3.new(creds).buckets.count\n result[:s3_active] = true\n result[:valid] = true\n rescue AWS::S3::Errors::InvalidAccessKeyId => e\n result[:s3_error] = e.to_s\n rescue AWS::S3::Errors::SignatureDoesNotMatch => e\n result[:s3_error] = e.to_s\n rescue AWS::S3::Errors::AccessDenied => e\n result[:s3_error] = e.to_s\n result[:valid] = true\n rescue AWS::S3::Errors::NotSignedUp => e\n result[:s3_error] = e.to_s\n result[:valid] = true\n end\n\n begin\n result[:ec2_instances] = AWS::EC2.new(creds).instances.count\n result[:ec2_active] = true\n result[:valid] = true\n rescue AWS::EC2::Errors::AuthFailure => e\n result[:ec2_error] = e.to_s\n rescue AWS::EC2::Errors::InvalidAccessKeyId => e\n result[:ec2_error] = e.to_s\n rescue AWS::EC2::Errors::SignatureDoesNotMatch => e\n result[:ec2_error] = e.to_s\n rescue AWS::EC2::Errors::UnauthorizedOperation => e\n result[:ec2_error] = e.to_s\n result[:valid] = true\n rescue AWS::EC2::Errors::OptInRequired => e\n result[:ec2_error] = e.to_s\n result[:valid] = true\n end\n result\nend", "def authorized?\n @auth ||= Rack::Auth::Basic::Request.new(request.env)\n @auth.provided? &&\n @auth.basic? 
&&\n @auth.credentials &&\n check(@auth.credentials)\n end", "def check_security_for_service(_service_name, _vault_driver, _secrets_manager)\n get_all_aws_keys\n is_ses_key=false\n service_security_info=nil\n code=nil\n\n if _secrets_manager != nil\n puts \"loading security info for repo from secrets manager\"\n _secrets_manager.repository = _service_name\n service_security_info, code = _secrets_manager.get_secrets_hash\n puts \"security info tried to be loaded from secrets manager\"\n else\n throw \"can't proceed with a nil driver\" if _vault_driver.nil?\n throw \"can't proceed with a offline driver\" if !_vault_driver.get_vault_status\n #get the keys\n puts \"loading security info for repo from vault\"\n service_security_info, code = get_service_info_from_vault_driver(_vault_driver, _service_name)\n puts \"security info tried to be loaded from vault\"\n end\n\n return code if code > 399\n return nil,nil if (!service_security_info.key?(AWS_SERVICE_KEY))\n puts \"looking for #{service_security_info[AWS_SERVICE_KEY]}(#{service_security_info[AWS_SERVICE_KEY].class}) in the keys directory\"\n selected_key_information=@keys_data_index[service_security_info[AWS_SERVICE_KEY]]\n return nil,\"can't find data entry for this key: #{service_security_info[AWS_SERVICE_KEY]}\" if selected_key_information.nil?\n is_ses_key,error=is_key_ses_key(service_security_info[AWS_SERVICE_KEY])\n throw \"Can't check ses key - #{error}\" if (!error.nil?)\n puts \"service is not using ses \" if is_ses_key==false\n puts \"This is a ses service\" if is_ses_key==true\n username=selected_key_information[:username]\n all_user_keys_info=@all_users_keys[username]\n updated_key_info,error=update_user_iam_keys({username=>all_user_keys_info})\n puts \"error: #{error}\" if error!=nil\n puts \"nothing to update\" if updated_key_info.nil?\n if error.nil? 
&& !updated_key_info.nil?\n puts \"new key created:#{updated_key_info.access_key.access_key_id}\"\n update_hash={AWS_SERVICE_KEY=>updated_key_info.access_key.access_key_id,\"aws_service_secret\"=>updated_key_info.access_key.secret_access_key}\n #if is_ses_key\n # puts \"this service is using ses\"\n # update_hash[\"email_client_username\"]=updated_key_info.access_key.access_key_id\n # update_hash[\"email_client_password\"]=calculate_ses_password(updated_key_info.access_key.secret_access_key)\n #end\n if _secrets_manager != nil\n code = _secrets_manager.set_secret_value(update_hash, true)\n else\n code=_vault_driver.put_json_for_repo(_service_name,\n update_hash,\n true,) \n end\n end\n return code,nil\n end", "def secrets_exists?\n len = PrivateChef.credentials.length\n len -= 1 if PrivateChef.credentials.exist?('chef-server', 'webui_key')\n len -= 1 if PrivateChef.credentials.exist?('chef-server', 'superuser_key')\n len > 0\n end", "def assert_configuration_valid(credentials)\n mandatory_config_keys =\n [ENTERPRISE_SERVER_URL_SERVICE_CONFIG_KEY, SENSOR_HOST_SERVICE_CONFIG_KEY,\n SENSOR_PORT_SERVICE_CONFIG_KEY, SEEKER_SERVER_URL_CONFIG_KEY]\n mandatory_config_keys.each do |config_key|\n raise \"'#{config_key}' credential must be set\" unless credentials[config_key]\n end\n end", "def invalid_identity_credentials?\n params[:provider] == \"identity\" && params.has_key?(:auth_key)\n end", "def authenticate\n config = YAML.load_file(\"#{Rails.root}/config/vcloudair.yml\")[Rails.env]\n\n if config[\"api-shared-secret\"] != request.headers[\"X-Api-Shared-Secret\"]\n render text: \"api-shared-secret/X-Api-Shared-Secret mismatch error\", status: 403\n end\n end", "def verify_credentials(_auth_type = nil, _options = {})\n begin\n connect\n rescue => err\n raise MiqException::MiqInvalidCredentialsError, err.message\n end\n\n true\n end", "def fixup_aws_environment_variables\n unless node['aws']['aws_access_key_id'].nil?\n ENV['AWS_ACCESS_KEY_ID'] = node['aws']['aws_access_key_id']\n ENV['AWS_SECRET_ACCESS_KEY'] = node['aws']['aws_secret_access_key']\n end\n ENV['AWS_REGION'] = node['aws']['region'] unless node['aws']['region'].nil?\nend", "def prod_s3_credentials\n {:bucket => \"alphadelta-pro\", :access_key_id => ENV['S3_KEY'], :secret_access_key => ENV['S3_SECRET_KEY'] }\n end", "def validate_config!\n valid = false\n begin\n names_to_ids # Force re-evaluation of ids\n valid = (!image_id.nil? && !flavor_id.nil? && !region_id.nil?)\n rescue Excon::Errors::Unauthorized => e\n add_error \"Invalid credentials\"\n end\n valid\n end", "def authorized(authorization)\n if (ENV['SECRET_NAME'])\n creds = get_secret(ENV['SECRET_NAME'])\n calculated = \"Basic \" + Base64.encode64(\"#{creds['a2user'] || creds['A2USER']}:#{creds['a2pass'] || creds['A2PASS']}\")\n return (authorization || \"\").strip == (calculated || \"\").strip\n end\n if (ENV['A2USER'] && ENV['A2PASS'])\n calculated = \"Basic \" + Base64.encode64(\"#{ENV['A2USER']}:#{ENV['A2PASS']}\")\n return (authorization || \"\").strip == (calculated || \"\").strip\n end\n return false\nend", "def authorized?\n @auth ||= Rack::Auth::Basic::Request.new(request.env)\n user = ENV[\"HTTP_USER\"]\n pass = ENV[\"HTTP_PASS\"]\n @auth.provided? && @auth.basic? && @auth.credentials && @auth.credentials == [user, pass]\n end", "def authorized?\n @auth ||= Rack::Auth::Basic::Request.new(request.env)\n return @auth.provided? && @auth.basic? 
&& @auth.credentials && @auth.credentials == [@@username, @@password]\n end", "def authorized?\n @auth ||= Rack::Auth::Basic::Request.new(request.env)\n\n @auth.provided? && @auth.basic? && @auth.credentials && @auth.credentials == [\n ENV[\"HTTP_USER\"],\n ENV[\"HTTP_PASS\"]\n ]\n end", "def check_client_authentication\n raise GithubAdapterError.new(\n \"Error Invalid Client Credentials!\", 401\n ) unless authenticated?\n end", "def valid?\n header.schema == 'Basic' && credentials.length == 2\n end", "def should_run_ssh_wizard?\n not File.exists? ssh_priv_key_file_path\n end", "def google_oauth_enabled?\n ENV.has_key?('GOOGLE_CLIENT_ID') and ENV.has_key?('GOOGLE_CLIENT_SECRET')\nend", "def has_configuration?\n @config[:username] and @config[:password]\n end", "def authenticated?(config)\n config.has_key?(:username) && config.has_key?(:password)\n end", "def run_me\r\n bucket_name = 'doc-example-bucket'\r\n kms_master_key_id = '9041e78c-7a20-4db3-929e-828abEXAMPLE'\r\n region = 'us-west-2'\r\n s3_client = Aws::S3::Client.new(region: region)\r\n\r\n if default_bucket_encryption_sse_cmk_set?(\r\n s3_client,\r\n bucket_name,\r\n kms_master_key_id\r\n )\r\n puts 'Default encryption state set.'\r\n else\r\n puts 'Default encryption state not set.'\r\n end\r\nend", "def aws_secret_access_key \n if @aws_secret_access_key\n return @aws_secret_access_key\n elsif !ENV.has_key?('AWS_SECRET_ACCESS_KEY')\n raise FogSettingError, \"secret access key must be set in ENV or configure block\"\n end\n @aws_secret_access_key ||= ENV['AWS_SECRET_ACCESS_KEY']\n end", "def check_authn\n response = self.class.head @endpoint\n\n return true if response.success?\n\n if response.code == 401 && response.headers[\"www-authenticate\"]\n if response.headers[\"www-authenticate\"].start_with? \"Keystone\"\n keystone_uri = /^Keystone uri='(.+)'$/.match(response.headers[\"www-authenticate\"])[1]\n\n if keystone_uri\n if @auth_options[:type] == \"x509\"\n body = { \"auth\" => { \"voms\" => true } }\n else\n body = {\n \"auth\" => {\n \"passwordCredentials\" => {\n \"username\" => @auth_options[:username],\n \"password\" => @auth_options[:password]\n }\n }\n }\n end\n\n headers = self.class.headers.clone\n headers['Content-Type'] = \"application/json\"\n headers['Accept'] = headers['Content-Type']\n\n response = self.class.post(keystone_uri + \"/v2.0/tokens\", :body => body.to_json, :headers => headers)\n\n if response.success?\n self.class.headers['X-Auth-Token'] = response['access']['token']['id']\n return true\n end\n end\n end\n end\n\n false\n end", "def validate_authentication_options!\n auth_mech = options[:auth_mech]\n user = options[:user]\n password = options[:password]\n auth_source = options[:auth_source]\n mech_properties = options[:auth_mech_properties]\n\n if auth_mech.nil?\n if user && user.empty?\n raise Mongo::Auth::InvalidConfiguration, 'Empty username is not supported for default auth mechanism'\n end\n\n if auth_source == ''\n raise Mongo::Auth::InvalidConfiguration, 'Auth source cannot be empty for default auth mechanism'\n end\n\n return\n end\n\n if !Mongo::Auth::SOURCES.key?(auth_mech)\n raise Mongo::Auth::InvalidMechanism.new(auth_mech)\n end\n\n if user.nil? && !%i(aws mongodb_x509).include?(auth_mech)\n raise Mongo::Auth::InvalidConfiguration, \"Username is required for auth mechanism #{auth_mech}\"\n end\n\n if password.nil? 
&& !%i(aws gssapi mongodb_x509).include?(auth_mech)\n raise Mongo::Auth::InvalidConfiguration, \"Password is required for auth mechanism #{auth_mech}\"\n end\n\n if password && auth_mech == :mongodb_x509\n raise Mongo::Auth::InvalidConfiguration, 'Password is not supported for :mongodb_x509 auth mechanism'\n end\n\n if auth_mech == :aws && user && !password\n raise Mongo::Auth::InvalidConfiguration, 'Username is provided but password is not provided for :aws auth mechanism'\n end\n\n if %i(aws gssapi mongodb_x509).include?(auth_mech)\n if !['$external', nil].include?(auth_source)\n raise Mongo::Auth::InvalidConfiguration, \"#{auth_source} is an invalid auth source for #{auth_mech}; valid options are $external and nil\"\n end\n else\n # Auth source is the database name, and thus cannot be the empty string.\n if auth_source == ''\n raise Mongo::Auth::InvalidConfiguration, \"Auth source cannot be empty for auth mechanism #{auth_mech}\"\n end\n end\n\n if mech_properties && !%i(aws gssapi).include?(auth_mech)\n raise Mongo::Auth::InvalidConfiguration, \":mechanism_properties are not supported for auth mechanism #{auth_mech}\"\n end\n end", "def connect_to_s3\n unless AWS::S3::Base.connected?\n AWS::S3::Base.establish_connection!(:access_key_id => AWSCredentials.access_key, :secret_access_key => AWSCredentials.secret_access_key)\n end\n end", "def private_key_required?\n @uri.ssh?\n end", "def credentials? uri, challenges\n challenges.any? do |challenge|\n credentials_for uri, challenge.realm_name\n end\n end", "def api_accessible?\n oauth_token.present? && oauth_secret.present?\n end", "def credentials_obj\n @credentials_obj ||= Aws::Credentials.new(\n access_key,\n secret_key\n )\n end", "def has_valid_credentials\n\t\tif session[:player_id] == nil\n\t\t\treturn false\n\t\telse\n\t\t\tsessionPlayer = Player.find_by_id(session[:player_id])\n\t\t\treturn ((session[:email] == sessionPlayer.email) and (params[:id].to_s == session[:player_id].to_s))\n\t\tend\n\tend", "def validate_credentials(credentials)\n Client.new(\n AccesstypeAdyen::CONFIG[credentials[:environment].to_sym],\n credentials\n ).validate_credentials(\n credentials[:merchant_account]\n )\n end", "def consumer_site_is_authorized?\n return false unless current_user\n if user_owns_identity_url?\n return true if (trust = current_user.trusts.find_by_trust_root(openid_request.trust_root)) && trust.active?\n end\n return false\n end", "def valid?\n !Kontagent.configuration.base_url.nil? && !Kontagent.configuration.api_key.nil? && !Kontagent.configuration.secret_key.nil? \n end" ]
[ "0.7359459", "0.70773035", "0.65204555", "0.6419615", "0.6400618", "0.6329888", "0.6253651", "0.61920524", "0.6189333", "0.6185766", "0.6180546", "0.6120668", "0.6113438", "0.6111507", "0.6065728", "0.5973568", "0.59704953", "0.59539354", "0.5948529", "0.5948529", "0.59207237", "0.5911666", "0.5899773", "0.5899773", "0.5852626", "0.5799478", "0.5767784", "0.5765944", "0.5762014", "0.57595193", "0.57541335", "0.57336426", "0.57200116", "0.57152873", "0.5712766", "0.5710691", "0.56818426", "0.566755", "0.5663788", "0.56441784", "0.562703", "0.56083226", "0.5608216", "0.559171", "0.55680674", "0.55680215", "0.5525768", "0.5510585", "0.5508392", "0.54986405", "0.5498605", "0.54769874", "0.5465928", "0.54621255", "0.54406476", "0.54241455", "0.54100126", "0.5409094", "0.5394844", "0.53710955", "0.53664035", "0.53631234", "0.5359527", "0.53563905", "0.5342064", "0.53355175", "0.5333073", "0.5326855", "0.53197014", "0.5316551", "0.5311123", "0.5306542", "0.530578", "0.52862066", "0.5281018", "0.5280989", "0.52734196", "0.5271209", "0.5270665", "0.52648544", "0.52619547", "0.52602744", "0.5236409", "0.52337086", "0.5233499", "0.5232096", "0.52294457", "0.5221871", "0.521896", "0.5217292", "0.5215272", "0.5213764", "0.52102685", "0.5205687", "0.520211", "0.51993436", "0.5199006", "0.519066", "0.5190013" ]
0.58678293
25
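The document field of the record above validates the CPI's `credentials_source` option (either `static` with explicit keys, or `env_or_profile` with none). The sketch below restates that check as a standalone function so it can be exercised directly; the `'aws' => {...}` options shape is taken from the snippet itself, while the free-function wrapper and the usage lines are assumptions added for demonstration:

```ruby
# Minimal, standalone restatement of the credentials_source check from the
# record's document field; not the original class method.
def validate_credentials_source(options)
  aws    = options.fetch('aws', {})
  source = aws['credentials_source'] || 'static'

  unless %w[static env_or_profile].include?(source)
    raise ArgumentError, "Unknown credentials_source #{source}"
  end

  has_keys = aws['access_key_id'] && aws['secret_access_key']

  if source == 'static' && !has_keys
    raise ArgumentError, 'Must use access_key_id and secret_access_key with static credentials_source'
  end

  if source == 'env_or_profile' && (aws['access_key_id'] || aws['secret_access_key'])
    raise ArgumentError, "Can't use access_key_id and secret_access_key with env_or_profile credentials_source"
  end

  true
end

# Example usage:
#   validate_credentials_source('aws' => { 'credentials_source' => 'env_or_profile' })  # => true
#   validate_credentials_source('aws' => { 'credentials_source' => 'static' })          # raises ArgumentError
```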
Generates initial agent settings. These settings will be read by the agent from the AWS registry (also a BOSH component) on a target instance. Disk conventions for Amazon are: system disk: /dev/sda; ephemeral disk: /dev/sdb. EBS volumes can be configured to map to other device names later (sdf through sdp; some kernels will also remap sd to xvd).
def initial_agent_settings(agent_id, network_spec, environment, root_device_name, block_device_agent_info) settings = { "vm" => { "name" => "vm-#{SecureRandom.uuid}" }, "agent_id" => agent_id, "networks" => agent_network_spec(network_spec), "disks" => { "system" => root_device_name, "persistent" => {} } } settings["disks"].merge!(block_device_agent_info) settings["disks"]["ephemeral"] = settings["disks"]["ephemeral"][0]["path"] settings["env"] = environment if environment settings.merge(agent_properties) end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initial_agent_settings(vm_id, agent_id, network_spec, ephemeral_disk,\n existing_disk, environment, blobstore, mbus)\n disk_letters = ('a'..'z').to_a\n config = { vm: { name: vm_id }, agent_id: agent_id,\n environment: environment, blobstore: blobstore,\n mbus: mbus, networks: network_spec,\n disks: { system: \"/dev/sd#{disk_letters.shift}\",\n persistent: {} } }\n if ephemeral_disk\n config[:disks][:ephemeral] = \"/dev/sd#{disk_letters.shift}\"\n end\n config\n end", "def initial_agent_settings(uuid, agent_id, network_spec, environment, has_ephemeral)\n settings = {\n 'vm' => {\n 'name' => uuid,\n },\n 'agent_id' => agent_id,\n 'networks' => agent_network_spec(network_spec),\n 'disks' => {\n 'system' => '/dev/sda',\n 'persistent' => {},\n },\n }\n\n settings['disks']['ephemeral'] = has_ephemeral ? '/dev/sdb' : nil\n settings['env'] = environment if environment\n settings.merge(@agent_properties)\n end", "def initial_agent_settings(agent_id, network_spec, environment, vm_params, config)\n settings = {\n 'vm' => {\n 'name' => vm_params[:name]\n },\n 'agent_id' => agent_id,\n 'networks' => _agent_network_spec(network_spec),\n 'disks' => {\n 'system' => '/dev/sda',\n 'persistent' => {}\n }\n }\n\n unless vm_params[:ephemeral_disk].nil?\n # Azure uses a data disk as the ephemeral disk and the lun is 0\n settings['disks']['ephemeral'] = {\n 'lun' => '0',\n 'host_device_id' => Bosh::AzureCloud::Helpers::AZURE_SCSI_HOST_DEVICE_ID\n }\n end\n\n settings['env'] = environment if environment\n settings.merge(config.agent.to_h)\n end", "def create_instances\n min_count = max_count = @bs.number_of_nodes\n puts \"\\nCreating #{max_count} on-demand instance(s)\"\n options = {\n 'ClientToken' => generate_token,\n 'KeyName' => Chef::Config[:knife][:aws_ssh_key_id],\n 'InstanceType' => @bs.flavor,\n 'SubnetId' => @bs[:novpc] ? nil : @bs.subnet_id,\n 'Placement.AvailabilityZone' => @bs.mixins.az.data,\n 'SecurityGroupId' => @bs.mixins.sg.data\n }\n options['EbsOptimized'] = !! @bs[:ebs_optimized]\n\n ## REVIEW\n if ami.root_device_type == \"ebs\"\n ami_map = ami.block_device_mapping.first\n block_device_mapping = {\n 'DeviceName' => ami_map['deviceName'],\n 'Ebs.VolumeSize' => ami_map['volumeSize'].to_s,\n 'Ebs.DeleteOnTermination' => ami_map['deleteOnTermination']\n }\n options['BlockDeviceMapping'] = [block_device_mapping]\n end\n\n ## Optionally only include mapped devices\n ## This way we get all of the ephemeral drives, some unmapped however\n if @bs.mixins.volume.data[:ephemeral_available]\n ephmap = @bs.mixins.volume.data.ephemeral_available.each_with_index.map do |d,i|\n {\n 'VirtualName' => \"ephemeral#{i}\",\n 'DeviceName' => d\n }\n end\n options['BlockDeviceMapping'].concat( ephmap )\n end\n\n if (max_count == 1) and @bs[:private_ip_address]\n options['PrivateIpAddress'] = @bs.private_ip_address\n puts \"Assigning IP ADDRESS : #{options['PrivateIpAddress']}\"\n end\n\n if Chef::Config[:knife][:aws_user_data]\n begin\n options['UserData']= File.read(Chef::Config[:knife][:aws_user_data])\n rescue\n ui.warn(\"Cannot read #{Chef::Config[:knife][:aws_user_data]}:\"\\\n \" #{$!.inspect}. 
Ignoring option.\")\n end\n end\n\n # -----------------------------------------------------------------\n tries = 5\n print_table(options, 'Launch Config')\n begin\n puts \"\\nSending request...\"\n response = connection.run_instances(@bs.image, min_count,\n max_count, options)\n ui.msg(response.inspect)\n rescue Exception => e\n ui.warn(\"#{e.message}\\nException creating instances\")\n if (tries -= 1) <= 0\n ui.warn(\"\\n\\nMax tries reached. Exiting.\\n\\n\")\n exit 1\n else\n ui.msg(\"Trying again.\\n\")\n retry\n end\n end\n # now we have our servers\n instances = response.body['instancesSet']\n # select only instances that have instanceId key and collect those ids\n # into an array\n @bs[:instance_ids] =\n instances.select {|i| i.has_key?('instanceId')}.collect do |i|\n i['instanceId']\n end\n\n puts \"\\nNumber of instances started: #{@bs.instance_ids.size}\\n\"\n sleep 10\n puts \"Getting servers..\"\n # collect an array of servers retrieved based on the instance ids we\n # obtained above\n @bs[:servers] = @bs.instance_ids.collect do |id|\n begin\n server = connection.servers.get(id)\n rescue Exception => e\n sleep 7\n retry\n end\n raise Ec2Error.new(\"server #{id} was nil\") if server.nil?\n server\n end\n end", "def presets\n h = Beaker::Options::OptionsHash.new\n h.merge({\n :project => 'Beaker',\n :department => 'unknown',\n :created_by => ENV['USER'] || ENV['USERNAME'] || 'unknown',\n :host_tags => {},\n :openstack_api_key => ENV.fetch('OS_PASSWORD', nil),\n :openstack_username => ENV.fetch('OS_USERNAME', nil),\n :openstack_auth_url => \"#{ENV.fetch('OS_AUTH_URL', nil)}/tokens\",\n :openstack_tenant => ENV.fetch('OS_TENANT_NAME', nil),\n :openstack_keyname => ENV.fetch('OS_KEYNAME', nil),\n :openstack_network => ENV.fetch('OS_NETWORK', nil),\n :openstack_region => ENV.fetch('OS_REGION', nil),\n :openstack_volume_support => ENV['OS_VOLUME_SUPPORT'] || true,\n :jenkins_build_url => nil,\n :validate => true,\n :configure => true,\n :log_level => 'info',\n :trace_limit => 10,\n :\"master-start-curl-retries\" => 120,\n :masterless => false,\n :options_file => nil,\n :type => 'pe',\n :provision => true,\n :preserve_hosts => 'never',\n :root_keys => false,\n :quiet => false,\n :project_root => File.expand_path(File.join(__dir__, \"../\")),\n :xml_dir => 'junit',\n :xml_file => 'beaker_junit.xml',\n :xml_time => 'beaker_times.xml',\n :xml_time_enabled => false,\n :xml_stylesheet => 'junit.xsl',\n :default_log_prefix => 'beaker_logs',\n :log_dir => 'log',\n :log_sut_event => 'sut.log',\n :color => true,\n :dry_run => false,\n :test_tag_and => '',\n :test_tag_or => '',\n :test_tag_exclude => '',\n :timeout => 900, # 15 minutes\n :fail_mode => 'slow',\n :test_results_file => '',\n :accept_all_exit_codes => false,\n :timesync => false,\n :set_env => true,\n :disable_updates => true,\n :repo_proxy => false,\n :package_proxy => false,\n :add_el_extras => false,\n :consoleport => 443,\n :pe_dir => '/opt/enterprise/dists',\n :pe_version_file => 'LATEST',\n :pe_version_file_win => 'LATEST-win',\n :host_env => {},\n :host_name_prefix => nil,\n :ssh_env_file => '~/.ssh/environment',\n :profile_d_env_file => '/etc/profile.d/beaker_env.sh',\n :dot_fog => File.join(ENV.fetch('HOME', nil), '.fog'),\n :ec2_yaml => 'config/image_templates/ec2.yaml',\n :help => false,\n :collect_perf_data => 'none',\n :puppetdb_port_ssl => 8081,\n :puppetdb_port_nonssl => 8080,\n :puppetserver_port => 8140,\n :nodeclassifier_port => 4433,\n :cache_files_locally => false,\n :aws_keyname_modifier => 
rand(10**10).to_s.rjust(10, '0'), # 10 digit random number string\n :run_in_parallel => [],\n :use_fog_credentials => true,\n :ssh => {\n :config => false,\n :verify_host_key => false,\n :auth_methods => [\"publickey\"],\n :port => 22,\n :forward_agent => true,\n :keys => [\"#{ENV.fetch('HOME', nil)}/.ssh/id_rsa\"],\n :user_known_hosts_file => \"#{ENV.fetch('HOME', nil)}/.ssh/known_hosts\",\n :keepalive => true,\n },\n })\n end", "def setDefaults\n ips = []\n if $IN_AWS\n [\"public-ipv4\", \"local-ipv4\"].each { |addr|\n begin\n Timeout.timeout(2) do\n ip = URI.open(\"http://169.254.169.254/latest/meta-data/#{addr}\").read\n ips << ip if !ip.nil? and ip.size > 0\n end\n rescue OpenURI::HTTPError, Timeout::Error, SocketError\n # these are ok to ignore\n end\n }\n elsif $IN_GOOGLE\n base_url = \"http://metadata.google.internal/computeMetadata/v1\"\n begin\n Timeout.timeout(2) do\n # TODO iterate across multiple interfaces/access-configs\n ip = URI.open(\"#{base_url}/instance/network-interfaces/0/ip\", \"Metadata-Flavor\" => \"Google\").read\n ips << ip if !ip.nil? and ip.size > 0\n ip = URI.open(\"#{base_url}/instance/network-interfaces/0/access-configs/0/external-ip\", \"Metadata-Flavor\" => \"Google\").read\n ips << ip if !ip.nil? and ip.size > 0\n end\n rescue OpenURI::HTTPError, Timeout::Error, SocketError => e\n # This is fairly normal, just handle it gracefully\n end\n end\n\n\n $CONFIGURABLES[\"allow_invade_foreign_vpcs\"][\"default\"] = false\n $CONFIGURABLES[\"public_address\"][\"default\"] = $possible_addresses.first\n $CONFIGURABLES[\"hostname\"][\"default\"] = Socket.gethostname\n $CONFIGURABLES[\"banner\"][\"default\"] = \"Mu Master at #{$CONFIGURABLES[\"public_address\"][\"default\"]}\"\n if $IN_AWS\n # XXX move this crap to a callback hook for puttering around in the AWS submenu\n aws = JSON.parse(URI.open(\"http://169.254.169.254/latest/dynamic/instance-identity/document\").read)\n iam = nil\n begin\n iam = URI.open(\"http://169.254.169.254/latest/meta-data/iam/security-credentials\").read\n rescue OpenURI::HTTPError, SocketError\n end\n # $CONFIGURABLES[\"aws\"][\"subtree\"][\"account_number\"][\"default\"] = aws[\"accountId\"]\n $CONFIGURABLES[\"aws\"][\"subtree\"][\"region\"][\"default\"] = aws[\"region\"]\n if iam and iam.size > 0\n # XXX can we think of a good way to test our permission set?\n $CONFIGURABLES[\"aws\"][\"subtree\"][\"access_key\"][\"desc\"] = $CONFIGURABLES[\"aws\"][\"subtree\"][\"access_key\"][\"desc\"] + \". Not necessary if IAM Profile #{iam.bold} has sufficient API access.\"\n $CONFIGURABLES[\"aws\"][\"subtree\"][\"access_secret\"][\"desc\"] = $CONFIGURABLES[\"aws\"][\"subtree\"][\"access_key\"][\"desc\"] + \". 
Not necessary if IAM Profile #{iam.bold} has sufficient API access.\"\n end\n end\n $CONFIGURABLES[\"aws\"][\"subtree\"][\"log_bucket_name\"][\"default\"] = $CONFIGURABLES[\"hostname\"][\"default\"]\n $CONFIGURABLES[\"google\"][\"subtree\"][\"log_bucket_name\"][\"default\"] = $CONFIGURABLES[\"hostname\"][\"default\"]\n end", "def defaults\n owner.volume(:root).reverse_merge!({\n :device => '/dev/sda1',\n :mount_point => '/',\n :mountable => false,\n })\n self.reverse_merge!({\n :availability_zones => ['us-east-1d'],\n :backing => 'ebs',\n :flavor => 't1.micro',\n })\n super\n end", "def setup_defaults()\n {\n 'box' => {\n 'memory' => '2048',\n 'cpus' => '2'\n }\n }\nend", "def set_default_properties\n @properties = {\n :InstanceType => \"t2.micro\",\n :ImageId => \"ami-d05e75b8\"\n }\n end", "def generate_aws_config(vmname, config)\n {\n :ssh_username => option_handler(config, :ssh_username),\n :use_private_ip_for_ssh => option_handler(config, :use_private_ip_for_ssh),\n :bootstrap_options => {\n :key_name => option_handler(config, :keypair_name),\n :instance_type => option_handler(config, :instance_type),\n :ebs_optimized => option_handler(config, :ebs_optimized),\n :image_id => option_handler(config, :image_id),\n :subnet_id => option_handler(config, :subnet_id),\n :associate_public_ip_address => option_handler(config, :associate_public_ip_address),\n # :user_data => nil, #TODO\n :block_device_mappings => [\n { device_name: option_handler(config, :root_block_device),\n ebs: {\n volume_size: option_handler(config, :root_block_device_size),\n volume_type: option_handler(config, :root_block_device_type),\n delete_on_termination: true\n }\n }\n ] + ephemeral_volumes(option_handler(config, :instance_type))\n },\n :aws_tags => option_handler(config, :aws_tags),\n :convergence_options => {\n :install_sh_arguments => option_handler(config, :install_sh_arguments),\n :bootstrap_proxy => option_handler(config, :bootstrap_proxy),\n :chef_config => option_handler(config, :chef_config),\n :chef_version => option_handler(config, :chef_version)\n }\n }\n end", "def set_sb_conf\n # resources allocated for sandboxes' sake\n @conf[:mem_sb] = (@conf[:mem] * 0.8).round\n @conf[:hd_sb] = (@conf[:hd] * 0.8).round\n @conf[:cpu_sb] = @conf[:cpu]\n\n @conf[:sandboxes] = []\n @conf[:sandboxes].push({:name => 'sb0', :type => 'first'})\n end", "def configure_disks(vb, server, hostname, name)\n vminfo = vm_info(name)\n disks = server['disks'] || {}\n unless vminfo =~ /Storage Controller Name \\(1\\): *SATA Controller/\n # puts \"Attaching SATA Controller\"\n vb.customize [\n 'storagectl', :id,\n '--name', 'SATA Controller',\n '--add', 'sata',\n '--portcount', disks.size\n ]\n # else\n # puts 'SATA Controller already attached'\n end\n\n disks.each_with_index do |disk, i|\n disk_name = disk.first\n disk_size = disk.last['size']\n disk_uuid = disk.last['uuid']\n real_uuid = \"00000000-0000-0000-0000-#{disk_uuid.rjust(12,'0')}\"\n if server['cluster']\n disk_filename = File.join(VAGRANT_ROOT, \"#{disk_name}_#{server['cluster']}.vdi\")\n else\n disk_filename = File.join(VAGRANT_ROOT, \"#{disk_name}.vdi\")\n end\n\n if File.file?(disk_filename)\n # puts \"Disk #{disk_filename} already created\"\n disk_hash = `VBoxManage showmediuminfo \"#{disk_filename}\"`.scan(/(.*): *(.*)/).to_h\n current_uuid = disk_hash['UUID']\n else\n # puts \"Creating disk #{disk_filename}\"\n current_uuid = '0'\n if server['cluster']\n vb.customize [\n 'createhd',\n '--filename', disk_filename,\n '--size', disk_size.to_s,\n '--variant', 'Fixed'\n 
]\n vb.customize [\n 'modifyhd', disk_filename,\n '--type', 'shareable'\n ]\n else\n vb.customize [\n 'createhd',\n '--filename', disk_filename,\n '--size', disk_size.to_s,\n '--variant', 'Standard'\n ]\n end\n end\n\n # Conditional for adding disk_uuid\n if server['cluster'] && current_uuid == real_uuid\n # puts \"Attaching shareable disk #{disk_filename}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--mtype', 'shareable'\n ]\n elsif server['cluster']\n # puts \"Attaching shareable disk #{disk_filename}, adding UUID #{real_uuid}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--mtype', 'shareable',\n '--setuuid', real_uuid\n ]\n elsif current_uuid == real_uuid\n # puts \"Attaching normal disk #{disk_filename}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename\n ]\n else\n # puts \"Attaching normal disk #{disk_filename}, adding UUID #{real_uuid}\"\n vb.customize [\n 'storageattach', :id,\n '--storagectl', 'SATA Controller',\n '--port', (i + 1).to_s,\n '--device', 0,\n '--type', 'hdd',\n '--medium', disk_filename,\n '--setuuid', real_uuid\n ]\n end\n end\nend", "def initialize(hash)\n super(hash)\n # Static config\n @hidden = false\n @template = :vmware_hypervisor\n @name = \"vmware_esxi_5_dhcp\"\n @description = \"VMware ESXi 5 DHCP Deployment\"\n @osversion = \"5_dhcp\"\n # Metadata vars\n @hostname_prefix = nil\n # State / must have a starting state\n @current_state = :init\n # Image UUID\n @image_uuid = true\n # Image prefix we can attach\n @image_prefix = \"esxi\"\n # Enable agent brokers for this model\n @broker_plugin = :proxy\n @final_state = :os_complete\n # Metadata vars\n @esx_license = nil\n @hostname_prefix = nil\n @vcenter_name = nil\n @vcenter_datacenter_path = nil\n @vcenter_cluster_path = nil\n @packages = []\n # Metadata\n @req_metadata_hash = {\n \"@esx_license\" => { :default => \"\",\n :example => \"AAAAA-BBBBB-CCCCC-DDDDD-EEEEE\",\n :validation => '^[A-Z\\d]{5}-[A-Z\\d]{5}-[A-Z\\d]{5}-[A-Z\\d]{5}-[A-Z\\d]{5}$',\n :required => true,\n :description => \"ESX License Key\" },\n \"@root_password\" => { :default => \"test1234\",\n :example => \"P@ssword!\",\n :validation => '^[\\S]{8,}',\n :required => true,\n :description => \"root password (> 8 characters)\"\n },\n \"@hostname_prefix\" => { :default => \"\",\n :example => \"esxi-node\",\n :validation => '^[A-Za-z\\d-]{3,}$',\n :required => true,\n :description => \"Prefix for naming node\" }\n }\n @opt_metadata_hash = {\n \"@vcenter_name\" => { :default => \"\",\n :example => \"vcenter01\",\n :validation => '^[\\w.-]{3,}$',\n :required => false,\n :description => \"Optional for broker use: the vCenter to attach ESXi node to\" },\n \"@vcenter_datacenter_path\" => { :default => \"\",\n :example => \"Datacenter01\",\n :validation => '^[a-zA-Z\\d-]{3,}$',\n :required => false,\n :description => \"Optional for broker use: the vCenter Datacenter path to place ESXi host in\" },\n \"@vcenter_cluster_path\" => { :default => \"\",\n :example => \"Cluster01\",\n :validation => '^[a-zA-Z\\d-]{3,}$',\n :required => false,\n :description => \"Optional for broker use: the vCenter Cluster to place ESXi node in\" },\n \"@packages\" => { :default => \"\",\n :example => 
\"\",\n :validation => '',\n :required => false,\n :description => \"Optional for broker use: the vCenter Cluster to place ESXi node in\" }\n }\n\n from_hash(hash) unless hash == nil\n end", "def configure_instance(aws_node, private_ip_address, node_name, node_config)\n # Spin up EC2 instances\n aws_node.vm.provider :aws do |ec2, override|\n ec2.keypair_name = KEYPAIR_NAME\n ec2.access_key_id = ACCESS_KEY_ID\n ec2.secret_access_key = SECRET_ACCESS_KEY\n ec2.security_groups = SECURITY_GROUPS\n override.ssh.private_key_path = PRIVATE_KEY_PATH\n\n # read region, ami etc from json.\n ec2.region = AWS_CFG['region']\n ec2.subnet_id = AWS_CFG['subnet_id']\n ec2.availability_zone = AWS_CFG['region'] + AWS_CFG['availability_zone']\n ec2.ami = node_config['ami_id']\n ec2.instance_type = node_config['instance_type']\n ec2.private_ip_address = private_ip_address\n ec2.associate_public_ip = true\n\n if node_config.key?('volume_size')\n # Size in GB\n # (untested)\n ec2.block_device_mapping = [{ 'DeviceName' => '/dev/sda1', 'Ebs.VolumeSize' => node_config['volume_size'] }]\n end\n\n override.ssh.username = AWS_CFG['ssh_username']\n\n # Collect tags (can't be longer than 250 chars)\n ec2.tags = ({})\n ec2.tags['Name'] = node_name[0..245]\n ec2.tags['Type'] = 'Hyperledger'\n ec2.tags['Version'] = VERSION\n ec2.tags['Fabric'] = node_config['fabric'].map { |f| f['role'] }.join(',')[0..245]\n end\nend", "def config_agent\n @config[:root] = DAEMON_ROOT\n @config[:daemonize] = false\n @config[:actors_dir] = File.join(DAEMON_ROOT, 'lib', 'actors')\n end", "def config_agent\n @config[:root] = DAEMON_ROOT\n @config[:daemonize] = false\n @config[:actors_dir] = File.join(DAEMON_ROOT, 'lib', 'actors')\n end", "def start_gpg_agent_old\n write_gpg_agent_startup_script\n gpg_agent_output = %x(./#{@gpg_agent_script}).strip\n\n # By the time we get here, we can be assured we will be starting a\n # new agent, because the directory is cleaned out.\n #\n # Follow-on gpg actions will read the agent's information from\n # the env-file the agent writes at startup.\n\n # We're using the --sh option which will spew out the agent config\n # when the agent starts. 
If it is empty, this is a problem.\n warn(empty_gpg_agent_message) if gpg_agent_output.empty?\n\n agent_info = gpg_agent_info\n generate_key(agent_info[:info])\n agent_info\n end", "def elasticsearch_init(storage_name,\n allocated_storage: 5,\n instance_type: 't2.medium',\n properties: {},\n zone_name: nil)\n\n @es_key_name = gen_ssh_key_name(\"Elasticsearch#{storage_name}\", region, stack_name)\n pre_run { create_ssh_key(@es_key_name, region, force_create: false) }\n\n mapping 'AWSElasticsearchAMI', ElasticsearchBitnami.get_mapping\n\n parameter_allocated_storage \"Elasticsearch#{storage_name}\",\n default: allocated_storage,\n min: 5,\n max: 1024\n\n parameter_ec2_instance_type \"Elasticsearch#{storage_name}\", type: instance_type\n\n properties[:KeyName] = @es_key_name\n properties[:InstanceType] = ref(\"Elasticsearch#{storage_name}InstanceType\")\n\n version_tag = {\n Key: 'Version',\n Value: ElasticsearchBitnami.get_release_version\n }\n\n cluster_name_tag = {\n Key: 'ClusterName',\n Value: storage_name.downcase\n }\n\n plugin_tags = [version_tag, cluster_name_tag]\n\n # Set instance tags\n if properties.key?(:Tags) && !properties[:Tags].empty?\n properties[:Tags].concat(plugin_tags)\n else\n properties[:Tags] = plugin_tags\n end\n\n # Configure instance using user-data\n if !properties.key?(:UserData) || !properties[:UserData].empty?\n properties[:UserData] = Base64.encode64(read_user_data('elasticsearch'))\n end\n\n # Assign IAM role to instance\n properties[:IamInstanceProfile] = iam_instance_profile_with_full_access(storage_name, *%w(ec2 s3))\n\n storage_resource_name = \"Elasticsearch#{storage_name}\"\n instance_vpc storage_resource_name,\n find_in_map('AWSElasticsearchAMI', ref('AWS::Region'), :hvm),\n ref_application_subnets.first,\n [ref_private_security_group, ref_resource_security_group],\n depends_on: [],\n properties: properties\n\n # create s3 bucket for cluster snapshots\n account_id = aws_account_id\n bucket_name = \"elasticsearch-bitnami-#{region}-#{account_id}\"\n resource \"Elasticsearch#{storage_name}S3Bucket\",\n Type: 'AWS::S3::Bucket',\n DeletionPolicy: 'Retain',\n Properties: {\n BucketName: bucket_name\n }\n\n # create a DNS record in route53 for instance private ip\n record_name = %W(#{storage_name.downcase.dasherize} #{region} #{zone_name}).join('.')\n create_single_dns_record(\"#{storage_name}PrivateZone\",\n stack_name,\n zone_name,\n record_name,\n resource_records: [get_att(storage_resource_name, 'PrivateIp')])\n end", "def run\n ngen_auth\n @validate = Validator.new\n stack = EcoSystem.new\n @instanceparameters = stack.yaml_reader(config[:yaml])\n stack.validate = @validate\n config[:action] = 'create'\n stack.options = config\n stack.supress_output ='1'\n stack.instanceparameters = @instanceparameters\n inst_result = stack.opt_parse\n ho_hum\n \n inst_result.each do |server|\n config[:inst] = server['server']['display_name']\n instance = {}\n # puts chef_attrs = server['server']['userdata'].at(0)['chef']\n chef_attrs = server['server']['userdata'].at(0)['chef'] if !server['server']['userdata'].at(0)['chef'].nil?\n chef_attrs.each do |attr, value|\n instance[attr] = value\n end\n chef_node_configuration(instance)\n config[:chef_node_name] = config[:inst]\n inst_details = AttrFinder.new(server)\n inst_details.options = config\n inst_details.validate = @validate\n inst_details.function = 'server' \n inst = InstanceClient.new\n inst.validate = @validate\n inst.options = config\n inst.supress_output ='1'\n inst.instanceparameters = @instanceparameters\n 
ssh_host = inst.list_instance_ip(inst_details.compartment, inst_details.instance).at(1)\n bootstrap_for_linux_node(ssh_host).run\n node_attributes(ssh_host, 'IaaS')\n end\n end", "def packer_config\n JSON.dump(JSON.parse(super).tap do |config|\n config['builders'] = [\n {\n \"type\" => \"vsphere\",\n \"vcenter_server\" => Stemcell::Builder::validate_env('VCENTER_SERVER'),\n \"username\" => Stemcell::Builder::validate_env('VCENTER_USERNAME'),\n \"password\" => Stemcell::Builder::validate_env('VCENTER_PASSWORD'),\n \"insecure_connection\" => true,\n\n \"template\" => Stemcell::Builder::validate_env('BASE_TEMPLATE'),\n \"folder\" => Stemcell::Builder::validate_env('VCENTER_VM_FOLDER'),\n \"vm_name\" => \"packer-vmx\",\n \"host\" => Stemcell::Builder::validate_env('VCENTER_HOST'),\n \"resource_pool\" => \"\",\n # \"ssh_username\" => 'Administrator',\n # \"ssh_password\" => Stemcell::Builder::validate_env('ADMINISTRATOR_PASSWORD'),\n 'communicator' => 'winrm',\n 'winrm_username' => 'Administrator',\n 'winrm_password' => Stemcell::Builder::validate_env('ADMINISTRATOR_PASSWORD'),\n 'winrm_timeout' => '3h',\n 'winrm_insecure' => true,\n \"CPUs\" => ENV.fetch('NUM_VCPUS', '4'),\n \"RAM\" => ENV.fetch('MEM_SIZE', '4096'),\n }\n ]\n end)\n end", "def setup\n @executor = NagiosHerald::Executor.new\n @options = {}\n @options[:env] = File.join(File.dirname(__FILE__), '..', 'env_files', 'check_disk.CRITICAL_ICINGA')\n end", "def configure_settings(bootstrap_options)\r\n config = {\r\n environment: bootstrap_options['environment'],\r\n chef_extension_root: @chef_extension_root,\r\n user_client_rb: @client_rb,\r\n log_location: @azure_plugin_log_location,\r\n secret: @secret,\r\n first_boot_attributes: @first_boot_attributes\r\n }\r\n\r\n config[:chef_node_name] = bootstrap_options['chef_node_name'] if bootstrap_options['chef_node_name']\r\n config[:chef_server_url] = bootstrap_options['chef_server_url'] if bootstrap_options['chef_server_url']\r\n config[:validation_client_name] = bootstrap_options['validation_client_name'] if bootstrap_options['validation_client_name']\r\n config[:node_verify_api_cert] = bootstrap_options['node_verify_api_cert'] if bootstrap_options['node_verify_api_cert']\r\n config[:node_ssl_verify_mode] = bootstrap_options['node_ssl_verify_mode'] if bootstrap_options['node_ssl_verify_mode']\r\n\r\n config\r\n end", "def init\n create_file options[:inventory_config] do\n<<-YML\n# sources:\n# - \"https://supermarket.getchef.com\"\n# cookbooks:\n# cookbook-name:\n# versions:\n# - \"~> 4.0.2\"\n# - \"> 5.0.0\"\n# git:\n# location: url | path\n# branches:\n# - a_branch_name\n# refs:\n# - SHA\n\nYML\n end\n end", "def abs_initialize\n # only proceed if the user has a token\n user_has_token = false\n if get_abs_token\n user_has_token = true\n\n @abs_base_url = ENV[\"ABS_BASE_URL\"] ? ENV[\"ABS_BASE_URL\"] : ABS_BASE_URL\n @aws_platform = ENV[\"ABS_AWS_PLATFORM\"] ? ENV[\"ABS_AWS_PLATFORM\"] : AWS_PLATFORM\n @aws_image_id = ENV[\"ABS_AWS_IMAGE_ID\"] ? ENV[\"ABS_AWS_IMAGE_ID\"] : AWS_IMAGE_ID\n @aws_region = ENV[\"ABS_AWS_REGION\"] ? ENV[\"ABS_AWS_REGION\"] : AWS_REGION\n @aws_reap_time = ENV[\"ABS_AWS_REAP_TIME\"] ? ENV[\"ABS_AWS_REAP_TIME\"] : AWS_REAP_TIME\n @mom_size = ENV[\"ABS_AWS_MOM_SIZE\"]\n @mom_volume_size = ENV[\"ABS_AWS_MOM_VOLUME_SIZE\"] ? ENV[\"ABS_AWS_MOM_VOLUME_SIZE\"] : MOM_VOLUME_SIZE\n @metrics_size = ENV[\"ABS_AWS_METRICS_SIZE\"]\n @metrics_volume_size = ENV[\"ABS_METRICS_VOLUME_SIZE\"] ? 
ENV[\"ABS_METRICS_VOLUME_SIZE\"] : METRICS_VOLUME_SIZE\n @abs_beaker_pe_version = ENV[\"BEAKER_PE_VER\"] ? ENV[\"BEAKER_PE_VER\"] : nil\n end\n user_has_token\n end", "def provision_chef_common(chef, key, chef_config, vm_config)\n # set_instance_variable(chef_config, chef, 'environment', '_default' )\n # set_instance_variable(chef_config, chef, 'version', '12.19.36' )\n properties = [\n 'attempts',\n 'enable_reporting',\n 'encrypted_data_bag_secret_key_path',\n 'environment',\n 'run_list',\n 'verbose_logging',\n 'version'\n ]\n properties.each do | property_name |\n set_instance_variable(chef_config, chef, property_name )\n end\nend", "def createEc2Instance\n name = @config[\"name\"]\n node = @config['mu_name']\n\n instance_descriptor = {\n :image_id => @config[\"ami_id\"],\n :key_name => @deploy.ssh_key_name,\n :instance_type => @config[\"size\"],\n :disable_api_termination => true,\n :min_count => 1,\n :max_count => 1\n }\n\n arn = nil\n if @config['generate_iam_role']\n role = @deploy.findLitterMate(name: @config['name'], type: \"roles\")\n s3_objs = [\"#{@deploy.deploy_id}-secret\", \"#{role.mu_name}.pfx\", \"#{role.mu_name}.crt\", \"#{role.mu_name}.key\", \"#{role.mu_name}-winrm.crt\", \"#{role.mu_name}-winrm.key\"].map { |file| \n 'arn:'+(MU::Cloud::AWS.isGovCloud?(@config['region']) ? \"aws-us-gov\" : \"aws\")+':s3:::'+MU.adminBucketName+'/'+file\n }\n role.cloudobj.injectPolicyTargets(\"MuSecrets\", s3_objs)\n\n @config['iam_role'] = role.mu_name\n arn = role.cloudobj.createInstanceProfile\n# @cfm_role_name, @cfm_prof_name\n\n elsif @config['iam_role'].nil?\n raise MuError, \"#{@mu_name} has generate_iam_role set to false, but no iam_role assigned.\"\n end\n if !@config[\"iam_role\"].nil?\n if arn\n instance_descriptor[:iam_instance_profile] = {arn: arn}\n else\n instance_descriptor[:iam_instance_profile] = {name: @config[\"iam_role\"]}\n end\n end\n\n security_groups = []\n if @dependencies.has_key?(\"firewall_rule\")\n @dependencies['firewall_rule'].values.each { |sg|\n security_groups << sg.cloud_id\n }\n end\n\n if security_groups.size > 0\n instance_descriptor[:security_group_ids] = security_groups\n else\n raise MuError, \"Didn't get any security groups assigned to be in #{@mu_name}, that shouldn't happen\"\n end\n\n if !@config['private_ip'].nil?\n instance_descriptor[:private_ip_address] = @config['private_ip']\n end\n\n vpc_id = subnet = nil\n if [email protected]? and @config.has_key?(\"vpc\")\n subnet_conf = @config['vpc']\n subnet_conf = @config['vpc']['subnets'].first if @config['vpc'].has_key?(\"subnets\") and !@config['vpc']['subnets'].empty?\n tag_key, tag_value = subnet_conf['tag'].split(/=/, 2) if !subnet_conf['tag'].nil?\n\n subnet = @vpc.getSubnet(\n cloud_id: subnet_conf['subnet_id'],\n name: subnet_conf['subnet_name'],\n tag_key: tag_key,\n tag_value: tag_value\n )\n if subnet.nil?\n raise MuError, \"Got null subnet id out of #{subnet_conf['vpc']}\"\n end\n MU.log \"Deploying #{node} into VPC #{@vpc.cloud_id} Subnet #{subnet.cloud_id}\"\n punchAdminNAT\n instance_descriptor[:subnet_id] = subnet.cloud_id\n end\n\n if [email protected]? 
and [email protected]?\n instance_descriptor[:user_data] = Base64.encode64(@userdata)\n end\n\n MU::Cloud::AWS::Server.waitForAMI(@config[\"ami_id\"], region: @config['region'], credentials: @config['credentials'])\n\n # Figure out which devices are embedded in the AMI already.\n image = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).describe_images(image_ids: [@config[\"ami_id\"]]).images.first\n ext_disks = {}\n if !image.block_device_mappings.nil?\n image.block_device_mappings.each { |disk|\n if !disk.device_name.nil? and !disk.device_name.empty? and !disk.ebs.nil? and !disk.ebs.empty?\n ext_disks[disk.device_name] = MU.structToHash(disk.ebs)\n end\n }\n end\n\n configured_storage = Array.new\n cfm_volume_map = {}\n if @config[\"storage\"]\n @config[\"storage\"].each { |vol|\n # Drop the \"encrypted\" flag if a snapshot for this device exists\n # in the AMI, even if they both agree about the value of said\n # flag. Apparently that's a thing now.\n if ext_disks.has_key?(vol[\"device\"])\n if ext_disks[vol[\"device\"]].has_key?(:snapshot_id)\n vol.delete(\"encrypted\")\n end\n end\n mapping, cfm_mapping = MU::Cloud::AWS::Server.convertBlockDeviceMapping(vol)\n configured_storage << mapping\n }\n end\n\n instance_descriptor[:block_device_mappings] = configured_storage\n instance_descriptor[:block_device_mappings].concat(@ephemeral_mappings)\n instance_descriptor[:monitoring] = {enabled: @config['monitoring']}\n\n MU.log \"Creating EC2 instance #{node}\"\n MU.log \"Instance details for #{node}: #{instance_descriptor}\", MU::DEBUG\n#\t\t\t\tif instance_descriptor[:block_device_mappings].empty?\n#\t\t\t\t\tinstance_descriptor.delete(:block_device_mappings)\n#\t\t\t\tend\n\n retries = 0\n begin\n response = MU::Cloud::AWS.ec2(region: @config['region'], credentials: @config['credentials']).run_instances(instance_descriptor)\n rescue Aws::EC2::Errors::InvalidGroupNotFound, Aws::EC2::Errors::InvalidSubnetIDNotFound, Aws::EC2::Errors::InvalidParameterValue => e\n if retries < 10\n if retries > 7\n MU.log \"Seeing #{e.inspect} while trying to launch #{node}, retrying a few more times...\", MU::WARN, details: instance_descriptor\n end\n sleep 10\n retries = retries + 1\n retry\n else\n raise MuError, e.inspect\n end\n end\n\n instance = response.instances.first\n MU.log \"#{node} (#{instance.instance_id}) coming online\"\n\n return instance\n\n end", "def build_vm_image\n options.verbose? ? 
@@log.level = Logger::DEBUG : @@log.level = Logger::ERROR\n def_constants(guess_os(\"fedora\"))\n\n # Override the machine type to launch if necessary\n $amz_options[:instance_type] = options[:instance_type] if options[:instance_type]\n $amz_options[:block_device_mappings] = {\"/dev/sdb\" => \"ephemeral0\"}\n \n # Establish a new connection\n conn = connect(options.region)\n \n image = nil\n # Create a new builder instance\n if (options.region?nil)\n image = conn.images[AMI[\"us-east-1\"]]\n elsif AMI[options.region].nil?\n puts \"No AMI specified for region:\" + options.region\n exit 1\n else\n image = conn.images[AMI[options.region]]\n end\n\n puts \"Launching AMI: #{image.id} - #{image.name}\"\n instance = launch_instance(image, \"oso-image-builder\", 1, SSH_USER)\n hostname = instance.dns_name\n puts \"Done\"\n puts \"Hostname: #{hostname}\"\n ssh(hostname, 'su - -c \"setenforce 0\"' , 60, false, 1, SSH_USER)\n ssh(hostname, 'su - -c \"yum update -y\"' , 300, false, 1, SSH_USER)\n ssh(hostname, 'su - -c \"yum install -y appliance-tools qemu-img\"' , 60, false, 1, SSH_USER)\n scp_to(hostname, File.expand_path(\"#{__FILE__ }/../templates/openshift-origin.ks\"), '/home/ec2-user/openshift-origin.ks', 60, 5,SSH_USER)\n scp_to(hostname, File.expand_path(\"#{__FILE__ }/../templates/openshift-origin.vmx\"), '/home/ec2-user/openshift-origin.vmx', 60, 5,SSH_USER)\n scp_to(hostname, File.expand_path(\"#{__FILE__ }/../templates/openshift-origin.vbox\"), '/home/ec2-user/openshift-origin.vbox', 60, 5,SSH_USER) \n ssh(hostname, 'su - -c \"mv -f /home/ec2-user/openshift-origin.ks /mnt/\"' , 60, false, 1, SSH_USER)\n ssh(hostname, 'su - -c \"mkdir -p /mnt/tmp/build && mkdir -p /mnt/openshift-origin\"', 60, false, 1,SSH_USER)\n ssh(hostname, 'su - -c \"which VBoxManage 2>&1 > /dev/null || yum install -y http://download.virtualbox.org/virtualbox/4.2.8/VirtualBox-4.2-4.2.8_83876_fedora18-1.x86_64.rpm\"', 60, false, 1, SSH_USER)\n ssh(hostname, 'su - -c \"cd /mnt && appliance-creator -d -n openshift-origin -c openshift-origin.ks ' +\n '--format=vmdk --vmem=1024 --checksum --logfile=build.log --tmpdir=/mnt/tmp/build --cache /mnt/tmp/cache/\"', 2400, false, 1, SSH_USER)\n ssh(hostname, 'su - -c \"' + %{\n /bin/cp -f /mnt/openshift-origin.ks /mnt/openshift-origin/ &&\n /bin/mv -f /home/ec2-user/openshift-origin.v* /mnt/openshift-origin/ && \n mkdir -p /root/.VirtualBox/ &&\n /bin/cp -f /mnt/openshift-origin/openshift-origin.vbox /root/.VirtualBox/openshift-origin.vbox &&\n ln -sf /mnt/openshift-origin/openshift-origin-sda.vmdk /root/.VirtualBox/openshift-origin-sda.vmdk &&\n pushd /root/.VirtualBox/ &&\n VBoxManage registervm openshift-origin.vbox &&\n VBoxManage storageattach \\\\\\\"OpenShift Origin\\\\\\\" --storagectl SATA --type hdd --port 0 --medium openshift-origin-sda.vmdk &&\n /bin/cp -f /root/.VirtualBox/openshift-origin.vbox /mnt/openshift-origin/openshift-origin.vbox && \n popd && \n cd /mnt/openshift-origin && tar zcf openshift-origin.tgz * &&\n VBoxManage unregistervm \\\\\\\"OpenShift Origin\\\\\\\"\n } + '\"' , 2400, false, 1, SSH_USER)\n FileUtils.mkdir_p \"vm/\"\n scp_from(hostname, \"/mnt/openshift-origin/openshift-origin.tgz\", \"vm/\", 1200, SSH_USER)\n begin\n terminate_instance(hostname) if options.terminate?\n rescue\n # suppress termination errors - they have been logged already\n end\n end", "def create_aws_instance(config, name, instance_type=\"m3.medium\")\n config.ssh.pty = true\n config.vm.define name do |server|\n server.vm.box = AWS_BOX\n server.vm.provider :aws do |aws, 
override|\n aws.instance_type = instance_type\n aws.region = AWS_REGION\n aws.ami = AWS_AMI\n aws.keypair_name = AWS_PRIVATE_KEY\n override.ssh.username = AWS_SSH_USERNAME\n override.ssh.private_key_path = AWS_PRIVATE_KEY_PATH\n yield(aws,override,server)\n end\n end\nend", "def initialize(default_target, arguments)\n option_parser = OptionParser.new\n option_parser.on('--no-ivy') { @no_ivy = true }\n option_parser.on('--no-compile') { @no_compile = true }\n option_parser.on('--no-demo') { @no_demo = true }\n option_parser.on('--no-schema') { @no_schema = true }\n option_parser.on('--no-jre') { @no_jre = true }\n option_parser.on('--no-no') do \n @no_ivy = true\n @no_demo = true\n @no_schema = true\n end\n option_parser.on('--emma') do Registry[:emma] = true end\n \n @start_time = Time.now\n @basedir = FilePath.new(File.dirname(File.expand_path(__FILE__)), \"..\").canonicalize\n Registry[:basedir] = @basedir\n puts(\"Building with base directory: '#@basedir'.\")\n puts\n @default_target = default_target\n @ant = TerracottaAnt.new\n Registry[:ant] = @ant\n @platform = CrossPlatform.create_implementation(:ant => @ant)\n Registry[:platform] = @platform\n \n # The CommandLineConfigSource actually parses its arguments, and returns only the ones\n # that aren't configuration property settings (e.g., of the form 'a=b').\n arguments = option_parser.parse(arguments)\n @arguments, command_line_source = CommandLineConfigSource.from_args(arguments)\n @internal_config_source = InternalConfigSource.new\n Registry[:internal_config_source] = @internal_config_source\n @config_source = create_config_source(command_line_source, @internal_config_source)\n Registry[:config_source] = @config_source\n Registry[:command_line_config] = command_line_source\n\n @script_results = ScriptResults.new\n\n if Registry[:emma]\n Registry[:emma_home] = FilePath.new(@basedir.to_s, \"..\", \"..\", \"buildsystems\", \"emma-2.0.5312\").canonicalize.to_s\n fail(\"EMMA_HOME does not exist: #{Registry[:emma_home]}\") unless File.exists?(Registry[:emma_home])\n Registry[:emma_lib] = \"#{Registry[:emma_home]}/lib/emma.jar\"\n puts \"EMMA_HOME: #{Registry[:emma_home]}\"\n end\n\n # XXX: this is a hack to get around jruby script converting JAVA_HOME to unix path\n begin\n if `uname` =~ /CYGWIN/i\n ENV['JAVA_HOME'] = `cygpath -w #{ENV['JAVA_HOME']}`.strip\n end\n rescue\n # do nothing\n end\n\n reset\n end", "def setup\n add_standard_properties\n #\n create_banner\n create_standard_options\n create_advanced_options\n create_mode_options\n create_application_options\n create_feature_options\n create_tail_options\n #\n parse_options\n load_config_configuration\n create_result_directory\n load_results_archive\n end", "def init_settings\n s = Biopsy::Settings.instance\n s.set_defaults\n libdir = File.dirname(__FILE__)\n s.target_dir = [File.join(libdir, 'assemblotron/assemblers/')]\n s.objectives_dir = [File.join(libdir, 'assemblotron/objectives/')]\n @log.debug \"initialised Biopsy settings\"\n end", "def machine_options\n opts = {\n convergence_options: {\n bootstrap_proxy: @bootstrap_proxy,\n chef_config: @chef_config,\n chef_version: @chef_version,\n install_sh_path: @install_sh_path\n },\n bootstrap_options: {\n instance_type: @flavor,\n key_name: @key_name,\n security_group_ids: @security_group_ids\n },\n ssh_username: @ssh_username,\n image_id: @image_id,\n use_private_ip_for_ssh: @use_private_ip_for_ssh\n }\n\n # Add any optional machine options\n require 'chef/mixin/deep_merge'\n @subnet_id = 
Server::Helpers::Component.subnet_id('us-west-2b')\n if @subnet_id\n opts = Chef::Mixin::DeepMerge.hash_only_merge(opts,\n bootstrap_options: { subnet_id: @subnet_id })\n end\n\n opts\n end", "def launch_configuration(asg, sg_tcp_80_priv, sg_tcp_22_priv, instance_type, ami)\n asg.create_launch_configuration(launch_configuration_name: 'lc-nginx_auto',\n associate_public_ip_address: false,\n # key_name: 'UbuntuKeyPair', # TODO: Change/Remove\n image_id: ami, # Ubuntu base AMI from ubuntu.com\n instance_type: instance_type,\n security_groups: [sg_tcp_80_priv], # sg_tcp_22_priv\n instance_monitoring: { enabled: true }, # true=CloudWatch monitoring (60sec)\n user_data: Base64.encode64(\"#!/bin/bash -ex\\n\"\\\n \"export DEBIAN_FRONTEND=noninteractive\\n\"\\\n \"apt-get -q=2 update && apt-get -q=2 upgrade\\n\"\\\n \"apt-get -q=2 install nginx\\n\"\\\n \"URL=http://169.254.169.254/latest/meta-data\\n\"\\\n \"cat >> /var/www/html/index.html <<EOF\\n\"\\\n \"<meta http-equiv=refresh content=2 /><br>\\n\"\\\n \"FROM: Launch Configuration / ASG<br>\\n\"\\\n \"INSTANCE ID: $(curl $URL/instance-id)<br>\\n\"\\\n \"PUBLIC IP: [NONE], using NAT instances<br>\\n\"\\\n \"INTERNAL IP: $(curl $URL/local-ipv4)<br>\\n\"\\\n 'EOF'))\n sleep 5\nend", "def define_vm config, role, index, ip, memory = 512\n id = (index + 1).to_s.rjust(3, '0')\n config.vm.define \"#{role}_#{id}\" do |box|\n box.vm.customize [ \"modifyvm\", :id, \"--memory\", memory ]\n box.vm.box = \"centos_6_3\"\n box.vm.box_url = \"https://dl.dropbox.com/u/7225008/Vagrant/CentOS-6.3-x86_64-minimal.box\"\n box.vm.network :hostonly, \"192.168.34.#{ip}\", :netmask => \"255.255.255.0\"\n box.vm.host_name = \"#{role.downcase.gsub(/[^a-z0-9]+/, '-')}-#{id}.esi.dev\"\n #box.vm.provision :shell, :path => \"script/bootstrap-vm.sh\"\n box.vm.provision :puppet, :module_path => \"modules\" do |p|\n p.manifests_path = \"manifests\"\n p.manifest_file = \"site.pp\"\n end\n end\nend", "def initialize\n @key_files = []\n @host_key_files = []\n @use_agent = true\n @agent = nil\n end", "def initialize( location = 'stage', env_file = File.expand_path(\"#{__FILE__}/../../config/stage.sh\"))\n @location = location\n @env_file = env_file\n envfile_hash = process_env_file( env_file )\n #ENV explicit settings override values found in a locations config file\n @data = Hash[ SQA_ECOMM_SERVER_URL: ENV.fetch( 'SQA_ECOMM_SERVER_URL', envfile_hash['SQA_ECOMM_SERVER_URL'] ),\n SQA_ECOMM_API_SERVER_URL: ENV.fetch( 'SQA_ECOMM_API_SERVER_URL', envfile_hash['SQA_ECOMM_API_SERVER_URL'] ),\n SQA_ECOMM_DB_SERVER: ENV.fetch( 'SQA_ECOMM_DB_SERVER', envfile_hash['SQA_ECOMM_DB_SERVER'] ),\n SQA_ECOMM_DB: ENV.fetch( 'SQA_ECOMM_DB', envfile_hash['SQA_ECOMM_DB'] ),\n SQA_ECOMM_DB_UPDATE_USER: ENV.fetch( 'SQA_ECOMM_DB_UPDATE_USER', envfile_hash['SQA_ECOMM_DB_UPDATE_USER'] ),\n SQA_ECOMM_DB_UPDATE_PW: ENV.fetch( 'SQA_ECOMM_DB_UPDATE_PW', envfile_hash['SQA_ECOMM_DB_UPDATE_PW'] ),\n SQA_ECOMM_DB_READONLY_USER: ENV.fetch( 'SQA_ECOMM_DB_READONLY_USER', envfile_hash['SQA_ECOMM_DB_READONLY_USER'] ),\n SQA_ECOMM_DB_READONLY_PW: ENV.fetch( 'SQA_ECOMM_DB_READONLY_PW', envfile_hash['SQA_ECOMM_DB_READONLY_PW'] ),\n SQA_ORACLE_DB_SERVER: ENV.fetch( 'SQA_ORACLE_DB_SERVER', envfile_hash['SQA_ORACLE_DB_SERVER'] ),\n SQA_ORACLE_DB_UPDATE_USER: ENV.fetch( 'SQA_ORACLE_DB_UPDATE_USER', envfile_hash['SQA_ORACLE_DB_UPDATE_USER'] ),\n SQA_ORACLE_DB_UPDATE_PW: ENV.fetch( 'SQA_ORACLE_DB_UPDATE_PW', envfile_hash['SQA_ORACLE_DB_UPDATE_PW'] ),\n SQA_ORACLE_DB_READONLY_USER: ENV.fetch( 'SQA_ORACLE_DB_READONLY_USER', 
envfile_hash['SQA_ORACLE_DB_READONLY_USER'] ),\n SQA_ORACLE_DB_READONLY_PW: ENV.fetch( 'SQA_ORACLE_DB_READONLY_PW', envfile_hash['SQA_ORACLE_DB_READONLY_PW'] ),\n SQA_HJ_DB_SERVER: ENV.fetch( 'SQA_HJ_DB_SERVER', envfile_hash['SQA_HJ_DB_SERVER'] ),\n SQA_HJ_DB: ENV.fetch( 'SQA_HJ_DB', envfile_hash['SQA_HJ_DB'] ),\n SQA_HJ_DB_UPDATE_USER: ENV.fetch( 'SQA_HJ_DB_UPDATE_USER', envfile_hash['SQA_HJ_DB_UPDATE_USER'] ),\n SQA_HJ_DB_UPDATE_PW: ENV.fetch( 'SQA_HJ_DB_UPDATE_PW', envfile_hash['SQA_HJ_DB_UPDATE_PW'] ),\n SQA_HJ_DB_READONLY_USER: ENV.fetch( 'SQA_HJ_DB_READONLY_USER', envfile_hash['SQA_HJ_DB_READONLY_USER'] ),\n SQA_HJ_DB_READONLY_PW: ENV.fetch( 'SQA_HJ_DB_READONLY_PW', envfile_hash['SQA_HJ_DB_READONLY_PW'] ),\n SQA_RUDI_SERVER: ENV.fetch( 'SQA_RUDI_SERVER', envfile_hash['SQA_RUDI_SERVER'] ),\n SQA_RUDI_VERSION: ENV.fetch( 'SQA_RUDI_VERSION', envfile_hash['SQA_RUDI_VERSION'] ),\n SQA_UNIBLAB_SERVER: ENV.fetch( 'SQA_UNIBLAB_SERVER', envfile_hash['SQA_UNIBLAB_SERVER'] ),\n SQA_UNIBLAB_VERSION: ENV.fetch( 'SQA_UNIBLAB_VERSION', envfile_hash['SQA_UNIBLAB_VERSION'] ) ]\n end", "def default_config\n data = {\n 'acr_values' => ENV['acr_values'] || 'http://idmanagement.gov/ns/assurance/ial/1',\n 'client_id' => ENV['client_id'] || 'urn:gov:gsa:openidconnect:sp:sinatra',\n 'mock_irs_client_id' => ENV['mock_irs_client_id'] ||\n 'urn:gov:gsa:openidconnect:sp:mock_irs',\n 'redirect_uri' => ENV['redirect_uri'] || 'http://localhost:9292/',\n 'sp_private_key_path' => ENV['sp_private_key_path'] || './config/demo_sp.key',\n 'redact_ssn' => true,\n 'cache_oidc_config' => true,\n }\n\n # EC2 deployment defaults\n\n env = ENV['idp_environment'] || 'int'\n domain = ENV['idp_domain'] || 'identitysandbox.gov'\n\n data['idp_url'] = ENV.fetch('idp_url', nil)\n unless data['idp_url']\n if env == 'prod'\n data['idp_url'] = \"https://secure.#{domain}\"\n else\n data['idp_url'] = \"https://idp.#{env}.#{domain}\"\n end\n end\n data['sp_private_key'] = ENV.fetch('sp_private_key', nil)\n\n data\n end", "def configure_tpm2_0_tools(hosts)\n start_tpm2sim_on(hosts)\n config_abrmd_for_tpm2sim_on(hosts)\nend", "def initialize(options)\n @options = normalize_options(options)\n\n validate_options\n initialize_registry\n\n @logger = Bosh::Clouds::Config.logger\n\n @agent_properties = @options.fetch('agent', {})\n openstack_properties = @options['openstack']\n @default_key_name = openstack_properties['default_key_name']\n @default_security_groups = openstack_properties['default_security_groups']\n @default_volume_type = openstack_properties['default_volume_type']\n @stemcell_public_visibility = openstack_properties['stemcell_public_visibility']\n @boot_from_volume = openstack_properties['boot_from_volume']\n @use_dhcp = openstack_properties['use_dhcp']\n @human_readable_vm_names = openstack_properties['human_readable_vm_names']\n @enable_auto_anti_affinity = openstack_properties['enable_auto_anti_affinity']\n @use_config_drive = !!openstack_properties.fetch('config_drive', false)\n @config_drive = openstack_properties['config_drive']\n\n @openstack = Bosh::OpenStackCloud::Openstack.new(@options['openstack'])\n\n @az_provider = Bosh::OpenStackCloud::AvailabilityZoneProvider.new(\n @openstack,\n openstack_properties['ignore_server_availability_zone'],\n )\n\n @metadata_lock = Mutex.new\n\n @instance_type_mapper = Bosh::OpenStackCloud::InstanceTypeMapper.new\n end", "def config_options\n {\n 'datacenter' => new_resource.datacenter,\n 'template_path' => new_resource.template_path,\n 'power_on' => true,\n 'datastore' => 
new_resource.datastore,\n 'wait' => true,\n 'hostname' => new_resource.hostname,\n 'name' => new_resource.name,\n 'customization_spec' => {\n 'domain' => new_resource.domain,\n 'ipsettings' => {\n 'ip' => new_resource.ip || node['vcac_vm']['ip'],\n 'gateway' => new_resource.gateway,\n 'subnetMask' => new_resource.subnet_mask,\n },\n }\n }\nend", "def bootstrap_options\n @bootstrap_options ||= begin\n opts = {}\n\n opts[:ssh_user] = config[:ssh_user] || 'root'\n opts[:ssh_password] = config[:ssh_password]\n if opts[:ssh_password].nil?\n opts[:identity_file] = config[:identity_file] || hazetug_identity\n end\n\n template = options[:opts][:bootstrap] || 'bootstrap.erb'\n validation = config[:chef_validation_key] || 'validation.pem'\n\n opts[:validation_key] = File.expand_path(validation)\n opts[:template_file] = File.expand_path(template)\n opts[:environment] = config[:chef_environment]\n opts[:tags] = config[:tags]\n opts[:host_key_verify] = config[:host_key_verify] || false\n opts[:chef_server_url] = config[:chef_server_url]\n opts\n end\n end", "def settings_for_node\n cluster_name = self.parent.name.to_sym\n cluster_role = self.name.to_sym\n node_settings = {\n :user_data => { :attributes => { :run_list => [] } },\n :cluster_name => cluster_name,\n :cluster_role => cluster_role,\n }.deep_merge(Settings)\n node_settings.delete :pools\n raise \"Please define the '#{cluster_name}' cluster and the '#{cluster_role}' role in your ~/.chef/cluster_chef.yaml\" if (Settings[:pools][cluster_name].blank? || Settings[:pools][cluster_name][cluster_role].blank?)\n node_settings = node_settings.deep_merge(\n Settings[:pools][cluster_name][:common] ||{ }).deep_merge(\n Settings[:pools][cluster_name][cluster_role] ||{ })\n configure_aws_region node_settings\n node_settings\nend", "def configure_ks_pxe_client(options)\n options['ip'] = single_install_ip(options)\n tftp_pxe_file = options['mac'].gsub(/:/,\"\")\n tftp_pxe_file = tftp_pxe_file.upcase\n tftp_pxe_file = \"01\"+tftp_pxe_file+\".pxelinux\"\n test_file = options['tftpdir']+\"/\"+tftp_pxe_file\n tmp_file = \"/tmp/pxecfg\"\n if File.symlink?(test_file)\n message = \"Information:\\tRemoving old PXE boot file \"+test_file\n command = \"rm #{test_file}\"\n execute_command(options,message,command)\n end\n pxelinux_file = \"pxelinux.0\"\n message = \"Information:\\tCreating PXE boot file for \"+options['name']+\" with MAC address \"+options['mac']\n command = \"cd #{options['tftpdir']} ; ln -s #{pxelinux_file} #{tftp_pxe_file}\"\n execute_command(options,message,command)\n if options['service'].to_s.match(/live/)\n iso_dir = options['tftpdir']+\"/\"+options['service']\n message = \"Information:\\tDetermining install ISO location\"\n command = \"ls #{iso_dir}/*.iso\"\n iso_file = execute_command(options,message,command) \n iso_file = iso_file.chomp\n install_iso = File.basename(iso_file)\n end\n if options['biostype'].to_s.match(/efi/)\n shim_efi_file = \"/usr/lib/shim/shimx64.efi\"\n if !File.exist?(shim_efi_file)\n install_package(options,\"shim\")\n end\n shim_grub_file = options['tftpdir']+\"/shimx64.efi\"\n net_efi_file = \"/usr/lib/grub/x86_64-efi/monolithic/grubnetx64.efi\"\n if !File.exist?(net_efi_file)\n install_package(options,\"grub-efi-amd64-bin\")\n end\n net_grub_file = options['tftpdir']+\"/grubx64.efi\"\n check_dir_exists(options,options['tftpdir'])\n check_dir_owner(options,options['tftpdir'],options['uid'])\n if !File.exist?(shim_efi_file)\n install_package(options,\"shim-signed\")\n end\n if !File.exist?(net_efi_file)\n 
install_package(options,\"grub-efi-amd64-signed\")\n end\n if !File.exist?(shim_grub_file)\n message = \"Information:\\tCopying #{shim_efi_file} to #{shim_grub_file}\"\n command = \"cp #{shim_efi_file} #{shim_grub_file}\"\n execute_command(options,message,command)\n check_file_owner(options,shim_grub_file,options['uid'])\n end\n if !File.exist?(net_grub_file)\n message = \"Information:\\tCopying #{net_efi_file} to #{net_grub_file}\"\n command = \"cp #{net_efi_file} #{net_grub_file}\"\n execute_command(options,message,command)\n check_file_owner(options,net_grub_file,options['uid'])\n end\n tmp_cfg_octs = options['ip'].split(\".\")\n pxe_cfg_octs = [] \n tmp_cfg_octs.each do |octet|\n hextet = octet.convert_base(10, 16)\n if hextet.length < 2\n hextet = \"0\"+hextet\n end\n pxe_cfg_octs.push(hextet.upcase) \n end\n pxe_cfg_txt = pxe_cfg_octs.join\n pxe_cfg_file = \"grub.cfg-\"+pxe_cfg_txt\n pxe_cfg_dir = options['tftpdir']+\"/grub\"\n check_dir_exists(options,pxe_cfg_dir)\n check_dir_owner(options,pxe_cfg_dir,options['uid'])\n pxe_cfg_file = pxe_cfg_dir+\"/\"+pxe_cfg_file\n else\n pxe_cfg_dir = options['tftpdir']+\"/pxelinux.cfg\"\n pxe_cfg_file = options['mac'].gsub(/:/,\"-\")\n pxe_cfg_file = \"01-\"+pxe_cfg_file\n pxe_cfg_file = pxe_cfg_file.downcase\n pxe_cfg_file = pxe_cfg_dir+\"/\"+pxe_cfg_file\n end\n if options['service'].to_s.match(/sles/)\n vmlinuz_file = \"/\"+options['service']+\"/boot/#{options['arch']}/loader/linux\"\n else\n if options['service'].to_s.match(/live/)\n vmlinuz_file = \"/\"+options['service']+\"/casper/vmlinuz\"\n else\n vmlinuz_file = \"/\"+options['service']+\"/images/pxeboot/vmlinuz\"\n end\n end\n if options['service'].to_s.match(/ubuntu/)\n if options['service'].to_s.match(/live/)\n initrd_file = \"/\"+options['service']+\"/casper/initrd\"\n else\n if options['service'].to_s.match(/x86_64/)\n initrd_file = \"/\"+options['service']+\"/images/pxeboot/netboot/ubuntu-installer/amd64/initrd.gz\"\n linux_file = \"/\"+options['service']+\"/images/pxeboot/netboot/ubuntu-installer/amd64/linux\"\n else\n initrd_file = \"/\"+options['service']+\"/images/pxeboot/netboot/ubuntu-installer/i386/initrd.gz\"\n end\n end\n ldlinux_link = options['tftpdir']+\"/ldlinux.c32\"\n if not File.exist?(ldlinux_link) and not File.symlink?(ldlinux_link)\n ldlinux_file = options['service']+\"/images/pxeboot/netboot/ldlinux.c32\"\n message = \"Information:\\tCreating symlink for ldlinux.c32\"\n command = \"ln -s #{ldlinux_file} #{ldlinux_link}\"\n execute_command(options,message,command)\n end\n else\n if options['service'].to_s.match(/sles/)\n initrd_file = \"/\"+options['service']+\"/boot/#{options['arch']}/loader/initrd\"\n else\n initrd_file = \"/\"+options['service']+\"/images/pxeboot/initrd.img\"\n end\n end\n if options['host-os-name'].to_s.match(/Darwin/)\n vmlinuz_file = vmlinuz_file.gsub(/^\\//,\"\")\n initrd_file = initrd_file.gsub(/^\\//,\"\")\n end\n if options['service'].to_s.match(/packer/)\n host_info = options['vmgateway']+\":\"+options['httpport']\n else\n host_info = options['hostip']\n end\n #ks_url = \"http://\"+host_info+\"/clients/\"+options['service']+\"/\"+options['name']+\"/\"+options['name']+\".cfg\"\n #autoyast_url = \"http://\"+host_info+\"/clients/\"+options['service']+\"/\"+options['name']+\"/\"+options['name']+\".xml\"\n base_url = \"http://\"+options['hostip']+\"/\"+options['name']\n if options['service'].to_s.match(/live/)\n iso_url = \"http://\"+options['hostip']+\"/\"+options['service']+\"/\"+install_iso\n end\n ks_url = 
\"http://\"+options['hostip']+\"/\"+options['name']+\"/\"+options['name']+\".cfg\"\n autoyast_url = \"http://\"+options['hostip']+\"/\"+options['name']+\"/\"+options['name']+\".xml\"\n install_url = \"http://\"+host_info+\"/\"+options['service']\n file = File.open(tmp_file,\"w\")\n if options['biostype'].to_s.match(/efi/)\n menuentry = \"menuentry \\\"\"+options['name']+\"\\\" {\\n\"\n file.write(menuentry)\n else\n if options['serial'] == true\n file.write(\"serial 0 115200\\n\")\n file.write(\"prompt 0\\n\")\n end\n file.write(\"DEFAULT LINUX\\n\")\n file.write(\"LABEL LINUX\\n\")\n file.write(\" KERNEL #{vmlinuz_file}\\n\")\n if options['service'].to_s.match(/live/)\n file.write(\" INITRD #{initrd_file}\\n\")\n end\n end\n if options['service'].to_s.match(/ubuntu/)\n options['ip'] = options['q_struct']['ip'].value\n install_domain = options['q_struct']['domain'].value\n install_nic = options['q_struct']['nic'].value\n options['vmgateway'] = options['q_struct']['gateway'].value\n options['netmask'] = options['q_struct']['netmask'].value\n options['vmnetwork'] = options['q_struct']['network_address'].value\n disable_dhcp = options['q_struct']['disable_dhcp'].value\n if disable_dhcp.match(/true/)\n if options['biostype'].to_s.match(/efi/)\n if options['service'].to_s.match(/live/)\n linux_file = \"/\"+options['service'].to_s+\"/casper/vmlinuz\"\n initrd_file = \"/\"+options['service'].to_s+\"/casper/initrd\"\n if options['biosdevnames'] == true\n append_string = \" linux #{linux_file} net.ifnames=0 biosdevname=0 root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url} autoinstall ds=nocloud-net;s=#{base_url}/\"\n else\n append_string = \" linux #{linux_file} root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url} autoinstall ds=nocloud-net;s=#{base_url}/\"\n end\n initrd_string = \" initrd #{initrd_file}\"\n else\n if options['biosdevnames'] == true\n append_string = \" linux #{linux_file} --- auto=true priority=critical preseed/url=#{ks_url} console-keymaps-at/keymap=us locale=en_US hostname=#{options['name']} domain=#{install_domain} interface=#{install_nic} grub-installer/bootdev=#{options['rootdisk']} netcfg/get_ipaddress=#{options['ip']} netcfg/get_netmask=#{options['netmask']} netcfg/get_gateway=#{options['vmgateway']} netcfg/get_nameservers=#{options['nameserver']} netcfg/disable_dhcp=true initrd=#{initrd_file} net.ifnames=0 biosdevname=0\"\n else\n append_string = \" linux #{linux_file} --- auto=true priority=critical preseed/url=#{ks_url} console-keymaps-at/keymap=us locale=en_US hostname=#{options['name']} domain=#{install_domain} interface=#{install_nic} grub-installer/bootdev=#{options['rootdisk']} netcfg/get_ipaddress=#{options['ip']} netcfg/get_netmask=#{options['netmask']} netcfg/get_gateway=#{options['vmgateway']} netcfg/get_nameservers=#{options['nameserver']} netcfg/disable_dhcp=true initrd=#{initrd_file}\"\n end\n initrd_string = \" initrd #{initrd_file}\"\n end\n else\n if options['service'].to_s.match(/live/)\n if options['biosdevnames'] == true\n append_string = \" APPEND root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url} autoinstall ds=nocloud-net;s=#{base_url}/ net.ifnames=0 biosdevname=0\"\n else\n append_string = \" APPEND root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url} autoinstall ds=nocloud-net;s=#{base_url}/\"\n end\n else\n if options['biosdevnames'] == true\n append_string = \" APPEND auto=true priority=critical preseed/url=#{ks_url} console-keymaps-at/keymap=us locale=en_US hostname=#{options['name']} domain=#{install_domain} 
interface=#{install_nic} grub-installer/bootdev=#{options['rootdisk']} netcfg/get_ipaddress=#{options['ip']} netcfg/get_netmask=#{options['netmask']} netcfg/get_gateway=#{options['vmgateway']} netcfg/get_nameservers=#{options['nameserver']} netcfg/disable_dhcp=true initrd=#{initrd_file} net.ifnames=0 biosdevname=0\"\n else\n append_string = \" APPEND auto=true priority=critical preseed/url=#{ks_url} console-keymaps-at/keymap=us locale=en_US hostname=#{options['name']} domain=#{install_domain} interface=#{install_nic} grub-installer/bootdev=#{options['rootdisk']} netcfg/get_ipaddress=#{options['ip']} netcfg/get_netmask=#{options['netmask']} netcfg/get_gateway=#{options['vmgateway']} netcfg/get_nameservers=#{options['nameserver']} netcfg/disable_dhcp=true initrd=#{initrd_file}\"\n end\n end\n end\n else\n append_string = \" APPEND \"\n end\n else\n if options['service'].to_s.match(/sles/)\n if options['biosdevnames'] == true\n append_string = \" APPEND initrd=#{initrd_file} install=#{install_url} autoyast=#{autoyast_url} language=#{options['language']} net.ifnames=0 biosdevname=0\"\n else\n append_string = \" APPEND initrd=#{initrd_file} install=#{install_url} autoyast=#{autoyast_url} language=#{options['language']}\"\n end\n else\n if options['service'].to_s.match(/fedora_2[0-3]/)\n if options['biosdevnames'] == true\n append_string = \" APPEND initrd=#{initrd_file} ks=#{ks_url} ip=#{options['ip']} netmask=#{options['netmask']} net.ifnames=0 biosdevname=0\"\n else\n append_string = \" APPEND initrd=#{initrd_file} ks=#{ks_url} ip=#{options['ip']} netmask=#{options['netmask']}\"\n end\n else\n if options['service'].to_s.match(/live/)\n if options['biosdevnames'] == true\n append_string = \" APPEND net.ifnames=0 biosdevname=0 root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url}\"\n else\n append_string = \" APPEND root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url}\"\n end\n else \n if options['biosdevnames'] == true\n append_string = \" APPEND initrd=#{initrd_file} ks=#{ks_url} ksdevice=bootif ip=#{options['ip']} netmask=#{options['netmask']} net.ifnames=0 biosdevname=0\"\n else\n append_string = \" APPEND initrd=#{initrd_file} ks=#{ks_url} ksdevice=bootif ip=#{options['ip']} netmask=#{options['netmask']}\"\n end\n end\n end\n end\n end\n if options['text'] == true\n if options['service'].to_s.match(/sles/)\n append_string = append_string+\" textmode=1\"\n else\n append_string = append_string+\" text\"\n end\n end\n if options['serial'] == true\n append_string = append_string+\" serial console=ttyS0\"\n end\n append_string = append_string+\"\\n\"\n file.write(append_string)\n if options['biostype'].to_s.match(/efi/)\n initrd_string = initrd_string+\"\\n\"\n file.write(initrd_string)\n file.write(\"}\\n\")\n end\n file.flush\n file.close\n if options['biostype'].to_s.match(/efi/)\n grub_file = pxe_cfg_dir+\"/grub.cfg\"\n if File.exist?(grub_file)\n File.delete(grub_file)\n end\n FileUtils.touch(grub_file)\n grub_file = File.open(grub_file, \"w\")\n file_list = Dir.entries(pxe_cfg_dir)\n file_list.each do |file_name|\n if file_name.match(/cfg\\-/) and !file_name.match(/#{options['name'].to_s}/)\n temp_file = pxe_cfg_dir+\"/\"+file_name\n temp_array = File.readlines(temp_file)\n temp_array.each do |temp_line|\n grub_file.write(temp_line)\n end\n end\n end\n menuentry = \"menuentry \\\"\"+options['name']+\"\\\" {\\n\"\n grub_file.write(menuentry)\n grub_file.write(append_string)\n grub_file.write(initrd_string)\n grub_file.write(\"}\\n\")\n grub_file.flush\n grub_file.close\n 
grub_file = pxe_cfg_dir+\"/grub.cfg\"\n FileUtils.touch(grub_file)\n print_contents_of_file(options,\"\",grub_file)\n end\n message = \"Information:\\tCreating PXE configuration file \"+pxe_cfg_file\n command = \"cp #{tmp_file} #{pxe_cfg_file} ; rm #{tmp_file}\"\n execute_command(options,message,command)\n print_contents_of_file(options,\"\",pxe_cfg_file)\n return\nend", "def initialize options = {}\n require 'aws/ec2'\n\n options.each_key do | key |\n raise ArgumentError, \"unrecognized option \\'#{key}\\'\" unless RECOGNIZED_OPTIONS.include? key\n end\n \n @ec2 = options[ :instance ]\n @start_options = options[ :start_options ] || {}\n \n if @ec2 \n puts \"\\'credentials\\' ignored as an instance was given too\" if options.key? :credentials\n else\n ::AWS.config( options[ :credentials ] )\n \n @ec2 = ::AWS::EC2.new\n @ec2 = @ec2.regions[ options[ :region ] ] if options.key? :region\n end\n end", "def setup\n\n setup_path\n save_application_details\n add_jvm_args\n rename_server_instance\n\n \"/bin/bash ./#{SETUP_ENV_SCRIPT}\"\n end", "def create_config_base\n # Create keys directory for environment\n FileUtils.cd(self.project_root) { FileUtils.mkdir_p \"config/environments/#{self.name}\" }\n FileUtils.cd(\"#{project_root}/config/environments/#{self.name}\") { FileUtils.mkdir_p %w{steps keys} }\n # Create ssh key for puppet user if environment is vagrant\n generate_puppet_user_keys('vagrant') if self.name == 'vagrant'\n end", "def ec2_instance_data # rubocop:disable Metrics/MethodLength, Metrics/AbcSize\n i = {\n :placement => {\n :availability_zone => config[:availability_zone]\n },\n :instance_type => config[:instance_type],\n :ebs_optimized => config[:ebs_optimized],\n :image_id => config[:image_id],\n :key_name => config[:aws_ssh_key_id],\n :subnet_id => config[:subnet_id],\n :private_ip_address => config[:private_ip_address]\n }\n i[:block_device_mappings] = block_device_mappings unless block_device_mappings.empty?\n i[:security_group_ids] = config[:security_group_ids] if config[:security_group_ids]\n i[:user_data] = prepared_user_data if prepared_user_data\n if config[:iam_profile_name]\n i[:iam_instance_profile] = { :name => config[:iam_profile_name] }\n end\n if !config.fetch(:associate_public_ip, nil).nil?\n i[:network_interfaces] =\n [{\n :device_index => 0,\n :associate_public_ip_address => config[:associate_public_ip],\n :delete_on_termination => true\n }]\n # If specifying `:network_interfaces` in the request, you must specify\n # network specific configs in the network_interfaces block and not at\n # the top level\n if config[:subnet_id]\n i[:network_interfaces][0][:subnet_id] = i.delete(:subnet_id)\n end\n if config[:private_ip_address]\n i[:network_interfaces][0][:private_ip_address] = i.delete(:private_ip_address)\n end\n if config[:security_group_ids]\n i[:network_interfaces][0][:groups] = i.delete(:security_group_ids)\n end\n end\n i\n end", "def config_file_defaults\n Chef::Config[:knife].save(true) # this is like \"dup\" to a (real) Hash, and includes default values (and user set values)\n end", "def generate_instance_params(node_params)\n if node_params[:platform] == 'windows'\n user = 'jenkins'\n private_key_file_path = ConfigurationReader.path_to_user_file('mdbci/windows.pem')\n if private_key_file_path.nil?\n return Result.error('Please create the windows.pem file in the configuration directory')\n end\n else\n user = @user\n private_key_file_path = @private_key_file_path\n end\n labels = @configuration_labels.merge(hostname: 
TerraformService.format_string(Socket.gethostname),\n username: TerraformService.format_string(user),\n machinename: TerraformService.format_string(node_params[:name]))\n node_params = node_params.merge(\n labels: labels,\n instance_name: self.class.generate_instance_name(@configuration_id, node_params[:name]),\n network: network_name,\n user: user,\n is_own_vpc: !use_existing_network?,\n key_file: private_key_file_path,\n use_only_private_ip: use_only_private_ip?\n )\n CloudServices.choose_instance_type(@gcp_service.machine_types_list, node_params).and_then do |machine_type|\n Result.ok(node_params.merge(machine_type: machine_type))\n end\n end", "def setup\n\n setup_path\n save_application_details\n add_jvm_args\n rename_server_instance\n\n \"/bin/sh ./#{SETUP_ENV_SCRIPT}\"\n end", "def setup_common(id, options = {})\n node = options[:for_node]\n options[:memory] ||= 1024\n\n # Base setup: Ubuntu Server 14.04 LTS (Trusty Tahr) 64-bit for Parallels\n node.vm.box = \"parallels/ubuntu-14.04\"\n\n # Setup provider\n node.vm.provider \"parallels\" do |provider|\n provider.memory = options[:memory]\n end\n\n # Puppet setup\n node.vm.provision :puppet do |pp|\n pp.module_path = \"Puppet/modules\"\n pp.manifests_path = \"Puppet/manifests\"\n pp.manifest_file = \"init_#{id}.pp\"\n pp.hiera_config_path = \"Hiera/#{id}.yaml\"\n end\nend", "def set_vars\n #Set up vars with AEM package manager urls, etc.\n vars = {}\n vars[:recursive] = new_resource.recursive ? '\\\\&recursive=true' : \"\"\n vars[:file_name] = \"#{new_resource.name}-#{new_resource.version}\" +\n \"#{new_resource.file_extension}\"\n vars[:download_url] = new_resource.package_url\n vars[:file_path] = \"#{Chef::Config[:file_cache_path]}/#{vars[:file_name]}\"\n vars[:user] = new_resource.user\n vars[:password] = new_resource.password\n vars[:port] = new_resource.port\n vars[:group_id] = new_resource.group_id\n vars[:upload_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -F\" +\n \" package=@#{vars[:file_path]} http://localhost:\" +\n \"#{vars[:port]}/crx/packmgr/service/.json?cmd=upload\"\n vars[:delete_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=delete\"\n vars[:install_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=install#{vars[:recursive]}\"\n vars[:activate_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=replicate\"\n vars[:uninstall_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=uninstall\"\n\n vars\nend", "def initialize(opts)\n resume = opts[:resume]\n source_host_def = define_source(opts[:config])\n source_host_ssh = CLI.spinner(\"Logging in to #{source_host_def[:host]}\") do\n host_login(source_host_def)\n end\n\n profile = CLI.spinner(\"Checking source host\") do\n profile = Profile.new(source_host_ssh)\n profile.build\n profile\n end\n platform = Platform::V2.new(profile[:cpe])\n\n memory = profile[:memory]\n memory_percent = memory[:mem_used].to_f / memory[:total] * 100\n swapping = 
memory[:swapping?]\n ftag = \"#{CLI.bold}%15s#{CLI.reset}:\"\n hist_mem = profile[:memory_hist][:mem_used]\n\n puts\n puts \"#{CLI.bold}System Information#{CLI.reset}\"\n puts \"#{ftag} #{platform} (#{profile[:cpe]})\" % \"OS\"\n puts \"#{ftag} #{profile[:arch]}\" % \"Arch\"\n puts \"#{ftag} #{profile[:hostname]}\" % \"Hostname\"\n puts\n\n puts \"#{CLI.bold}CPU Statistics#{CLI.reset}\"\n puts \"#{ftag} %d\" % [\"CPU Count\", profile[:cpu][:count]]\n puts \"#{ftag} %d MHz\" % [\"CPU Speed\", profile[:cpu][:speed]]\n puts \n\n puts \"#{CLI.bold}Memory Statistics#{CLI.reset}\"\n puts \"#{ftag} %d MiB\" % [\"Total RAM\", memory[:total]]\n puts \"#{ftag} %d MiB (%2.1f%%)\" % [\"RAM Used\", memory[:mem_used],\n memory_percent]\n puts \"#{ftag} %d MiB\" % [\"Swap Used\", memory[:swap_used]] if swapping\n puts \"#{ftag} %d%%\" % [\"Hist. RAM Used\", hist_mem] unless hist_mem.nil?\n puts \n\n puts \"#{CLI.bold}Hard Disk Statistics#{CLI.reset}\"\n puts \"#{ftag} %2.1f GB\" % [\"Disk Used\", profile[:disk]]\n puts\n\n puts \"#{CLI.bold}System Statistics#{CLI.reset}\"\n puts \"#{ftag} #{profile[:io][:uptime]}\" % \"Uptime\"\n puts \"#{ftag} #{profile[:io][:wait]}\" % \"I/O Wait\"\n puts\n\n puts \"#{CLI.bold}IP Information#{CLI.reset}\"\n puts \"#{ftag} #{profile[:ip][:public].join(', ')}\" % \"Public\"\n puts \"#{ftag} #{profile[:ip][:private].join(', ')}\" % \"Private\"\n puts\n\n puts \"#{CLI.bold}MySQL Databases#{CLI.reset}\"\n puts \"#{ftag} #{profile[:db][:count]}\" % \"Number\"\n puts \"#{ftag} #{profile[:db][:size]}\" % \"Total Size\"\n puts\n\n puts \"#{CLI.bold}Libraries#{CLI.reset}\"\n puts \"#{ftag} #{profile[:lib][:libc]}\" % \"LIBC\"\n puts \"#{ftag} #{profile[:lib][:perl]}\" % \"Perl\"\n puts \"#{ftag} #{profile[:lib][:python]}\" % \"Python\"\n puts \"#{ftag} #{profile[:lib][:ruby]}\" % \"Ruby\"\n puts \"#{ftag} #{profile[:lib][:php]}\" % \"PHP\"\n unless profile.warnings.empty?\n puts\n print CLI.red + CLI.bold\n profile.warnings.each { |warning| puts warning }\n print CLI.reset\n end\n\n source_host_ssh.logout!\n end", "def initialize(options = {})\n @options = DEFAULTS\n @options.merge!(options)\n\n version_path = File.dirname(__FILE__) + '/../../VERSION'\n\n if File.exists? version_path\n @options[:agent] = \"OpenNebula #{File.read(version_path)}\"\n end\n end", "def create_new_asg config\n delete_launch_configs\n\n auto_scaling = new_auto_scaling\n\n #\n # 1. 
create the launh configuration\n #\n options = {\n :security_groups => [AMI_SECURITY_GROUP],\n :key_pair => AMI_KEY_PAIR,\n :user_data => user_data\n }\n\n launch_config = auto_scaling.launch_configurations.create(\n launch_config_name, \n config[\"ami\"],\n AMI_INSTANCE_TYPE,\n options\n )\n\n #\n # now create the asg\n #\n\n tags = [\n {:key => \"server\", :value => APP_NAME},\n {:key => \"build\", :value => VERSION},\n {:key => \"env\", :value => APP_ENV}\n ]\n\n options = {\n :load_balancers => [AMI_ELB],\n :launch_configuration => launch_config,\n :availability_zones => [AMI_AZ],\n :min_size => 1,\n :max_size => 1,\n :tags => tags\n }\n\n puts \"creating asg\"\n puts \"\\toptions => #{options}\"\n puts \"\\ttags => #{tags}\"\n auto_scaling.groups.create(launch_config_name, options)\nend", "def start\n create_machine :config_id => self.setting, :cluster_number => 1\n end", "def configure_settings\n @env = deep_merge(env.dup, configuration[:env]) if configuration[:env]\n @vm = vm.merge(configuration[:vm]) if configuration[:vm]\n end", "def initialize\n define_os\n define_path\n read_settings\n end", "def initialize\n super()\n @vagrant_dir = @system.try_opt(:vagrant_dir)\n create_new_vm_group unless current_folder_has_souffle_config?\n generate_vagrant_config\n end", "def saveChefMetadata\n self.class.loadChefLib\n @server.getSSHConfig # why though\n MU.log \"Saving #{@server.mu_name} Chef artifacts\"\n\n begin\n chef_node = ::Chef::Node.load(@server.mu_name)\n rescue Net::HTTPServerException\n @server.deploy.sendAdminSlack(\"Couldn't load Chef metadata on `#{@server.mu_name}` :crying_cat_face:\")\n raise MU::Groomer::RunError, \"Couldn't load Chef node #{@server.mu_name}\"\n end\n\n # Figure out what this node thinks its name is\n system_name = chef_node['fqdn'] if !chef_node['fqdn'].nil?\n MU.log \"#{@server.mu_name} local name is #{system_name}\", MU::DEBUG\n\n chef_node.normal.app = @config['application_cookbook'] if !@config['application_cookbook'].nil?\n chef_node.normal[\"service_name\"] = @config[\"name\"]\n chef_node.normal[\"credentials\"] = @config[\"credentials\"]\n chef_node.normal[\"windows_admin_username\"] = @config['windows_admin_username']\n chef_node.chef_environment = MU.environment.downcase\n if @server.config['cloud'] == \"AWS\"\n chef_node.normal[\"ec2\"] = MU.structToHash(@server.cloud_desc)\n end\n\n if @server.windows?\n chef_node.normal['windows_admin_username'] = @config['windows_admin_username']\n chef_node.normal['windows_auth_vault'] = @server.mu_name\n chef_node.normal['windows_auth_item'] = \"windows_credentials\"\n chef_node.normal['windows_auth_password_field'] = \"password\"\n chef_node.normal['windows_auth_username_field'] = \"username\"\n chef_node.normal['windows_ec2config_password_field'] = \"ec2config_password\"\n chef_node.normal['windows_ec2config_username_field'] = \"ec2config_username\"\n chef_node.normal['windows_sshd_password_field'] = \"sshd_password\"\n chef_node.normal['windows_sshd_username_field'] = \"sshd_username\"\n end\n\n # If AD integration has been requested for this node, give Chef what it'll need.\n if !@config['active_directory'].nil?\n chef_node.normal['ad']['computer_name'] = @server.mu_windows_name\n chef_node.normal['ad']['node_class'] = @config['name']\n chef_node.normal['ad']['domain_name'] = @config['active_directory']['domain_name']\n chef_node.normal['ad']['node_type'] = @config['active_directory']['node_type']\n chef_node.normal['ad']['domain_operation'] = @config['active_directory']['domain_operation']\n 
chef_node.normal['ad']['domain_controller_hostname'] = @config['active_directory']['domain_controller_hostname'] if @config['active_directory'].has_key?('domain_controller_hostname')\n chef_node.normal['ad']['netbios_name'] = @config['active_directory']['short_domain_name']\n chef_node.normal['ad']['computer_ou'] = @config['active_directory']['computer_ou'] if @config['active_directory'].has_key?('computer_ou')\n chef_node.normal['ad']['domain_sid'] = @config['active_directory']['domain_sid'] if @config['active_directory'].has_key?('domain_sid')\n chef_node.normal['ad']['dcs'] = @config['active_directory']['domain_controllers']\n chef_node.normal['ad']['domain_join_vault'] = @config['active_directory']['domain_join_vault']['vault']\n chef_node.normal['ad']['domain_join_item'] = @config['active_directory']['domain_join_vault']['item']\n chef_node.normal['ad']['domain_join_username_field'] = @config['active_directory']['domain_join_vault']['username_field']\n chef_node.normal['ad']['domain_join_password_field'] = @config['active_directory']['domain_join_vault']['password_field']\n chef_node.normal['ad']['domain_admin_vault'] = @config['active_directory']['domain_admin_vault']['vault']\n chef_node.normal['ad']['domain_admin_item'] = @config['active_directory']['domain_admin_vault']['item']\n chef_node.normal['ad']['domain_admin_username_field'] = @config['active_directory']['domain_admin_vault']['username_field']\n chef_node.normal['ad']['domain_admin_password_field'] = @config['active_directory']['domain_admin_vault']['password_field']\n end\n\n # Amazon-isms, possibly irrelevant\n awscli_region_widget = {\n \"compile_time\" => true,\n \"config_profiles\" => {\n \"default\" => {\n \"options\" => {\n \"region\" => @config['region']\n }\n }\n }\n }\n chef_node.normal['awscli'] = awscli_region_widget\n\n if [email protected]?\n chef_node.normal['cloudprovider'] = @server.cloud\n\n # XXX In AWS this is an OpenStruct-ish thing, but it may not be in\n # others.\n chef_node.normal[@server.cloud.to_sym] = MU.structToHash(@server.cloud_desc)\n end\n\n tags = MU::MommaCat.listStandardTags\n tags.merge!(MU::MommaCat.listOptionalTags) if @config['optional_tags']\n\n if !@config['tags'].nil?\n @config['tags'].each { |tag|\n tags[tag['key']] = tag['value']\n }\n end\n\n if @config.has_key?(\"monitor\") and !@config['monitor']\n tags['nomonitor'] = true\n end\n\n chef_node.normal['tags'] = tags\n chef_node.save\n\n # If we have a database make sure we grant access to that vault.\n # In some cases the cached getLitter response will not have all the resources in the deploy, so lets not use the cache.\n if @config.has_key?('dependencies')\n deploy = MU::MommaCat.getLitter(MU.deploy_id, use_cache: false)\n @config['dependencies'].each{ |dep|\n if dep['type'] == \"database\" && deploy.deployment.has_key?(\"databases\") && deploy.deployment[\"databases\"].has_key?(dep['name'])\n deploy.deployment[\"databases\"][dep['name']].values.each { |database|\n grantSecretAccess(database['vault_name'], database['vault_item']) if database.has_key?(\"vault_name\") && database.has_key?(\"vault_item\")\n }\n end\n }\n end\n\n # Finally, grant us access to some pre-existing Vaults.\n if !@config['vault_access'].nil?\n @config['vault_access'].each { |vault|\n grantSecretAccess(vault['vault'], vault['item'])\n }\n end\n end", "def default_config\n data = {\n 'acr_values' => 'http://idmanagement.gov/ns/assurance/loa/1',\n 'client_id' => 'urn:gov:gsa:openidconnect:sp:sinatra',\n }\n\n if LoginGov::Hostdata.in_datacenter?\n # EC2 
deployment defaults\n\n env = LoginGov::Hostdata.env\n domain = LoginGov::Hostdata.domain\n\n if env == 'prod'\n data['idp_url'] = \"https://secure.#{domain}\"\n else\n data['idp_url'] = \"https://idp.#{env}.#{domain}\"\n end\n data['redirect_uri'] = \"https://sp-oidc-sinatra.#{env}.#{domain}/\"\n data['sp_private_key_path'] = \"aws-secretsmanager:#{env}/sp-oidc-sinatra/oidc.key\"\n data['redact_ssn'] = true\n else\n # local dev defaults\n data['idp_url'] = 'http://localhost:3000'\n data['redirect_uri'] = 'http://localhost:9292/'\n data['sp_private_key_path'] = demo_private_key_path\n data['redact_ssn'] = false\n end\n\n data\n end", "def boot_aws_inception_vm\n say \"\" # glowing whitespace\n\n unless settings[\"inception\"][\"ip_address\"]\n say \"Provisioning IP address for inception VM...\"\n settings[\"inception\"][\"ip_address\"] = acquire_ip_address\n save_settings!\n end\n\n unless settings[\"inception\"] && settings[\"inception\"][\"server_id\"]\n username = \"ubuntu\"\n size = \"m1.small\"\n ip_address = settings[\"inception\"][\"ip_address\"]\n key_name = settings[\"inception\"][\"key_pair\"][\"name\"]\n say \"Provisioning #{size} for inception VM...\"\n inception_vm_attributes = {\n :groups => [settings[\"inception\"][\"security_group\"]],\n :key_name => key_name,\n :private_key_path => inception_vm_private_key_path,\n :flavor_id => size,\n :bits => 64,\n :username => \"ubuntu\",\n :public_ip_address => ip_address\n }\n if vpc?\n raise \"must create subnet before creating VPC inception VM\" unless settings[\"subnet\"] && settings[\"subnet\"][\"id\"]\n inception_vm_attributes[:subnet_id] = settings[\"subnet\"][\"id\"]\n inception_vm_attributes[:private_ip_address] = \"10.0.0.5\"\n end\n server = provider.bootstrap(inception_vm_attributes)\n unless server\n error \"Something mysteriously cloudy happened and fog could not provision a VM. 
Please check your limits.\"\n end\n\n settings[\"inception\"].delete(\"create_new\")\n settings[\"inception\"][\"server_id\"] = server.id\n settings[\"inception\"][\"username\"] = username\n save_settings!\n end\n\n server ||= fog_compute.servers.get(settings[\"inception\"][\"server_id\"])\n\n unless settings[\"inception\"][\"disk_size\"]\n disk_size = DEFAULT_INCEPTION_VOLUME_SIZE # Gb\n device = \"/dev/sdi\"\n provision_and_mount_volume(server, disk_size, device)\n\n settings[\"inception\"][\"disk_size\"] = disk_size\n settings[\"inception\"][\"disk_device\"] = device\n save_settings!\n end\n\n # settings[\"inception\"][\"host\"] is used externally to determine\n # if an inception VM has been assigned already; so we leave it\n # until last in this method to set this setting.\n # This way we can always rerun the CLI and rerun this method\n # and idempotently get an inception VM\n unless settings[\"inception\"][\"host\"]\n settings[\"inception\"][\"host\"] = server.dns_name\n save_settings!\n end\n\n confirm \"Inception VM has been created\"\n display_inception_ssh_access\n end", "def initialize_generate\n super\n add_accessors\n @flavor.class.do_declare_resources do\n templates_if_missing << 'metadata.rb'\n templates_if_missing << 'README.md'\n templates_if_missing << 'CHANGELOG.md'\n end\n declare_gemfile\n declare_berksfile\n declare_rakefile\n declare_chefignore_patterns\n end", "def setup\n copy_definitions\n package_agent\n end", "def setup\n copy_definitions\n package_agent\n end", "def setup!(requesting_actor_id, options=Hash.new)\n create_database!\n\n policy = OrgAuthPolicy.new(self, requesting_actor_id, options)\n policy.apply!\n\n # Environments are in erchef / SQL. Make an HTTP request to create the default environment\n headers = {:headers => {'x-ops-request-source' => 'web'}}\n rest = Chef::REST.new(Chef::Config[:chef_server_host_uri],\n Chef::Config[:web_ui_proxy_user],\n Chef::Config[:web_ui_private_key], headers)\n rest.post_rest(\"organizations/#{name}/environments\",\n {\n 'name' => '_default',\n 'description' => 'The default Chef environment'\n })\n end", "def metric_config\n # This is bad... but I just want things to work!!!!!\n @mount_dir = ::ArchiveRoot\n\tdefault = Defaults.new\n\tdefault.metric_taxonomy= 'hsa'\n\tdefault.metric_instrument_type = 'ORBI'\n\tdefault\n end", "def configure_cloud\n message \"Configuring master\"\n build_and_send_config_files_in_temp_directory\n remote_configure_instances\n \n nodes.each do |node|\n node.configure\n end \n end", "def setup\n @subject = Fog::Compute[:google].servers\n @factory = ServersFactory.new(namespaced_name)\n @servers = ServersFactory.new(namespaced_name)\n @disks = DisksFactory.new(namespaced_name)\n end", "def ec2_driver_config\n {\n 'name' => 'ec2',\n 'aws_ssh_key_id' => \"#{cookbook_name}-kitchen\",\n 'security_group_ids' => ENV['AWS_SECURITY_GROUP_ID'] ? 
[ENV['AWS_SECURITY_GROUP_ID']] : [DEFAULT_EC2_SECURITY_GROUP_ID],\n 'subnet_id' => ENV['AWS_SUBNET_ID'] || DEFAULT_EC2_SUBNET_ID,\n # Because kitchen-rackspace also has a thing called flavor_id.\n 'flavor_id' => nil,\n }\n end", "def initialize(aki, sak, region, prefix, instance_id = open(\"http://169.254.169.254/latest/meta-data/instance-id\").read)\n\n @instance_id = instance_id\n @prefix = prefix\n\n @compute = Fog::Compute.new({:provider => 'AWS', :aws_access_key_id => aki, :aws_secret_access_key => sak, :region => region })\n end", "def register_new_agent_with_instance (agent_instance)\n throw \"not an aws instance!\" if !agent_instance.is_a?AwsInstance\n url_string=\"http://#{@server_address}:#{@server_port}/agents\"\n url=URI(url_string)\n req = Net::HTTP::Post.new(url)\n agent_instance.notify \"registering new agent\"\n res=Net::HTTP.start(url.hostname,url.port) do |http|\n req.basic_auth @username,@password\n req.body=\"name=#{agent_instance.name}&ip=#{agent_instance.aws_instance_data.private_ip_address}\"\n http.request(req)\n end\n if res.code.to_i <=399\n agent_instance.notify \"registered ok! getting key\"\n client_data=JSON.parse(res.body)\n client_id=client_data['data']\n url_string=\"http://#{@server_address}:#{@server_port}/agents/#{client_id}/key\"\n url=URI(url_string)\n req = Net::HTTP::Get.new(url)\n res=Net::HTTP.start(url.hostname,url.port) do |http|\n req.basic_auth @username,@password\n http.request(req)\n end\n if res.code.to_i<=399\n agent_instance.notify \"key for agent obtained! installing on agent...\"\n client_key_data=JSON.parse(res.body)\n ssh_command=\"sudo echo \\\"y\\\" | sudo /var/ossec/bin/manage_agents -i #{client_key_data['data']}\"\n s3_client=Helpers.create_aws_S3_client\n agent_instance.notify \"loading configuration for agent...\"\n resp=s3_client.get_object({\n bucket: \"btrz-aws-automation\",\n key: \"ossec/agent/ossec.conf\"\n })\n xml_content=resp.body.read\n noko_xml=Nokogiri::XML(xml_content)\n noko_xml.css(\"ossec_config client server-ip\").first.content=@private_server_address\n agent_instance.notify \"deploying configuration to agent, server ip=#{@private_server_address}\"\n agent_instance.upload_data_to_file noko_xml.root.to_s,\"/home/ubuntu/ossec.conf\"\n agent_instance.notify \"removing old keys\"\n agent_instance.run_ssh_command \"sudo rm /var/ossec/etc/client.keys\"\n agent_instance.notify \"updating configuration\"\n agent_instance.run_ssh_command \"sudo cp /home/ubuntu/ossec.conf /var/ossec/etc/ossec.conf && sudo chown root:ossec /var/ossec/etc/ossec.conf && sudo chmod 640 /var/ossec/etc/ossec.conf\"\n answer=agent_instance.run_ssh_command ssh_command\n agent_instance.notify \"restarting agent...\"\n agent_status=agent_instance.run_ssh_command \"sudo /var/ossec/bin/ossec-control restart\"\n if (answer.index( \"Added\") !=nil and agent_status.index( \"Completed\")!=nil)\n return true,{\n agent_key: client_key_data['data'],\n answer: answer,\n status: agent_status,\n }\n else\n return false,{\n agent_key: client_key_data['data'],\n answer: answer,\n status: agent_status,\n }\n end\n else\n return false, res.code\n end\n else\n agent_instance.notify \"authentication error connecting to server #{@server_address} in #{@environment}\"\n return false, res.code\n end\n end", "def setup_vagrant_instance(instance_alias, state)\n instance = {}\n instance[:id] = instance_alias\n instance[:state] = state\n instance[:provider] = 'vagrant'\n instance[:platform] = Rubber::Platforms::LINUX\n\n if ENV.has_key?('VAGRANT_CWD')\n 
instance[:provider_options] ||= {}\n instance[:provider_options][:vagrant_cwd] = ENV['VAGRANT_CWD']\n end\n\n # IP addresses\n ip = instance_external_ip(instance_alias)\n if ! ip.empty?\n capistrano.logger.info(\"Using #{ip} for external and internal IP address\")\n instance[:external_ip] = instance[:internal_ip] = ip\n else\n instance[:external_ip] = capistrano.rubber.get_env('EXTERNAL_IP', \"External IP address for host '#{instance_alias}'\", true)\n instance[:internal_ip] = capistrano.rubber.get_env('INTERNAL_IP', \"Internal IP address for host '#{instance_alias}'\", true, instance[:external_ip])\n end\n\n instance\n end", "def init_gce(google, override, instance_name)\n google.google_project_id = $GCE_PROJECT_ID\n google.google_client_email = $GCE_CLIENT_EMAIL\n google.google_json_key_location = $GCE_JSON_KEY\n google.zone = $GCE_ZONE\n google.image = $GCE_IMAGE\n google.machine_type = $GCE_MACHINE_TYPE\n google.name = instance_name\n\n override.ssh.username = $REMOTE_USER\n override.ssh.private_key_path = $REMOTE_SSH_KEY\nend", "def apply_system_defaults\n if @region.nil? && @zone.nil?\n @region, @zone = Rudy::DEFAULT_REGION, Rudy::DEFAULT_ZONE\n elsif @region.nil?\n @region = @zone.to_s.gsub(/[a-z]$/, '').to_sym\n elsif @zone.nil?\n @zone = \"#{@region}b\".to_sym\n end\n \n @environment ||= Rudy::DEFAULT_ENVIRONMENT\n @role ||= Rudy::DEFAULT_ROLE\n @localhost ||= Rudy.sysinfo.hostname || 'localhost'\n @auto = false if @auto.nil?\n end", "def load_attrs\n self.availability_zone = cluster_config[:availability_zone] || Settings.availability_zone\n self.image_id = cluster_config[:image_id] if cluster_config[:image_id]\n self.instance_type = cluster_config[:instance_type] if cluster_config[:instance_type]\n # self.deletes_on_termination = cluster_config[:deletes_on_termination] if cluster_config[:deletes_on_termination]\n end", "def sends_aws_keys settings\n settings[:user_data][:attributes][:aws] ||= {}\n settings[:user_data][:attributes][:aws][:access_key] ||= Settings[:access_key]\n settings[:user_data][:attributes][:aws][:secret_access_key] ||= Settings[:secret_access_key]\n settings[:user_data][:attributes][:aws][:aws_region] ||= Settings[:aws_region]\nend", "def configure_chef_only_once\r\n bootstrap_options = value_from_json_file(handler_settings_file,'runtimeSettings','0','handlerSettings', 'publicSettings', 'bootstrap_options')\r\n bootstrap_options = eval(bootstrap_options) ? eval(bootstrap_options) : {}\r\n\r\n if File.directory?(\"#{bootstrap_directory}\")\r\n puts \"#{Time.now} Bootstrap directory [#{bootstrap_directory}] already exists, skipping creation...\"\r\n else\r\n puts \"#{Time.now} Bootstrap directory [#{bootstrap_directory}] does not exist, creating...\"\r\n FileUtils.mkdir_p(\"#{bootstrap_directory}\")\r\n end\r\n\r\n puts \"#{Time.now} Creating chef configuration files\"\r\n\r\n copy_settings_file\r\n\r\n load_settings\r\n\r\n begin\r\n require 'chef/azure/core/bootstrap_context'\r\n\r\n config = configure_settings(bootstrap_options)\r\n\r\n Chef::Config[:validation_key_content] = @validation_key\r\n Chef::Config[:client_key_content] = @client_key\r\n Chef::Config[:chef_server_ssl_cert_content] = @chef_server_ssl_cert\r\n template_file = File.expand_path(File.dirname(File.dirname(__FILE__)))\r\n runlist = @run_list.empty? ? [] : escape_runlist(@run_list)\r\n load_cloud_attributes_in_hints if ! 
@ohai_hints.empty?\r\n\r\n if windows?\r\n context = Chef::Knife::Core::WindowsBootstrapContext.new(config, runlist, Chef::Config, config[:secret])\r\n template_file += \"\\\\bootstrap\\\\windows-chef-client-msi.erb\"\r\n bootstrap_bat_file ||= \"#{ENV['TMP']}/bootstrap.bat\"\r\n template = IO.read(template_file).chomp\r\n bash_template = Erubis::Eruby.new(template).evaluate(context)\r\n File.open(bootstrap_bat_file, 'w') {|f| f.write(bash_template)}\r\n bootstrap_command = \"cmd.exe /C #{bootstrap_bat_file}\"\r\n else\r\n context = Chef::Knife::Core::BootstrapContext.new(config, runlist, Chef::Config, config[:secret])\r\n template_file += '/bootstrap/chef-full.erb'\r\n template = IO.read(template_file).chomp\r\n bootstrap_command = Erubis::Eruby.new(template).evaluate(context)\r\n end\r\n\r\n result = shell_out(bootstrap_command)\r\n result.error!\r\n puts \"#{Time.now} Created chef configuration files\"\r\n\r\n # remove the temp bootstrap file\r\n FileUtils.rm(bootstrap_bat_file) if windows?\r\n rescue Mixlib::ShellOut::ShellCommandFailed => e\r\n Chef::Log.warn \"chef-client configuration files creation failed (#{e})\"\r\n @chef_client_error = \"chef-client configuration files creation failed (#{e})\"\r\n return\r\n rescue => e\r\n Chef::Log.error e\r\n @chef_client_error = \"chef-client configuration files creation failed (#{e})\"\r\n return\r\n end\r\n\r\n if @extended_logs == 'true'\r\n @chef_client_success_file = windows? ? \"c:\\\\chef_client_success\" : \"/tmp/chef_client_success\"\r\n end\r\n\r\n # Runs chef-client with custom recipe to set the run_list and environment\r\n begin\r\n current_dir = File.expand_path(File.dirname(File.dirname(__FILE__)))\r\n first_client_run_recipe_path = windows? ? \"#{current_dir}\\\\first_client_run_recipe.rb\" : \"#{current_dir}/first_client_run_recipe.rb\"\r\n if !config[:first_boot_attributes][\"policy_name\"].nil? and !config[:first_boot_attributes][\"policy_group\"].nil?\r\n command = \"chef-client -j #{bootstrap_directory}/first-boot.json -c #{bootstrap_directory}/client.rb -L #{@azure_plugin_log_location}/chef-client.log --once\"\r\n else\r\n command = \"chef-client #{first_client_run_recipe_path} -j #{bootstrap_directory}/first-boot.json -c #{bootstrap_directory}/client.rb -L #{@azure_plugin_log_location}/chef-client.log --once\"\r\n end\r\n command += \" -E #{config[:environment]}\" if config[:environment]\r\n result = shell_out(command)\r\n result.error!\r\n rescue Mixlib::ShellOut::ShellCommandFailed => e\r\n Chef::Log.error \"First chef-client run failed. (#{e})\"\r\n @chef_client_error = \"First chef-client run failed (#{e})\"\r\n return\r\n rescue => e\r\n Chef::Log.error e\r\n @chef_client_error = \"First chef-client run failed (#{e})\"\r\n end\r\n\r\n params = \"-c #{bootstrap_directory}/client.rb -L #{@azure_plugin_log_location}/chef-client.log --once \"\r\n\r\n # Runs chef-client in background using scheduled task if windows else using process\r\n if windows?\r\n puts \"#{Time.now} Creating scheduled task with runlist #{runlist}..\"\r\n schtask = \"SCHTASKS.EXE /Create /TN \\\"Chef Client First Run\\\" /RU \\\"NT Authority\\\\System\\\" /RP /RL \\\"HIGHEST\\\" /SC ONCE /TR \\\"cmd /c 'C:\\\\opscode\\\\chef\\\\bin\\\\chef-client #{params}'\\\" /ST \\\"#{Time.now.strftime('%H:%M')}\\\" /F\"\r\n\r\n begin\r\n result = @extended_logs == 'true' ? 
shell_out(\"#{schtask} && touch #{@chef_client_success_file}\") : shell_out(schtask)\r\n result.error!\r\n @chef_client_run_start_time = Time.now\r\n\r\n # call to run scheduled task immediately after creation\r\n result = shell_out(\"SCHTASKS.EXE /Run /TN \\\"Chef Client First Run\\\"\")\r\n result.error!\r\n rescue Mixlib::ShellOut::ShellCommandFailed => e\r\n Chef::Log.error \"Creation or running of scheduled task for first chef-client run failed (#{e})\"\r\n @chef_client_error = \"Creation or running of scheduled task for first chef-client run failed (#{e})\"\r\n rescue => e\r\n Chef::Log.error e\r\n @chef_client_error = \"Creation or running of scheduled task for first chef-client run failed (#{e})\"\r\n end\r\n puts \"#{Time.now} Created and ran scheduled task for first chef-client run with runlist #{runlist}\"\r\n else\r\n command = @extended_logs == 'true' ? \"chef-client #{params} && touch #{@chef_client_success_file}\" : \"chef-client #{params}\"\r\n @child_pid = Process.spawn command\r\n @chef_client_run_start_time = Time.now\r\n Process.detach @child_pid\r\n puts \"#{Time.now} Successfully launched chef-client process with PID [#{@child_pid}]\"\r\n end\r\n end", "def init_options( opts )\n options = default_options.merge( opts ) \n @environment = options[:environment]\n @perf_threshold = options[:perf_threshold]\n @moleable = options[:moleable]\n @app_name = options[:app_name]\n @user_key = options[:user_key]\n @store = options[:store]\n @excluded_paths = options[:excluded_paths]\n end", "def initialize(setname, config, custom_prefabs_path, options)\n super\n\n # Valid supported ENV variables\n options = [:host, :user, :pass, :dest_dir, :template_dir, :rpool,\n :cluster, :ssh_keys, :datacenter, :node_timeout, :node_tries,\n :node_sleep, :connect_timeout, :connect_tries]\n\n # Devise defaults, use fog configuration from file system if it exists\n defaults = load_fog_config()\n defaults = defaults.merge({\n :node_timeout => 1200,\n :node_tries => 10,\n :node_sleep => 30 + rand(60),\n :connect_timeout => 60,\n :connect_tries => 10,\n })\n\n # Traverse the ENV variables and load them into our config automatically\n @vmconf = defaults\n ENV.each do |k,v|\n next unless k =~/^RS(PEC)?_VSPHERE_/\n var = k.sub(/^RS(PEC)?_VSPHERE_/, '').downcase.to_sym\n unless options.include?(var)\n log.info(\"Ignoring unknown environment variable #{k}\")\n next\n end\n @vmconf[var] = v\n end\n\n # Initialize node storage if not already\n RSpec.configuration.rs_storage[:nodes] ||= {}\n end", "def initiate(nodes, params, _opts = {})\n indexes = nodes.map { |r| r[:id] }\n set_indexes!(indexes)\n ndx_pbuilderid_to_node_info = nodes.inject({}) do |h, n|\n h.merge(n.pbuilderid => { id: n[:id], display_name: n.assembly_node_print_form() })\n end\n callbacks = {\n on_msg_received: proc do |msg|\n\n response = CommandAndControl.parse_response__execute_action(nodes, msg)\n if response && response[:pbuilderid] && response[:status] == :ok\n node_info = ndx_pbuilderid_to_node_info[response[:pbuilderid]]\n\n unless response[:data][:error]\n component_type = :authorized_ssh_public_key\n attr_hash = {\n linux_user: params[:system_user],\n key_name: params[:rsa_pub_name],\n key_content: params[:rsa_pub_key]\n }\n node = nodes.find { |n| n[:id] == node_info[:id] }\n\n if (@agent_action == :grant_access)\n Component::Instance::Interpreted.create_or_update?(node, component_type, attr_hash)\n else\n Component::Instance::Interpreted.delete(node, component_type, attr_hash)\n end\n end\n\n push(node_info[:display_name], 
response[:data])\n else\n Log.error(\"Agent '#{msg[:senderagent]}' error, Code: #{msg[:body][:statuscode]} - #{msg[:body][:statusmsg]}\")\n end\n\n end\n }\n CommandAndControl.request__execute_action(:ssh_agent, @agent_action, nodes, callbacks, params)\n end", "def configure_ai_client(client_name,client_arch,client_mac,client_ip,client_model,publisher_host,service_name,image_name)\n # Populate questions for AI profile\n if !service_name.match(/i386|sparc/)\n service_name = service_name+\"_\"+client_arch\n end\n check_ai_client_doesnt_exist(client_name,client_mac,service_name)\n populate_ai_client_profile_questions(client_ip,client_name)\n process_questions(service_name)\n if $os_name.match(/Darwin/)\n tftp_version_dir = $tftp_dir+\"/\"+service_name\n check_osx_iso_mount(tftp_version_dir,iso_file)\n end\n output_file = $work_dir+\"/\"+client_name+\"_ai_profile.xml\"\n create_ai_client_profile(output_file)\n puts \"Configuring:\\tClient \"+client_name+\" with MAC address \"+client_mac\n import_ai_client_profile(output_file,client_name,client_mac,service_name)\n create_ai_client(client_name,client_arch,client_mac,service_name,client_ip)\n if $os_name.match(/SunOS/) and $os_rel.match(/11/)\n clear_solaris_dhcpd()\n end\n return\nend", "def build_environment(autoinst)\n environment = {\n \"AYTESTS_FILES_DIR\" => files_dir.to_s,\n \"AYTESTS_PROVIDER\" => provider.to_s,\n \"AYTESTS_WEBSERVER_PORT\" => WEBSERVER_PORT,\n \"AYTESTS_MAC_ADDRESS\" => MAC_ADDRESS\n }\n linuxrc_file = autoinst.sub_ext(\".linuxrc\")\n environment[\"AYTESTS_LINUXRC\"] = File.read(linuxrc_file).chomp if linuxrc_file.exist?\n environment\n end", "def make_fake_instances\n return unless Fog.mock?\n\n asg_instances = []\n all_instances = []\n min_size.times do |n|\n instance_id = Fog::AWS::Mock.instance_id\n asg_instances << {\n 'AvailabilityZone' => availability_zones,\n 'HealthStatus' => 'Good',\n 'InstanceId' => instance_id,\n 'LifecycleState' => 'Pending',\n 'LaunchConfigurationName' => launch_configuration.aws_identifier\n }\n\n all_instances << {\n 'amiLaunchIndex' => n,\n 'architecture' => 'i386',\n 'blockDeviceMapping' => [],\n 'clientToken' => 'FAKE_CLIENT_TOKEN',\n 'dnsName' => 'not-a-real-hostname',\n 'ebsOptimized' => false,\n 'hypervisor' => 'xen',\n 'imageId' => launch_configuration.ami,\n 'instanceId' => instance_id,\n 'instanceState' => { 'code' => 0, 'name' => 'not pending?' 
},\n 'instanceType' => launch_configuration.instance_type,\n 'kernelId' => launch_configuration.kernel_id || Fog::AWS::Mock.kernel_id,\n 'keyName' => launch_configuration.key_name,\n 'launchTime' => Time.now,\n 'monitoring' => { 'state' => false },\n 'placement' => { 'availabilityZone' => availability_zones,\n 'groupName' => self.aws_identifier,\n 'tenancy' => 'default' },\n 'privateDnsName' => nil,\n 'productCodes' => [],\n 'reason' => nil,\n 'rootDeviceType' => 'instance-store',\n 'virtualizationType' => 'paravirtual',\n 'groupIds' => [],\n 'groupSet' => launch_configuration.security_groups,\n 'iamInstanceProfile' => launch_configuration.iam_role,\n 'networkInterfaces' => [],\n 'ownerId' => nil,\n 'privateIpAddress' => nil,\n 'reservationId' => Fog::AWS::Mock.reservation_id,\n 'stateReason' => {},\n 'ipAddress' => Fog::AWS::Mock.ip_address,\n 'privateIpAddress' => Fog::AWS::Mock.private_ip_address\n }\n end\n Aerosol::AWS.auto_scaling.data[:auto_scaling_groups][aws_identifier]\n .merge!('Instances' => asg_instances)\n all_instances.each do |instance|\n Aerosol::AWS.compute.data[:instances][instance['instanceId']] = instance\n end\n end", "def configure_vmware_vbox_vm(client_name)\n modify_vbox_vm(client_name,\"rtcuseutc\",\"on\")\n modify_vbox_vm(client_name,\"vtxvpid\",\"on\")\n modify_vbox_vm(client_name,\"vtxux\",\"on\")\n modify_vbox_vm(client_name,\"hwvirtex\",\"on\")\n setextradata_vbox_vm(client_name,\"VBoxInternal/Devices/pcbios/0/Config/DmiSystemVersion\",\"None\")\n setextradata_vbox_vm(client_name,\"VBoxInternal/Devices/pcbios/0/Config/DmiBoardVendor\",\"Intel Corporation\")\n setextradata_vbox_vm(client_name,\"VBoxInternal/Devices/pcbios/0/Config/DmiBoardProduct\",\"440BX Desktop Reference Platform\")\n setextradata_vbox_vm(client_name,\"VBoxInternal/Devices/pcbios/0/Config/DmiSystemVendor\",\"VMware, Inc.\")\n setextradata_vbox_vm(client_name,\"VBoxInternal/Devices/pcbios/0/Config/DmiSystemProduct\",\"VMware Virtual Platform\")\n setextradata_vbox_vm(client_name,\"VBoxInternal/Devices/pcbios/0/Config/DmiBIOSVendor\",\"Phoenix Technologies LTD\")\n setextradata_vbox_vm(client_name,\"VBoxInternal/Devices/pcbios/0/Config/DmiBIOSVersion\",\"6.0\")\n setextradata_vbox_vm(client_name,\"VBoxInternal/Devices/pcbios/0/Config/DmiChassisVendor\",\"No Enclosure\")\n vbox_vm_uuid = get_vbox_vm_uuid(client_name)\n vbox_vm_uuid = \"VMware-\"+vbox_vm_uuid\n setextradata_vbox_vm(client_name,\"VBoxInternal/Devices/pcbios/0/Config/DmiSystemSerial\",vbox_vm_uuid)\n return\nend", "def initialize\n\n unless ENV['PBMANAGER_ROOT'].nil?\n @base_dir = Pathname.new(ENV['PBMANAGER_ROOT'])\n if !@base_dir.directory?\n abort(\"PBMANAGER_ROOT is not directory\")\n end\n @base_dir = @base_dir.realpath.to_s\n else\n abort(\"Could not find PBMANAGER_ROOT\")\n return\n end\n\n\n # our \"build_root\" sits off our home directory,\n # TODO make this home or tmp or \"where told\"\n @sites_dir = ENV['HOME'] + \"/Sites\"\n\n\n @build_dir = \"#{@sites_dir}/build\"\n @cache_dir = \"#{@sites_dir}/cache\"\n\n # TODO Evaluate moving these to where they need to be realized.\n @ksiso_dir = \"#{@build_dir}/isos\"\n @vagrant_dir = \"#{@build_dir}/vagrant\"\n @ovf_dir = \"#{@build_dir}/ovf\"\n @vmware_dir = \"#{@build_dir}/vmware\"\n @vbox_dir = \"#{@build_dir}/vbox\"\n\n\n # TODO when ready to gemize the location this will be \"~/.pbs/config\"\n\n # if the directory exists then use it, otherwise create it, then\n # stop and alert the user.\n\n # TEMPORARY\n @config_dir = \"#{@base_dir}/config\"\n unless 
File.directory?(@config_dir)\n abort(\"Could not find PBMANAGER_ROOT/config?\")\n return\n end\n\n # TODO when ready to gemize location this will be \"~/.pbs/log\"\n the_log_dir = \"#{@base_dir}/log\"\n unless File.directory?(the_log_dir)\n # puts \"Making #{the_log_dir} for logging\"\n FileUtils.mkdir_p(the_log_dir)\n end\n\n the_log_file = \"#{the_log_dir}/log.txt\"\n begin\n @log = Logger.new(the_log_file)\n @log.level = Logger::DEBUG\n rescue => e\n raise PBManager::FatalError.new(\"Could not find #{the_log_file}\", e)\n return\n end\n\n @settings = YAML.load_file(\"#{@config_dir}/application.yml\")\n\n # TODO temp hardcode, this will be in \"~/.pbs/config\" directory\n @vmos = @settings[:vmos]\n @vmtype = @settings[:vmtype]\n\n\n @log.unknown(\"PBManager was initialized\")\n @log.debug \"@settings = \\n#{@settings.to_yaml}\"\n @log.info \"@vmos = #{@vmos}\"\n @log.info \"@vmtype = #{@vmtype}\"\n end", "def fix_automatic_attributes\n platform, version = Chef::Platform.find_platform_and_version(self)\n # FIXME(log): should be trace\n logger.debug(\"Platform is #{platform} version #{version}\")\n automatic[:platform] = platform\n automatic[:platform_version] = Chef::VersionString.new(version)\n automatic[:chef_guid] = Chef::Config[:chef_guid] || ( Chef::Config[:chef_guid] = node_uuid )\n automatic[:name] = name\n automatic[:chef_environment] = chef_environment\n end", "def generate()\n say \"Generating Cloud #{ options[:cloud] } IaC\", :green\n @values = parse_cloud_config\n\n case options[:cloud]\n when 'aws'\n copy_file('aws/terraform/main.tf', 'terraform/main.tf')\n copy_file('aws/terraform/network.tf', 'terraform/network.tf')\n copy_file('aws/terraform/outputs.tf', 'terraform/outputs.tf')\n copy_file('aws/terraform/variables.tf', 'terraform/variables.tf')\n template('aws/terraform/terraform.tfvars.erb', 'terraform/terraform.tfvars')\n copy_file('aws/README.md', 'README.md')\n\n template('aws/bosh-install.sh.erb', 'bin/bosh-install.sh')\n template('aws/setup-tunnel.sh.erb', 'bin/setup-tunnel.sh')\n template('aws/concourse-deploy.sh.erb', 'bin/concourse-deploy.sh')\n template('aws/set-env.sh.erb', 'bin/set-env.sh')\n chmod('bin/bosh-install.sh', 0755)\n chmod('bin/concourse-deploy.sh', 0755)\n chmod('bin/setup-tunnel.sh', 0755)\n\n when 'gcp'\n copy_file('gcp/terraform/main.tf', 'terraform/main.tf')\n copy_file('gcp/terraform/network.tf', 'terraform/network.tf')\n copy_file('gcp/terraform/outputs.tf', 'terraform/outputs.tf')\n copy_file('gcp/terraform/variables.tf', 'terraform/variables.tf')\n template('gcp/terraform/terraform.tfvars.erb', 'terraform/terraform.tfvars')\n copy_file('gcp/README.md', 'README.md', force: true)\n\n template('gcp/bosh-install.sh.erb', 'bin/bosh-install.sh')\n template('gcp/bosh-vars.yml.erb', 'bosh-vars.yml')\n template('gcp/setup-tunnel.sh.erb', 'bin/setup-tunnel.sh')\n template('gcp/set-env.sh.erb', 'bin/set-env.sh')\n chmod('bin/bosh-install.sh', 0755)\n chmod('bin/setup-tunnel.sh', 0755)\n\n else\n say 'Cloud provider not specified'\n\n end\n end", "def initialize(params_obj, options = {})\n @standard_properties = [\"SS_application\", \"SS_component\", \"SS_environment\", \"SS_component_version\", \"request_number\"]\n @p = params_obj\n super @p\n lib_path = @p.get(\"SS_script_support_path\")\n @action_platforms = {\"default\" => {\"transport\" => \"nsh\", \"platform\" => \"linux\", \"language\" => \"bash\", \"comment_char\" => \"#\", \"env_char\" => \"\", \"lb\" => \"\\n\", \"ext\" => \"sh\"}}\n @action_platforms.merge!(ACTION_PLATFORMS) if 
defined?(ACTION_PLATFORMS)\n @automation_category = get_option(options, \"automation_category\", @p.get(\"SS_automation_category\", \"shell\"))\n @output_dir = get_option(options, \"output_dir\", @p.get(\"SS_output_dir\"))\n action_platform(options)\n @nsh_path = get_option(options,\"nsh_path\", NSH_PATH)\n @debug = get_option(options, \"debug\", false)\n @nsh = NSH.new(@nsh_path)\n set_transfer_properties(options)\n timeout = get_option(options, \"timeout\", @p.get(\"step_estimate\", \"60\"))\n @timeout = timeout.to_i * 60\n end", "def setup_defaults\n @program_title = 'PROGRAM TITLE'\n @program_site = 'PROGRAM SITE'\n @request_availability = false\n @meeting_times = ''\n @sourcing_options = ''\n @course_options = ''\n @student_id_required = false\n @student_id_format = ''\n @student_id_format_help = ''\n @student_id_excluded_chars = ''\n @contact_email = '[email protected]'\n @is_preaccelerator_student = false\n end", "def prepare_for_configuration \n # clear_base_directory\n make_base_directory\n copy_misc_templates\n copy_custom_monitors\n store_keys_in_file\n Script.save!(self)\n # not my favorite...\n copy_ssh_key\n before_configuration_tasks\n end", "def configure(vm_config, number)\n vm_config.vm.network \"public_network\", ip: \"192.168.1.24#{number}\"\n vm_config.vm.host_name = \"level0#{number}.seoshop.net\"\n \n vm_config.vm.provision :puppet do |puppet|\n puppet.manifests_path = \"puppet\"\n puppet.module_path = \"puppet/modules\"\n puppet.manifest_file = \"site.pp\"\n end\nend", "def setup_hypervisor\n hypervisor = nil\n params = ARGV.clone\n\n params.each_with_index do |param, index|\n case param\n when '--kvm'\n hypervisor = KVM\n ARGV.delete_at(index)\n when '--xen'\n hypervisor = XEN\n ARGV.delete_at(index)\n end\n end\n\n if !hypervisor\n case $0\n when %r{/vmm\\/kvm/}\n hypervisor=KVM\n when %r{/vmm\\/xen\\d?/}\n hypervisor=XEN\n end\n end\n\n case hypervisor.name\n when 'XEN'\n file = 'xenrc'\n vars = %w{XM_POLL XM_LIST}\n when 'KVM'\n file = 'kvmrc'\n vars = %w{LIBVIRT_URI}\n else\n return nil\n end\n\n # Load the rc variables and override the default values\n begin\n env = `. #{File.dirname($0)+\"/#{file}\"};env`\n lines = env.split(\"\\n\")\n\n vars.each do |var|\n lines.each do |line|\n if a = line.match(/^(#{var})=(.*)$/)\n hypervisor::CONF[var] = a[2]\n break\n end\n end\n end\n rescue\n end\n\n return hypervisor\nend", "def create_config \n @config = ::Capistrano::Configuration.new\n if @cloud.debug || @cloud.verbose \n @config.logger.level = @cloud.debug ? ::Capistrano::Logger::MAX_LEVEL : ::Capistrano::Logger::INFO\n else\n @config.logger.level = ::Capistrano::Logger::IMPORTANT\n end\n \n capfile = returning Array.new do |arr|\n Dir[\"#{::File.dirname(__FILE__)}/recipies/*.rb\"].each {|a| arr << \"require '#{a}'\" }\n arr << \"ssh_options[:keys] = '#{@cloud.full_keypair_basename_path}'\"\n \n arr << set_poolparty_roles\n end.join(\"\\n\")\n \n @config.provisioner = self\n @config.cloud = @cloud\n \n @config.load(:string => capfile)\n \n @cloud.deploy_file ? 
@config.load(@cloud.deploy_file) : @config.set(:user, @cloud.user)\n end", "def connect_settings\n sanitize_environment_report\n settings = {\n :pid => $$,\n :port => ::TingYun::Agent.config[:port],\n :host => local_host,\n :appName => ::TingYun::Agent.config.app_names,\n :language => 'Ruby',\n :agentVersion => ::TingYun::VERSION::STRING,\n :env => @environment_report,\n :config => ::TingYun::Agent.config.to_collector_hash\n }\n settings\n end", "def block_device_mapping\n hsh = { 'DeviceName' => device }\n if ephemeral_device?\n hsh['VirtualName'] = volume_id\n elsif create_at_launch?\n hsh.merge!({\n 'Ebs.SnapshotId' => snapshot_id,\n 'Ebs.VolumeSize' => size,\n 'Ebs.DeleteOnTermination' => (! keep).to_s })\n else\n return\n end\n hsh\n end", "def set_ami_spec\n ami_arch = @@ec2.describe_images([self.ami_id]).first\n if (ami_arch[:aws_architecture] == \"i386\" && self.ami_spec.blank? && self.spot_price.blank?)\n self.ami_spec = \"c1.medium\"\n self.spot_price = 0.50\n elsif (ami_arch[:aws_architecture] == \"x86_64\" && self.ami_spec.blank? && self.spot_price.blank?)\n self.ami_spec = \"m1.large\"\n self.spot_price = 1.00\n end\n end", "def init\n if @args.first.nil?\n @ui.error('Please specify the node')\n return ARGUMENT_ERROR_RESULT\n end\n @mdbci_config = Configuration.new(@args.first, @env.labels)\n result = NetworkSettings.from_file(@mdbci_config.network_settings_file)\n if result.error?\n @ui.error(result.error)\n return ARGUMENT_ERROR_RESULT\n end\n\n @network_settings = result.value\n @product = @env.nodeProduct\n @product_version = @env.productVersion\n if @product.nil? || @product_version.nil?\n @ui.error('You must specify the name and version of the product')\n return ARGUMENT_ERROR_RESULT\n end\n\n @machine_configurator = MachineConfigurator.new(@ui)\n\n SUCCESS_RESULT\n end", "def config_file_settings\n Chef::Config[:knife].save(false) # this is like \"dup\" to a (real) Hash, and does not include default values (just user set values)\n end" ]
[ "0.74981487", "0.72785604", "0.6989644", "0.6177954", "0.6065063", "0.59736824", "0.5722319", "0.5713777", "0.56887895", "0.560247", "0.5570425", "0.5552157", "0.5528126", "0.5523799", "0.5490877", "0.5490583", "0.54324764", "0.54210335", "0.5394933", "0.5392132", "0.5388329", "0.53787315", "0.53431016", "0.5326678", "0.5300335", "0.5293702", "0.52902925", "0.52559453", "0.52302235", "0.52248013", "0.5216518", "0.52113706", "0.5209976", "0.51914376", "0.5168388", "0.51659536", "0.5161972", "0.5150922", "0.51493955", "0.51490045", "0.5133723", "0.51264846", "0.5124822", "0.51152307", "0.5075334", "0.5068617", "0.5067185", "0.50654554", "0.50606185", "0.50581264", "0.5053937", "0.50502014", "0.5047759", "0.50430846", "0.50412095", "0.50410116", "0.50402135", "0.50213134", "0.5019222", "0.50131106", "0.5006368", "0.50060856", "0.50046295", "0.49945658", "0.49945658", "0.4993912", "0.49931154", "0.4966332", "0.49658293", "0.49620825", "0.4959096", "0.49585038", "0.49559447", "0.49508515", "0.49463505", "0.49442044", "0.4937945", "0.49307752", "0.49303588", "0.49283436", "0.49277994", "0.49236396", "0.49235544", "0.49190822", "0.49179816", "0.49106154", "0.49065554", "0.4905645", "0.49046692", "0.4903234", "0.4902208", "0.48961705", "0.4889191", "0.4886258", "0.4885863", "0.4880067", "0.4877131", "0.487289", "0.4869016" ]
0.75688684
1
GET /bairros/1 GET /bairros/1.xml
def show @bairro = Bairro.find(params[:id]) respond_to do |format| format.html # show.html.erb format.xml { render :xml => @bairro } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def index\n @boms = Bom.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @boms }\n end\n end", "def show\n @bairro_micro = BairroMicro.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bairro_micro }\n end\n end", "def show\n @barrio = Barrio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @barrio }\n end\n end", "def api_xml(path,method=:get,options={})\n xml_message(amee,\"/data\"+path,method,options)\n end", "def index\n @cuentas = Cuenta.all\n\n @cadena = getcuentasxml\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @cadena }\n end\n end", "def read(id=nil)\r\n request = Net::HTTP.new(@uri.host, @uri.port)\r\n if id.nil?\r\n response = request.get(\"#{@uri.path}.xml\") \r\n else\r\n response = request.get(\"#{@uri.path}/#{id}.xml\") \r\n end\r\n response.body\r\n end", "def get_xml\n response = @api.request(:get, @location, type: 'xml')\n response.body if response.status == 200\n end", "def show\n @bap = Bap.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bap }\n end\n end", "def show\n @bonificacion = Bonificacion.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bonificacion }\n end\n end", "def show\n @bixo = Bixo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bixo }\n end\n end", "def show\n @asambleista = Asambleista.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @asambleista }\n end\n end", "def index\n @brothers = Brother.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @brothers }\n end\n end", "def show\n @relatestagiario = Relatestagiario.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @relatestagiario }\n end\n end", "def index\n @feria2010observaciones = Feria2010observacion.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @feria2010observaciones }\n end\n end", "def index\n @borrows = Borrow.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @borrows }\n end\n end", "def read(id=nil)\n request = Net::HTTP.new(@uri.host, @uri.port)\n if id.nil?\n response = request.get(\"#{@uri.path}.xml\")\n else\n response = request.get(\"#{@uri.path}/#{id}.xml\")\n end\n\n response.body\n end", "def show\n @boat = Boat.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @boat }\n end\n end", "def index\n @criancas = Crianca.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @criancas }\n end\n end", "def index\n @aisles = Aisle.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @aisles }\n end\n end", "def index\n \n @bibliografias = Bibliografia.busqueda(params[:page], params[:generico], params[:buscar], 20)\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @bibliografias }\n end\n end", "def show\n @crianca = Crianca.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @crianca }\n end\n end", "def show\n @bogey = Bogey.find(params[:id])\n\n 
respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bogey }\n end\n end", "def show\n @bingo = Bingo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bingo }\n end\n end", "def index\n @bowls = Bowl.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @bowls }\n end\n end", "def new\n @bairro = Bairro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @bairro }\n end\n end", "def index\n @acres = Acre.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @acres }\n end\n end", "def show\n @bom = Bom.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bom }\n end\n end", "def index\n @aautos = Aauto.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @aautos }\n end\n end", "def list_books(api_object)\n puts \"Current Books:\"\n doc = Nokogiri::XML.parse api_object.read\n names = doc.xpath('books/book/title').collect {|e| e.text }\n puts names.join(\", \")\n puts \"\"\nend", "def index\n @tipo_lancamentos = TipoLancamento.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @tipo_lancamentos }\n end\n end", "def index\n @lancamentos = Lancamento.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @lancamentos }\n end\n end", "def index\n debugger\n @receitas = Receita.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @receitas }\n end\n end", "def show\n @news_blast = NewsBlast.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @news_blast }\n end\n end", "def index\n @avisos = Aviso.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @avisos }\n end\n end", "def index\n @asistencias = Asistencia.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @asistencias }\n end\n end", "def index\n @activos = Activo.all\n @marcas = Marca.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @activos }\n end\n end", "def show\n @browsenodeid = Browsenodeid.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @browsenodeid }\n end\n end", "def show\n @bios = Bio.find(:all, :order => :position)\n @bio = Bio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bio }\n end\n end", "def show\n @bibliografia = Bibliografia.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bibliografia }\n end\n end", "def index\n @bets = Bet.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @bets }\n end\n end", "def show\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @abucket }\n end\n end", "def rest_get(uri)\n \n request = Net::HTTP::Get.new uri\n request.add_field(\"Accept\",\"application/xml\")\n auth_admin(request)\n \n Net::HTTP.start(uri.host, uri.port) do |http|\n response = http.request request\n response.value\n\n doc = REXML::Document.new response.body\n \n return doc\n \n end\n \nend", "def index\n @annees = Annee.all\n\n respond_to do |format|\n format.html # index.html.erb\n 
format.xml { render :xml => @annees }\n end\n end", "def show\n @analisis = Analisis.find(params[:id])\n\n respond_to do |format|\n format.xml { render :xml => @analisis }\n end\n end", "def show\n @bouncer = Bouncer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bouncer }\n end\n end", "def show\n @bdig = Bdig.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bdig }\n end\n end", "def show\n @brothers = Brother.all\n @brother = Brother.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @brother }\n end\n end", "def show\n @roaster = Roaster.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @roaster }\n end\n end", "def index\n @bills = Bills.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @bills }\n end\n end", "def show\n @cabasiento = Cabasiento.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @cabasiento }\n end\n end", "def index\n @lieus = Lieu.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @lieus }\n end\n end", "def index\n respond_to do |format|\n format.html { render_template } # index.html.erb\n format.xml { \n @brands = Brand.all\n render xml: @brands \n }\n end\n end", "def index\n @datos = Dato.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @datos }\n end\n end", "def index\n @asambleistas = Asambleista.paginate(:per_page => 20, :page => params[:page])\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @asambleistas }\n format.print{ @asambleistas = Asambleista.find(:all); render :layout => 'imprimir' }\n end\n end", "def show\n @banana = Banana.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @banana }\n end\n end", "def show\n @bid = Bid.find(params[:id])\n\n respond_to do |format|\n format.html # show.rhtml\n format.xml { render :xml => @bid.to_xml }\n end\n end", "def show\n @reclamacao = Reclamacao.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @reclamacao }\n end\n end", "def show\n @abonne = Abonne.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @abonne }\n end\n end", "def xml(id)\n http.get(\"/nfse/#{id}/xml\") do |response|\n response.headers.fetch(\"Location\") { \"\" }\n end\n end", "def index\n @pagos = Pago.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @pagos }\n end\n end", "def index\n @news_blasts = NewsBlast.all\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @news_blasts }\n end\n end", "def show\n @bio = Bio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bio }\n end\n end", "def show\n @receita = Receita.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @receita }\n end\n end", "def show\n @receita = Receita.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @receita }\n end\n end", "def show\n @combustivel = Combustivel.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n 
format.xml { render :xml => @combustivel }\n end\n end", "def index\n @compras = Compra.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @compras }\n end\n end", "def index\n @bonuses = Bonus.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @bonuses }\n end\n end", "def index\n @bosses = Boss.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @bosses }\n end\n end", "def show\n @pagamento = Pagamento.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @pagamento }\n end\n end", "def show\n @aplicacion = Aplicacion.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @aplicacion }\n end\n end", "def index\n @user_bills = UserBill.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @user_bills }\n end\n end", "def show\n @nostro = Nostro.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @nostro }\n end\n end", "def index\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @documentos }\n end\n end", "def index\n @tipo_restaurantes = TipoRestaurante.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @tipo_restaurantes }\n end\n end", "def index\n @books = Book.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @books }\n end\n end", "def index\n @books = Book.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @books }\n end\n end", "def show\n @compras_documento = ComprasDocumento.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @compras_documento }\n end\n end", "def show\n @feria2010observacion = Feria2010observacion.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @feria2010observacion }\n end\n end", "def index\n @bios = Bio.find(:all, :order => :position)\n @bio = @bios.first\n\n respond_to do |format|\n format.html { render :template => 'bios/show' }\n format.xml { render :xml => @bios }\n end\n end", "def index\n @ficha_tematicas = FichaTematica.busqueda(params[:page], params[:generico], params[:buscar])\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @ficha_tematicas }\n end\n end", "def show\n @carrera = Carrera.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @carrera }\n format.xml { render :xml => @carrera.to_xml }\n end\n end", "def show\n @distribuidora = Distribuidora.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @distribuidora }\n end\n end", "def show\n @reputacao_carona = ReputacaoCarona.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @reputacao_carona }\n end\n end", "def show\n @lancamento = Lancamento.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @lancamento }\n end\n end", "def show\n @brigade = Brigade.find(params[:id])\n @title = @brigade.name\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @brigade }\n end\n end", "def index\n @pagos = Pago.find(:all)\n \n 
respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @pagos }\n end\n end", "def show\n @liga_blaz_blue = LigaBlazBlue.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @liga_blaz_blue }\n end\n end", "def show\n @reclamo = Reclamo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @reclamo }\n end\n end", "def index\n @nodes = Node.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @nodes }\n end\n end", "def index\n @nodes = Node.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @nodes }\n end\n end", "def show\r\n @razdel1 = Razdel1.find(params[:id])\r\n\r\n respond_to do |format|\r\n format.html # show.html.erb\r\n format.xml { render :xml => @razdel1 }\r\n end\r\n end", "def index\n @solicitudes = Solicitud.all\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @solicitudes }\n end\n end", "def index\n @paquetes = Paquete.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @paquetes }\n end\n end", "def show\n @burger = Burger.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @burger }\n end\n end", "def show\n @aisle = Aisle.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @aisle }\n end\n end", "def show\n @ambito = Ambito.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @ambito }\n end\n end", "def index\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @chronopay_links }\n end\n end", "def show\n @regiaos = Regiao.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @regiaos }\n end\n end", "def index\n @citas = Cita.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @citas }\n end\n end", "def show\n @bout = Bout.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @bout }\n end\n end" ]
[ "0.65553904", "0.64765036", "0.6409114", "0.63126993", "0.6283116", "0.6245994", "0.62356037", "0.62331575", "0.62306", "0.6182746", "0.61414367", "0.6128606", "0.6123555", "0.6078274", "0.60770947", "0.60497135", "0.60411954", "0.5990938", "0.5985811", "0.59728354", "0.5943101", "0.5941958", "0.5939262", "0.59288967", "0.5923602", "0.59165573", "0.591419", "0.59109426", "0.591047", "0.5909808", "0.59087676", "0.59059906", "0.59054744", "0.58955765", "0.58944386", "0.5890059", "0.58857083", "0.5883222", "0.5876657", "0.587256", "0.587117", "0.58683044", "0.58585125", "0.58548504", "0.5843575", "0.58426917", "0.5842107", "0.5841175", "0.58370674", "0.5829969", "0.5827223", "0.5825805", "0.58220327", "0.5818121", "0.58130383", "0.5809507", "0.58079916", "0.5803243", "0.57995206", "0.57993907", "0.5793393", "0.57922804", "0.5790365", "0.5790365", "0.57875496", "0.57844406", "0.57805717", "0.57772744", "0.5771596", "0.57705176", "0.5758578", "0.5755385", "0.5754308", "0.5754044", "0.5749351", "0.5749351", "0.5744263", "0.5732833", "0.5729307", "0.57286614", "0.57263094", "0.572555", "0.57235193", "0.57225287", "0.5720933", "0.5720793", "0.5719169", "0.57186985", "0.5717218", "0.5717218", "0.5710973", "0.57058364", "0.57004845", "0.5698893", "0.56984293", "0.5696187", "0.5694042", "0.5693999", "0.56915003", "0.56911206" ]
0.6797028
0
GET /bairros/new GET /bairros/new.xml
def new @bairro = Bairro.new respond_to do |format| format.html # new.html.erb format.xml { render :xml => @bairro } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new\n @barrio = Barrio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @barrio }\n end\n end", "def new\n @nostro = Nostro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @nostro }\n end\n end", "def new\n @node = Node.scopied.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @node }\n end\n end", "def new\n @asambleista = Asambleista.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @asambleista }\n end\n end", "def new\n @relatestagiario = Relatestagiario.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @relatestagiario }\n end\n end", "def new\n @bonificacion = Bonificacion.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @bonificacion }\n end\n end", "def new\n @recurso = Recurso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @recurso }\n end\n end", "def new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @nomina }\n end\n end", "def new\n @bairro_micro = BairroMicro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @bairro_micro }\n end\n end", "def new\n @bap = Bap.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @bap }\n end\n end", "def new\n @crianca = Crianca.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @crianca }\n end\n end", "def new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => new_vurl }\n end\n end", "def new\n @prueba = Prueba.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @prueba }\n end\n end", "def new\n @receita = Receita.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @receita }\n end\n end", "def new\n @pagamento = Pagamento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @pagamento }\n end\n end", "def new\n @aplicacion = Aplicacion.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @aplicacion }\n end\n end", "def new\n @pagina = Pagina.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @pagina }\n end\n end", "def new\n @brother = Brother.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @brother }\n end\n end", "def new\n @bixo = Bixo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @bixo }\n end\n end", "def new\n @nossos_servico = NossosServico.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @nossos_servico }\n end\n end", "def new\n @remocao = Remocao.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @remocao }\n end\n end", "def new\n @roaster = Roaster.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @roaster }\n end\n end", "def new\n @book = Book.new :copies => 1\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @book }\n end\n end", "def new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @instituto }\n end\n end", "def new\n @lien = Lien.new\n\n respond_to do |format|\n format.html # 
new.html.erb\n format.xml { render :xml => @lien }\n end\n end", "def new\n @lancamento = Lancamento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @lancamento }\n end\n end", "def new\n @aviso = Aviso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @aviso }\n end\n end", "def new\n @tipo_recibo = TipoRecibo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @tipo_recibo }\n end\n end", "def new\n @page = Page.new(:status => params[:from])\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @page }\n end\n end", "def new\n @reclamo = Reclamo.new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @reclamo }\n end\n \n end", "def new\n @pagare = Pagare.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @pagare }\n end\n end", "def new\n @browsenodeid = Browsenodeid.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @browsenodeid }\n end\n end", "def new\n @abonne = Abonne.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @abonne }\n end\n end", "def new\n @carro = Carro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @carro }\n end\n end", "def new\n @noami = Noami.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @noami }\n end\n end", "def new\n @regiaos = Regiao.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @regiaos }\n end\n end", "def new\n @aniversario = Aniversario.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @aniversario }\n end\n end", "def new\n @pessoa = Pessoa.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @pessoa }\n end\n end", "def new\n @pessoa = Pessoa.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @pessoa }\n end\n end", "def new\n @proceso = Proceso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @proceso }\n end\n end", "def new\n @reclamacao = Reclamacao.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @reclamacao }\n end\n end", "def new\n @asistencia = Asistencia.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @asistencia }\n end\n end", "def new\n @nota = Nota.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @nota }\n end\n end", "def new\n @nota = Nota.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @nota }\n end\n end", "def new\n @ponto = Ponto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @ponto }\n end\n end", "def new\n @bingo = Bingo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @bingo }\n end\n end", "def new\n @aauto = Aauto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @aauto }\n end\n end", "def new\n @node = Node.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @node }\n end\n end", "def new\n @node = Node.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @node }\n end\n end", "def new\n 
@tpago = Tpago.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @tpago }\n end\n end", "def new\n respond_to do |format|\n format.html { render :layout => 'application' }\n format.xml { render :xml => @recommand }\n end\n end", "def new\n @Roc = Roc.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @Roc }\n end\n end", "def new\n @cita = Cita.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @cita }\n end\n end", "def new\n @cita = Cita.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @cita }\n end\n end", "def new\n @vestimenta = Vestimenta.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @vestimenta }\n end\n end", "def new\n @suministro = Suministro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @suministro }\n end\n end", "def new\n @catena = Catena.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @catena }\n end\n end", "def new\n @peca = Peca.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @peca }\n end\n end", "def new\n @arrendamientosprorroga = Arrendamientosprorroga.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @arrendamientosprorroga }\n end\n end", "def new\n @page = Page.new\n\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @page }\n end\n end", "def new\n @banana = Banana.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @banana }\n end\n end", "def new\n @historico = Historico.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @historico }\n end\n end", "def new\n @norma = Norma.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @norma }\n end\n end", "def new\n @periodista = Periodista.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @periodista }\n end\n end", "def new\n @feria2010observacion = Feria2010observacion.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @feria2010observacion }\n end\n end", "def new\n @solicitud = Solicitud.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @solicitud }\n end\n end", "def new\n @aisle = Aisle.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @aisle }\n end\n end", "def new\n @movimiento = Movimiento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @movimiento }\n end\n end", "def new\n @bibliografia = Bibliografia.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @bibliografia }\n end\n end", "def new\n @plantilla = Plantilla.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @plantilla }\n end\n end", "def new\n @precio = Precio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @precio }\n end\n end", "def new\n @relatorios = Relatorio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @relatorios }\n end\n end", "def create\n @bairro = Bairro.new(params[:bairro])\n\n respond_to do |format|\n if @bairro.save\n flash[:notice] = \"Bairro criado com 
sucesso.\"\n format.html { redirect_to(admin_bairros_path) }\n format.xml { render :xml => @bairro, :status => :created, :location => @bairro }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @bairro.errors, :status => :unprocessable_entity }\n end\n end\n end", "def new\n @pizarra = Pizarra.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @pizarra }\n end\n end", "def new\n @promocao = Promocao.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @promocao }\n end\n end", "def new\n @contrato = Contrato.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @contrato }\n end\n end", "def new\n @distribuidora = Distribuidora.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @distribuidora }\n end\n end", "def new\n @colo = Colo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @colo }\n end\n end", "def new\n @tiposproceso = Tiposproceso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @tiposproceso }\n end\n end", "def new\n @silo = Silo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @silo }\n end\n end", "def new\n @reputacao_carona = ReputacaoCarona.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @reputacao_carona }\n end\n end", "def new\n @po = Po.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @po }\n end\n end", "def new\n @compras_documento = ComprasDocumento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @compras_documento }\n end\n end", "def new\n @documento = Documento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @documento }\n end\n end", "def new\n @page = Page.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @page }\n \n end\n end", "def new\n @protocolo = Protocolo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @protocolo }\n end\n end", "def new\n @coleccionista = Coleccionista.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @coleccionista }\n end\n end", "def new\n @zebra = Zebra.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @zebra }\n end\n end", "def new\n @page = Page.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @page }\n end\n end", "def new\n @page = Page.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @page }\n end\n end", "def new\n @page = Page.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @page }\n end\n end", "def new\n @page = Page.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @page }\n end\n end", "def new\n @page = Page.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @page }\n end\n end", "def new\n @page = Page.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @page }\n end\n end", "def new\n @tipo_lancamento = TipoLancamento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @tipo_lancamento }\n end\n end", "def new\n 
@movimento = Movimento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @movimento }\n end\n end", "def new\n @movimento = Movimento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @movimento }\n end\n end", "def new\n @compra = Compra.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @compra }\n end\n end", "def new\n @cuenta = Cuenta.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @cuenta }\n end\n end", "def new\n @cuenta = Cuenta.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @cuenta }\n end\n end" ]
[ "0.70185375", "0.70065886", "0.69854504", "0.69701964", "0.69656986", "0.6926386", "0.6883337", "0.68671656", "0.6852459", "0.68497086", "0.6839413", "0.6831075", "0.6807439", "0.6787974", "0.67793375", "0.67761993", "0.67753637", "0.6768397", "0.67658836", "0.67598534", "0.675444", "0.67492867", "0.6744281", "0.67426014", "0.6732878", "0.6729682", "0.6726391", "0.67261356", "0.6713183", "0.670961", "0.67092407", "0.6691566", "0.66863185", "0.668549", "0.6682689", "0.667477", "0.66725004", "0.6671901", "0.6671901", "0.66701853", "0.66678166", "0.6666325", "0.6665426", "0.6665426", "0.6650469", "0.6645134", "0.6642463", "0.6639221", "0.6639221", "0.6637411", "0.66360795", "0.66340005", "0.66337013", "0.66337013", "0.66334957", "0.6630803", "0.66255337", "0.6622712", "0.6619544", "0.6617698", "0.6612385", "0.66120046", "0.66042024", "0.6601937", "0.66012007", "0.6599188", "0.65971833", "0.65933347", "0.6590722", "0.6590486", "0.65862453", "0.65842915", "0.6580471", "0.65794265", "0.6579008", "0.6578533", "0.6576241", "0.657451", "0.6571777", "0.6571492", "0.65693665", "0.65664893", "0.6565681", "0.65623796", "0.6560618", "0.65567356", "0.65565616", "0.6549381", "0.6545726", "0.6545726", "0.6545726", "0.6545726", "0.6545726", "0.6545726", "0.654492", "0.6544828", "0.6544828", "0.6539068", "0.65384746", "0.65384746" ]
0.73753154
0
POST /bairros POST /bairros.xml
def create
  @bairro = Bairro.new(params[:bairro])

  respond_to do |format|
    if @bairro.save
      flash[:notice] = "Bairro criado com sucesso."
      format.html { redirect_to(admin_bairros_path) }
      format.xml  { render :xml => @bairro, :status => :created, :location => @bairro }
    else
      format.html { render :action => "new" }
      format.xml  { render :xml => @bairro.errors, :status => :unprocessable_entity }
    end
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post(xmldoc)\n headers = {'Content-Type' => 'text/xml'}\n check_response( @httpcli.post(@endpoint, xmldoc, headers) )\n end", "def post(body)\n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true if uri.scheme == 'https'\n\n request = Net::HTTP::Post.new(uri)\n request['Content-Type'] = 'text/xml'\n request['Accept-Language'] = locale if locale\n request.body = body\n\n response = http.request(request)\n\n Response.new(response, uri)\n end", "def create\n @bairro_micro = BairroMicro.new(params[:bairro_micro])\n\n respond_to do |format|\n if @bairro_micro.save\n flash[:notice] = 'BairroMicro was successfully created.'\n format.html { redirect_to(@bairro_micro) }\n format.xml { render :xml => @bairro_micro, :status => :created, :location => @bairro_micro }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @bairro_micro.errors, :status => :unprocessable_entity }\n end\n end\n end", "def post(uri, xml)\r\n req = Net::HTTP::Post.new(uri)\r\n req[\"content-type\"] = \"application/xml\"\r\n req.body = xml\r\n request(req)\r\n end", "def post(body)\n request = Net::HTTP::Post.new(bind_uri)\n request.body = body\n request.content_length = request.body.size\n request[\"Content-Type\"] = \"text/xml; charset=utf-8\"\n\n Jabber.debug(\"Sending POST request - #{body.strip}\")\n\n response = Net::HTTP.new(domain, port).start { |http| http.request(request) }\n\n Jabber.debug(\"Receiving POST response - #{response.code}: #{response.body.inspect}\")\n\n unless response.is_a?(Net::HTTPSuccess)\n raise Net::HTTPBadResponse, \"Net::HTTPSuccess expected, but #{response.class} was received\"\n end\n\n response\n end", "def create(name=\"Default Name\", age=\"50\")\r\n xml_req =\r\n \"<?xml version='1.0' encoding='UTF-8'?>\r\n <person>\r\n <name>#{name}</name>\r\n <age>#{age}</age>\r\n </person>\"\r\n \r\n request = Net::HTTP::Post.new(@url)\r\n request.add_field \"Content-Type\", \"application/xml\"\r\n request.body = xml_req\r\n \r\n http = Net::HTTP.new(@uri.host, @uri.port)\r\n response = http.request(request)\r\n response.body \r\n end", "def create(bin_params)\n @rest.post('save', bin_params)\n end", "def create\n @bairro = Bairro.new(bairro_params)\n\n respond_to do |format|\n if @bairro.save\n format.html { redirect_to @bairro, notice: 'Bairro criado com sucesso.' 
}\n format.json { render :show, status: :created, location: @bairro }\n else\n format.html { render :new }\n format.json { render json: @bairro.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n doc = Nokogiri::XML(request.body.read)\n bNode = doc.xpath('elwak/benutzer')\n\n @benutzer = Benutzer.new(benutzer_params(bNode))\n if @benutzer.save\n if bNode.xpath('objekt_zuordnungs').length > 0\n objekt_ids = bNode.xpath('objekt_zuordnungs/objekt_id').map{|oz| oz.text.to_s.to_i}\n @benutzer.setze_objekt_zuordnungen(objekt_ids)\n end\n success(@benutzer.id)\n else\n error(@benutzer.errors)\n end\n end", "def create\n doc = Nokogiri::XML(request.body.read)\n cvNode = doc.xpath('elwak/checklisten_vorlage')\n cv = ChecklistenVorlage.new({\n objekt_id: cvNode.xpath('objekt_id').text.to_s, \n bezeichner: cvNode.xpath('bezeichner').text.to_s, \n version: cvNode.xpath('version').text.to_s.to_i, \n inaktiv: cvNode.xpath('inaktiv').text.to_s.to_bool \n })\n cv.save\n\n cvNode.xpath('checklisten_eintrags/checklisten_eintrag').each do |ceNode|\n ce = ChecklistenEintrag.new({\n checklisten_vorlage_id: cv.id,\n bezeichner: ceNode.xpath('bezeichner').text.to_s,\n was: ceNode.xpath('was').text.to_s,\n wann: ceNode.xpath('wann').text.to_s,\n typ: ceNode.xpath('typ').text.to_s.to_i,\n position: ceNode.xpath('position').text.to_s.to_i\n })\n ce.save\n end\n\n respond_to do |format|\n format.xml {render :xml => '<?xml version=\"1.0\" encoding=\"UTF-8\"?><success />'}\n end\n end", "def create\n @barrio = Barrio.new(params[:barrio])\n\n respond_to do |format|\n if @barrio.save\n format.html { redirect_to(@barrio, :notice => 'Barrio was successfully created.') }\n format.xml { render :xml => @barrio, :status => :created, :location => @barrio }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @barrio.errors, :status => :unprocessable_entity }\n end\n end\n end", "def send_post(data_xml,url)\r\n result = @client.post(self.target_uri(url), :body => data_xml , :head => {'Content-Type' => 'application/xml'} ) \r\n raise \"Invalid status #{result.http_status} from server #{@host}:#{@port}\" if(result.http_status != '200') \r\n #reply = Reply.from_xml(result.http_body)\r\n if block_given?\r\n yield(result.http_body)\r\n else\r\n result.http_body\r\n end\r\n end", "def request_body(xml); xml; end", "def create\n @bonificacion = Bonificacion.new(params[:bonificacion])\n\n respond_to do |format|\n if @bonificacion.save\n format.html { redirect_to(@bonificacion, :notice => 'Bonificacion was successfully created.') }\n format.xml { render :xml => @bonificacion, :status => :created, :location => @bonificacion }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @bonificacion.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @selecciones = Seleccion.where(\"cliente_id = ?\",usuario_actual.id)\n @peso_total = Seleccion.peso_total(usuario_actual.id)\n @precio_total = Seleccion.precio_total(usuario_actual.id)\n @tarjetas = usuario_actual.tdc\n \n #Cobro en el banco\n client = Savon::Client.new(\"http://localhost:3001/servicios/wsdl\")\n tdc = Tarjeta.where(\"id = ? 
AND cliente_id = ?\",params[:orden][:tarjeta_id],usuario_actual.id)\n total_pagar = params[:orden][:total]\n pago = '<Message>\n <Request>\n <numero_tdc>'+tdc.numero+'</numero_tdc>\n <nombre_tarjetahabiente>'+tdc.tarjetahabiente+'</nombre_tarjetahabiente>\n <fecha_vencimiento>'+tdc.mes_vencimiento+'/'+tdc.ano_vencimiento+'</fecha_vencimiento>\n <codigo_seguridad>'+tdc.codigo+'</codigo_seguridad>\n <tipo_tarjeta>'+tdc.tipo+'</tipo_tarjeta>\n <direccion_cobro>'+tdc.direccion+'</direccion_cobro>\n <total_pagar>'+total_pagar+'</total_pagar>\n <cuenta_receptora>'+cuenta_receptora+'</cuenta_receptora>\n </Request>\n </Message>'\n #response = client.request :verificar_pago, body: { \"value\" => pago } \n #if response.success?\n # data = response.to_hash[:verificar_pago_response][:value][:response].first\n # @respuesta = XmlSimple.xml_in(data)\n #end\n\n #NAMESPACE = 'pagotdc'\n #URL = 'http://localhost:8080/'\n #banco = SOAP::RPC::Driver.new(URL, NAMESPACE)\n #banco.add_method('verificar_pago', 'numero_tdc', 'nombre_tarjetahabiente', 'fecha_vencimiento', 'codigo_seguridad', 'tipo_tarjeta', 'direccion_cobro', 'total_pagar', 'cuenta_receptora')\n #\n \n #respuesta = banco.verificar_pago(tdc.numero, tdc.tarjetahabiente, tdc.mes_vencimiento.to_s+'/'+tdc.ano_vencimiento.to_s, tdc.codigo, tdc.tipo, params[:orden][:total], tdc.direccion)\n \n if true #respuesta.ack.eql?(0)\n params[:orden][:cliente_id] = usuario_actual.id\n params[:orden][:total] = Seleccion.precio_total(usuario_actual.id)\n params[:orden][:fecha_entrega] = \"0000-00-00\"\n @orden = Orden.new(params[:orden])\n \n if @orden.save\n @selecciones = Seleccion.where(\"cliente_id = ?\",usuario_actual.id)\n @selecciones.each do |seleccion|\n p = Producto.find(seleccion.producto_id)\n @venta = Venta.new(:producto_id=>p.id, \n :orden_id=>@orden.id,\n :categoria_id=>p.categoria_id, \n :cantidad=>seleccion.cantidad,\n :costo=>p.precio)\n @venta.save\n end\n \n Seleccion.vaciar_carro(usuario_actual.id)\n respond_to do |format|\n format.html { redirect_to ver_ordenes_path, notice: 'Orden generada correctamente.' }\n end\n else\n respond_to do |format|\n format.html { render action: \"new\" }\n end\n end\n else\n respond_to do |format|\n format.html { render action: \"new\", notice: respuesta.mensaje }\n end\n end\n end", "def create(name=\"Default name\")\n xml_req =\n \"<?xml version='1.0' encoding='UTF-8'?>\n <customer>\n <name>#{name}</name>\n </customer>\"\n\n request = Net::HTTP::Post.new(@url)\n request.add_field \"Content-Type\", \"application/xml\"\n request.body = xml_req\n\n http = Net::HTTP.new(@uri.host, @uri.port)\n response = http.request(request)\n\n response.body\n end", "def post(buffer)\n connection.post(\"#{configuration.path}/update\", buffer, {'Content-type' => 'text/xml;charset=utf-8'})\n end", "def post_headers\n {\"Content-Type\" => 'text/xml; charset=utf-8'}\n end", "def create\n @browsenodeid = Browsenodeid.new(browsenodeid_params)\n\n respond_to do |format|\n if @browsenodeid.save\n format.html { redirect_to(@browsenodeid, :notice => 'Browsenodeid was successfully created.') }\n format.xml { render :xml => @browsenodeid, :status => :created, :location => @browsenodeid }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @browsenodeid.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @bienvenido = Bienvenido.new(bienvenido_params)\n\n respond_to do |format|\n if @bienvenido.save\n format.html { redirect_to @bienvenido, notice: 'Bienvenido was successfully created.' 
}\n format.json { render :show, status: :created, location: @bienvenido }\n else\n format.html { render :new }\n format.json { render json: @bienvenido.errors, status: :unprocessable_entity }\n end\n end\n end", "def send_request( xml )\n write( xml )\n read\n end", "def post_nodes_with_root\n serialize_service.post_nodes_serialized\n end", "def create\n @busca = Busca.new(busca_params)\n\n respond_to do |format|\n if @busca.save\n format.html { redirect_to @busca, notice: 'Busca was successfully created.' }\n format.json { render :show, status: :created, location: @busca }\n else\n format.html { render :new }\n format.json { render json: @busca.errors, status: :unprocessable_entity }\n end\n end\n end", "def post_business(business, location)\n xml = Builder::XmlMarkup.new\n query = xml.tag!(\"BPMSPost\", 'Edition' => \"1.1\") {\n xml.tag!(\"Record\") {\n xml.tag!(\"Phone\", location.phone)\n xml.tag!(\"BusinessName\", location.location_name)\n xml.tag!(\"Address\", location.address)\n xml.tag!(\"City\", location.city)\n xml.tag!(\"State\", location.state)\n xml.tag!(\"Zip\", location.zip)\n xml.tag!(\"URL\", location.website_url)\n xml.tag!(\"TagLine\", location.special_offer)\n #xml.tag!(\"LogoImage\", location.logo)\n xml.tag!(\"Categories\") {\n xml.tag!(\"Category\") {\n xml.tag!(\"Type\", \"Primary\")\n xml.tag!(\"Name\", business.industry_primary)\n }\n if business.industry_alt_1.present?\n xml.tag!(\"Category\") {\n xml.tag!(\"Type\", \"Alt1\")\n xml.tag!(\"Name\", business.industry_alt_1)\n }\n end\n if business.industry_alt_2.present?\n xml.tag!(\"Category\") {\n xml.tag!(\"Type\", \"Alt2\")\n xml.tag!(\"Name\", business.industry_alt_2)\n }\n end\n }\n }\n }\n body = build_request(3700, 1510, query)\n response = send_to_localeze(body)\n xml_doc = respond_with_hash(Nokogiri::XML(response.to_xml).text)\n xml_doc['Error'] == '0' # success (returns true/false)\n end", "def post\n response = HTTParty.post(servlet_url,\n :body => to_xml,\n :headers => { 'Content-Type' => 'application/xml' }\n ).response\n\n return Dhl::Shipment::Response.new(response.body)\n rescue Exception => e\n request_xml = if @to_xml.to_s.size>0\n @to_xml\n else\n '<not generated at time of error>'\n end\n\n response_body = if (response && response.body && response.body.to_s.size > 0)\n response.body\n else\n '<not received at time of error>'\n end\n\n log_level = if e.respond_to?(:log_level)\n e.log_level\n else\n :critical\n end\n\n log_request_and_response_xml(log_level, e, request_xml, response_body )\n raise e\n end", "def api_gateway_post(path, params)\n api_gateway_body_fwd = params.to_json\n rack_input = StringIO.new(api_gateway_body_fwd)\n\n post path, real_params = {}, 'rack.input' => rack_input\nend", "def create\n @asambleista = Asambleista.new(params[:asambleista])\n\n respond_to do |format|\n if @asambleista.save\n flash[:notice] = 'Asambleista se ha creado con exito.'\n format.html { redirect_to(admin_asambleistas_path) }\n format.xml { render :xml => @asambleista, :status => :created, :location => @asambleista }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @asambleista.errors, :status => :unprocessable_entity }\n end\n end\n end", "def post_config(url_prefix, xml)\n post_data(url_prefix, xml, 'application/xml;charset=UTF-8')\n end", "def post(path, params={})\n signature_params = params.values.any?{|value| value.respond_to?(:to_io)} ? 
{} : params\n request(:post, path, params.to_xml, signature_params)\n end", "def post\n Rentlinx.client.post(self)\n end", "def POST; end", "def api_gateway_post(path, params)\n api_gateway_body_fwd = params.to_json\n rack_input = StringIO.new(api_gateway_body_fwd)\n\n post path, real_params = {}, {\"rack.input\" => rack_input}\nend", "def post(method, params = {})\n url = make_url method, params\n query = url.query\n url.query = nil\n\n req = Net::HTTP::Post.new url.path\n req.body = query\n req.content_type = 'application/x-www-form-urlencoded'\n\n res = Net::HTTP.start url.host, url.port do |http|\n http.request req\n end\n\n xml = Nokogiri::XML(res.body, nil, nil, 0)\n\n check_error xml\n\n parse_response xml\n rescue SystemCallError, SocketError, Timeout::Error, IOError,\n Nokogiri::XML::SyntaxError => e\n raise CommunicationError.new(e)\n rescue Net::HTTPError => e\n xml = Nokogiri::XML(e.res.body) { |cfg| cfg.strict }\n check_error xml\n raise CommunicationError.new(e)\n end", "def create\n @brodo = Brodo.new(brodo_params)\n\n respond_to do |format|\n if @brodo.save\n format.html { redirect_to @brodo, notice: 'Brodo was successfully created.' }\n format.json { render :show, status: :created, location: @brodo }\n else\n format.html { render :new }\n format.json { render json: @brodo.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @intranet_boleto = Intranet::Boleto.new(intranet_boleto_params)\n\n respond_to do |format|\n if @intranet_boleto.save\n format.html { redirect_to @intranet_boleto, notice: \"Boleto was successfully created.\" }\n format.json { render :show, status: :created, location: @intranet_boleto }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @intranet_boleto.errors, status: :unprocessable_entity }\n end\n end\n end", "def post_query( xml )\n url = URI.parse( self.url )\n response = self.http.post_form( url, { \"query\" => xml } )\n return response.body\n end", "def create_jenkins_job(name, xml_file)\n create_url = \"http://#{Pkg::Config.jenkins_build_host}/createItem?name=#{name}\"\n form_args = [\"-H\", '\"Content-Type: application/xml\"', \"--data-binary\", \"@#{xml_file}\"]\n curl_form_data(create_url, form_args)\n \"http://#{Pkg::Config.jenkins_build_host}/job/#{name}\"\nend", "def http_post(request, response)\n path = request.path\n\n # Only handling xml\n content_type = request.header('Content-Type')\n return nil unless content_type.index('application/xml') || content_type.index('text/xml')\n\n # Making sure the node exists\n begin\n node = @server.tree.node_for_path(path)\n rescue Dav::Exception::NotFound\n return nil\n end\n\n request_body = request.body_as_string\n\n # If this request handler could not deal with this POST request, it\n # will return 'null' and other plugins get a chance to handle the\n # request.\n #\n # However, we already requested the full body. This is a problem,\n # because a body can only be read once. 
This is why we preemptively\n # re-populated the request body with the existing data.\n request.body = request_body\n\n document_type_box = Box.new('')\n message = @server.xml.parse(request_body, request.url, document_type_box)\n document_type = document_type_box.value\n\n case document_type\n # Dealing with the 'share' document, which modified invitees on a\n # calendar.\n when \"{#{Plugin::NS_CALENDARSERVER}}share\"\n # We can only deal with IShareableCalendar objects\n return true unless node.is_a?(IShareableCalendar)\n\n @server.transaction_type = 'post-calendar-share'\n\n # Getting ACL info\n acl = @server.plugin('acl')\n\n # If there's no ACL support, we allow everything\n acl.check_privileges(path, '{DAV:}write') if acl\n\n node.update_shares(message.set, message.remove)\n\n response.status = 200\n # Adding this because sending a response body may cause issues,\n # and I wanted some type of indicator the response was handled.\n response.update_header('X-Sabre-Status', 'everything-went-well')\n\n # Breaking the event chain\n return false\n # The invite-reply document is sent when the user replies to an\n # invitation of a calendar share.\n when \"{#{Plugin::NS_CALENDARSERVER}}invite-reply\"\n\n # This only works on the calendar-home-root node.\n return true unless node.is_a?(CalendarHome)\n\n @server.transaction_type = 'post-invite-reply'\n\n # Getting ACL info\n acl = @server.plugin('acl')\n\n # If there's no ACL support, we allow everything\n acl.check_privileges(path, '{DAV:}write') if acl\n\n url = node.share_reply(\n message.href,\n message.status,\n message.calendar_uri,\n message.in_reply_to,\n message.summary\n )\n\n response.status = 200\n # Adding this because sending a response body may cause issues,\n # and I wanted some type of indicator the response was handled.\n response.update_header('X-Sabre-Status', 'everything-went-well')\n\n if url\n writer = @server.xml.writer\n writer.open_memory\n writer.start_document\n writer.start_element(\"{#{Plugin::NS_CALENDARSERVER}}shared-as\")\n writer.write(Dav::Xml::Property::Href.new(url))\n writer.end_element\n response.update_header('Content-Type', 'application/xml')\n response.body = writer.output_memory\n end\n\n # Breaking the event chain\n return false\n when \"{#{Plugin::NS_CALENDARSERVER}}publish-calendar\"\n # We can only deal with IShareableCalendar objects\n return true unless node.is_a?(IShareableCalendar)\n\n @server.transaction_type = 'post-publish-calendar'\n\n # Getting ACL info\n acl = @server.plugin('acl')\n\n # If there's no ACL support, we allow everything\n acl.check_privileges(path, '{DAV:}write') if acl\n\n node.publish_status = true\n\n # iCloud sends back the 202, so we will too.\n response.status = 202\n\n # Adding this because sending a response body may cause issues,\n # and I wanted some type of indicator the response was handled.\n response.update_header('X-Sabre-Status', 'everything-went-well')\n\n # Breaking the event chain\n return false\n when \"{#{Plugin::NS_CALENDARSERVER}}unpublish-calendar\"\n # We can only deal with IShareableCalendar objects\n return true unless node.is_a?(IShareableCalendar)\n\n @server.transaction_type = 'post-unpublish-calendar'\n\n # Getting ACL info\n acl = @server.plugin('acl')\n\n # If there's no ACL support, we allow everything\n acl.check_privileges(path, '{DAV:}write') if acl\n\n node.publish_status = false\n\n response.status = 200\n\n # Adding this because sending a response body may cause issues,\n # and I wanted some type of indicator the response was handled.\n 
response.update_header('X-Sabre-Status', 'everything-went-well')\n\n # Breaking the event chain\n return false\n end\n end", "def create\n megam_rest.post_node(to_hash)\n end", "def post_xml(url, ls_data)\n uri = URI.parse(url)\n request = Net::HTTP::Post.new(uri.request_uri, HEADER_XML)\n request.body = ls_data\n request.basic_auth(@nsx_user, @nsx_password)\n response = Net::HTTP.start(uri.host, uri.port, :use_ssl => true,\n :verify_mode => OpenSSL::SSL::VERIFY_NONE) do |https|\n https.request(request)\n end\n return response.body if check_response(response, 201)\n end", "def create\n @boredom = Boredom.new(boredom_params)\n\n respond_to do |format|\n if @boredom.save\n format.html { redirect_to @boredom, notice: 'Boredom was successfully created.' }\n format.json { render :show, status: :created, location: @boredom }\n else\n format.html { render :new }\n format.json { render json: @boredom.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @bairro = Bairro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @bairro }\n end\n end", "def submit_order()\n\tputs \"Submitting order\"\n\tdata = create_order()\n\tresponse = request_post(\"/api/order\", data)\n\tputs response.body\nend", "def request(path, parameters = {})\n response = Basecamp.connection.post(path, convert_body(parameters), \"Content-Type\" => content_type)\n\n if response.code.to_i / 100 == 2\n result = XmlSimple.xml_in(response.body, 'keeproot' => true, 'contentkey' => '__content__', 'forcecontent' => true)\n typecast_value(result)\n else\n raise \"#{response.message} (#{response.code})\"\n end\n end", "def test_should_create_post_via_API_XML\r\n get \"/logout\"\r\n post \"/forum_posts.xml\", :api_key=>'testapikey',\r\n :forum_post => {:title=>'API Test Post',\r\n :body=>'Test Post desc',\r\n :user_id=>1}\r\n assert_response :created\r\n end", "def create\n @brite_rt_waxman = BriteRtWaxman.new(brite_rt_waxman_params)\n\n respond_to do |format|\n if @brite_rt_waxman.save\n format.html { redirect_to @brite_rt_waxman, notice: 'Brite rt waxman was successfully created.' }\n format.json { render :show, status: :created, location: @brite_rt_waxman }\n else\n format.html { render :new }\n format.json { render json: @brite_rt_waxman.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @tipo_recibo = TipoRecibo.new(params[:tipo_recibo])\n\n respond_to do |format|\n if @tipo_recibo.save\n format.html { redirect_to(@tipo_recibo, :notice => 'Tipo recibo was successfully created.') }\n format.xml { render :xml => @tipo_recibo, :status => :created, :location => @tipo_recibo }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @tipo_recibo.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @bodega = Bodega.new(bodega_params)\n\n respond_to do |format|\n if @bodega.save\n actualizar\n format.html { redirect_to @bodega, notice: 'Bodega fue creada exitosamente' }\n format.json { render :show, status: :created, location: @bodega }\n else\n format.html { render :new }\n format.json { render json: @bodega.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @brite_td_asbarabasi_rt_waxman = BriteTdAsbarabasiRtWaxman.new(brite_td_asbarabasi_rt_waxman_params)\n\n respond_to do |format|\n if @brite_td_asbarabasi_rt_waxman.save\n format.html { redirect_to @brite_td_asbarabasi_rt_waxman, notice: 'Brite td asbarabasi rt waxman was successfully created.' 
}\n format.json { render :show, status: :created, location: @brite_td_asbarabasi_rt_waxman }\n else\n format.html { render :new }\n format.json { render json: @brite_td_asbarabasi_rt_waxman.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @bloque = Bloque.new(params[:bloque])\n\n if @bloque.save\n render json: @bloque, status: :created, location: @bloque\n else\n render json: @bloque.errors, status: :unprocessable_entity\n end\n end", "def body\n xml = Builder::XmlMarkup.new( :indent => 2 )\n xml.instruct! :xml, :encoding => \"UTF-8\"\n xml.dataroot do |dataroot|\n dataroot.typvyplatneho options.postage_type\n dataroot.typtisku options.print_type\n dataroot.tiskpoukazky options.coupon_type\n dataroot.typods options.sender_type\n dataroot.typadr options.recipient_type\n dataroot.odsid nil\n dataroot.odsobrazek nil\n dataroot.odsfirma nil\n dataroot.odsosoba nil\n dataroot.odsulice nil\n dataroot.odscp nil\n dataroot.odsco nil\n dataroot.odsobec nil\n dataroot.odspsc nil\n dataroot.adrosloveni nil\n dataroot.adrfirma nil\n dataroot.adrosoba nil\n dataroot.adrulice nil\n dataroot.adrcp nil\n dataroot.adrco nil\n dataroot.adrobec nil\n dataroot.adrpsc nil\n dataroot.adriso nil\n dataroot.soubory do |soubory|\n soubory.soubor(:mimeType => \"\", :name => @pdf_file_path.split(\"/\").last) do |soubor|\n soubor.dataSoubor Base64.encode64(File.read(@pdf_file_path))\n end\n end\n end\n\n xml_file = Tempfile.new([\"DopisOnlineNew_1_1\", \".xml\"])\n xml_file.write xml.target!\n xml_file.flush\n xml_file.rewind\n\n {\n :user => DopisOnlineClient.username,\n :password => DopisOnlineClient.password,\n :soubor => xml_file\n }\n end", "def create\n @bordado = Bordado.new(params[:bordado])\n\n respond_to do |format|\n if @bordado.save\n format.html { redirect_to @bordado, notice: 'Bordado was successfully created.' }\n format.json { render json: @bordado, status: :created, location: @bordado }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bordado.errors, status: :unprocessable_entity }\n end\n end\n end", "def post_data(body)\r\n raise ConfigError, 'no json_records' if body.empty?\r\n # Create REST request header\r\n header = get_header(body.bytesize)\r\n # Post REST request \r\n response = RestClient.post(@uri, body, header)\r\n\r\n return response\r\n end", "def create\n @barrio = Barrio.new(params[:barrio])\n\n respond_to do |format|\n if @barrio.save\n format.html { redirect_to @barrio, :notice => 'Barrio ha sido creado.' }\n format.json { render :json => @barrio, :status => :created, :location => @barrio }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @barrio.errors, :status => :unprocessable_entity }\n end\n end\n end", "def post_request(params, useSSL=false)\n # get a server handle\n port = (useSSL == true) ? 443 : 80\n http_server = Net::HTTP.new(API_HOST, port)\n http_server.use_ssl = useSSL\n \n # build a request\n http_request = Net::HTTP::Post.new(API_PATH_REST)\n http_request.form_data = params\n \n # get the response XML\n return http_server.start{|http| http.request(http_request)}.body\n end", "def create\n @bonificacion = Bonificacion.new(bonificacion_params)\n\n respond_to do |format|\n if @bonificacion.save\n format.html { redirect_to @bonificacion, notice: 'Bonificación fue creado exitosamente.' 
}\n format.json { render :show, status: :created, location: @bonificacion }\n else\n format.html { render :new }\n format.json { render json: @bonificacion.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @registro_bovino = RegistroBovino.new(params[:registro_bovino])\n\n respond_to do |format|\n if @registro_bovino.save\n #format.html { redirect_to @registro_bovino, notice: 'Registro bovino was successfully created.' }\n format.html { redirect_to action: \"index\" }\n format.json { render json: @registro_bovino, status: :created, location: @registro_bovino }\n else\n format.html { render action: \"new\" }\n format.json { render json: @registro_bovino.errors, status: :unprocessable_entity }\n end \n end\n end", "def create\n @relatestagiario = Relatestagiario.new(params[:relatestagiario])\n\n respond_to do |format|\n if @relatestagiario.save\n flash[:notice] = 'RELATÓRIO SALVO COM SUCESSO.'\n format.html { redirect_to(@relatestagiario) }\n format.xml { render :xml => @relatestagiario, :status => :created, :location => @relatestagiario }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @relatestagiario.errors, :status => :unprocessable_entity }\n end\n end\n end", "def process_request\n @build_xml = build_xml\n pickup_serv_log('Final XML Request : ' + @build_xml.to_s)\n api_url_srv = api_url + \"/pickup\"\n pickup_serv_log('URL for API : ' + api_url_srv.to_s)\n api_response = self.class.post(api_url_srv, :body => build_xml)\n pickup_serv_log('API Response : ' + api_response.to_s)\n puts api_response if @debug\n response = parse_response(api_response)\n if success?(response)\n pickup_serv_log('Successfully Done : ' + response.to_s)\n success_response(api_response, response)\n else\n failure_response(api_response, response)\n end\n end", "def post(path, params={})\n RestClient.post request_base+path, params\n end", "def create\n @boc = Boc.new(boc_params)\n\n respond_to do |format|\n if @boc.save\n format.html { redirect_to new_boc_path, notice: 'Boc was successfully created.' }\n format.json { render :show, status: :created, location: @boc }\n else\n format.html { render :new }\n format.json { render json: @boc.errors, status: :unprocessable_entity }\n end\n end\n end", "def build()\n response = send_post_request(@xml_api_build_path, {:delay => '0sec'})\n response.is_a?(Net::HTTPSuccess) or response.is_a?(Net::HTTPRedirection)\n end", "def submit\n obj = ActiveSupport::Inflector.constantize(self.class.to_s.gsub(/Builder/, ''))\n obj.new(Client.post_xml_request(to_s))\n end", "def create\n @reclamacao = Reclamacao.new(params[:reclamacao])\n\n respond_to do |format|\n if @reclamacao.save\n format.html { redirect_to(@reclamacao, :notice => 'Reclamacao was successfully created.') }\n format.xml { render :xml => @reclamacao, :status => :created, :location => @reclamacao }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @reclamacao.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create(params)\n\nxml =<<XML\n<entry xmlns=\"http://purl.org/atom/ns#\">\n <title>#{params[:title]}</title>\n <link rel=\"related\" type=\"text/html\" href=\"#{params[:url]}\" />\n <summary type=\"text/plain\">#{params[:comment]}</summary>\n</entry>\nXML\n\n post('/post', xml)\n end", "def create\n @bnpb = Bnpb.new(bnpb_params)\n\n respond_to do |format|\n if @bnpb.save\n format.html { redirect_to @bnpb, notice: 'Bnpb was successfully created.' 
}\n format.json { render :show, status: :created, location: @bnpb }\n else\n format.html { render :new }\n format.json { render json: @bnpb.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @borad = Borad.new(params[:borad])\n\n respond_to do |format|\n if @borad.save\n format.html { redirect_to @borad, :notice => 'Borad was successfully created.' }\n format.json { render :json => @borad, :status => :created, :location => @borad }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @borad.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @brother = Brother.new(params[:brother])\n\n respond_to do |format|\n if @brother.save\n flash[:notice] = 'Brother was successfully created.'\n format.html { render :back }\n format.xml { render :xml => @brother, :status => :created, :location => @brother }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @brother.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @bonificacion = Bonificacion.new(bonificacion_params)\n\n respond_to do |format|\n if @bonificacion.save\n format.html { redirect_to @bonificacion, notice: 'Bonificacion was successfully created.' }\n format.json { render :show, status: :created, location: @bonificacion }\n else\n format.html { render :new }\n format.json { render json: @bonificacion.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @bonificacion = Bonificacion.new(bonificacion_params)\n\n respond_to do |format|\n if @bonificacion.save\n format.html { redirect_to @bonificacion, notice: 'Bonificacion was successfully created.' }\n format.json { render :show, status: :created, location: @bonificacion }\n else\n format.html { render :new }\n format.json { render json: @bonificacion.errors, status: :unprocessable_entity }\n end\n end\n end", "def bienvenido_params\n params.require(:bienvenido).permit(:name, :usuario)\n end", "def to_xml\n http.body\n end", "def create\n @bom = Bom.new(params[:bom])\n\n respond_to do |format|\n if @bom.save\n format.html { redirect_to(@bom, :notice => 'Bom was successfully created.') }\n format.xml { render :xml => @bom, :status => :created, :location => @bom }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @bom.errors, :status => :unprocessable_entity }\n end\n end\n end", "def post(path, parameters = {})\n request(:post, path, parameters)\n end", "def post_check(excon, body)\n excon.request(\n method: :post,\n path: '/check',\n headers: { 'Content-Type' => 'application/json' },\n body: body\n )\nend", "def post\n resource.post(request, response)\n end", "def create\n @bolsa_trabajo = BolsaTrabajo.new(bolsa_trabajo_params)\n\n respond_to do |format|\n if @bolsa_trabajo.save\n format.html { redirect_to @bolsa_trabajo, notice: 'Bolsa trabajo was successfully created.' 
}\n format.json { render :show, status: :created, location: @bolsa_trabajo }\n else\n format.html { render :new }\n format.json { render json: @bolsa_trabajo.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @bingo = Bingo.new(params[:bingo])\n\n respond_to do |format|\n if @bingo.save\n format.html { redirect_to(@bingo, :notice => 'Bingo was successfully created.') }\n format.xml { render :xml => @bingo, :status => :created, :location => @bingo }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @bingo.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @bloqueo = Bloqueo.new(bloqueo_params)\n\n respond_to do |format|\n if @bloqueo.save\n format.html { redirect_to @bloqueo, notice: 'Bloqueo was successfully created.' }\n format.json { render :show, status: :created, location: @bloqueo }\n else\n format.html { render :new }\n format.json { render json: @bloqueo.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @palabra_bolsa = PalabraBolsa.new(palabra_bolsa_params)\n\n respond_to do |format|\n if @palabra_bolsa.save\n format.html { redirect_to @palabra_bolsa, notice: 'Palabra bolsa was successfully created.' }\n format.json { render :show, status: :created, location: @palabra_bolsa }\n else\n format.html { render :new }\n format.json { render json: @palabra_bolsa.errors, status: :unprocessable_entity }\n end\n end\n end", "def post path_and_params, post_body\n start if not @pid\n @lock.synchronize do\n @last_use = Time.new.to_f\n\n # Make request to xtractr\n Net::HTTP.start('localhost', @port) do |http|\n http.request_post \"/#{path_and_params}\", post_body do |response|\n headers = {}\n response.each_header {|name,val| headers[name] = val}\n return response.code.to_i, headers, response.body\n end\n end\n end\n end", "def create\n @fb_adresa = FbAdresa.new(params[:fb_adresa])\n\n respond_to do |format|\n if @fb_adresa.save\n format.html { redirect_to(@fb_adresa, :notice => 'Fb adresa was successfully created.') }\n format.xml { render :xml => @fb_adresa, :status => :created, :location => @fb_adresa }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @fb_adresa.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @cabasiento = Cabasiento.new(params[:cabasiento])\n\n respond_to do |format|\n if @cabasiento.save\n flash[:notice] = 'Cabasiento was successfully created.'\n format.html { redirect_to(@cabasiento) }\n format.xml { render :xml => @cabasiento, :status => :created, :location => @cabasiento }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @cabasiento.errors, :status => :unprocessable_entity }\n end\n end\n end", "def post(path, params={})\n request(:post, path, params)\n end", "def post(path, params={})\n request(:post, path, params)\n end", "def post(path, params={})\n request(:post, path, params)\n end", "def create\n @node_config = NodeConfig.new(params[:node_config])\n\n respond_to do |format|\n if @node_config.save\n format.xml { render :xml => @node_config, :status => :created, :location => @node_config }\n format.any { render :json => @node_config, :status => :created, :location => @node_config }\n else\n format.xml { render :xml => @node_config.errors, :status => :unprocessable_entity }\n format.any { render :json => @node_config.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @abonne = Abonne.new(params[:abonne])\n\n respond_to do |format|\n if 
@abonne.save\n flash[:notice] = 'Abonne was successfully created.'\n format.html { redirect_to(@abonne) }\n format.xml { render :xml => @abonne, :status => :created, :location => @abonne }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @abonne.errors, :status => :unprocessable_entity }\n end\n end\n end", "def post_save xml, options={:mapping=>:_default}\n xml.root.add_attributes(\"xmlns\"=>\"http://schema.intuit.com/platform/fdatafeed/bankingaccount/v1\")\n end", "def create\n\t\n\t@precio_boleto = PrecioBoleto.new(precio_boleto_params)\n\n\trespond_to do |format|\n\tif @precio_boleto.save\n\t\tformat.html { redirect_to @precio_boleto, notice: 'Nuevo Precio de boleto fue creado exitosamente.' }\n\t\tformat.json { render action: 'show', status: :created, location: @precio_boleto }\n\telse\n\t\tformat.html { render action: 'new' }\n\t\tformat.json { render json: @precio_boleto.errors, status: :unprocessable_entity }\n\tend\n\tend\nend", "def commit(xml)\n url = (test? ? test_url : live_url)\n\n response = parse(ssl_post(url, post_data(xml), 'Content-Type' => 'application/x-www-form-urlencoded;charset=UTF-8'))\n\n Response.new(\n success_from(response),\n message_from(response),\n response,\n authorization: authorization_from(response),\n test: test?,\n error_code: error_code_from(response)\n )\n end", "def post(url_variables:, body:)\n ensure_service_document\n\n end", "def create\n @burrito = Burrito.new(burrito_params)\n\n respond_to do |format|\n if @burrito.save\n format.html { redirect_to @burrito, notice: 'Burrito was successfully created.' }\n format.json { render action: 'show', status: :created, location: @burrito }\n else\n format.html { render action: 'new' }\n format.json { render json: @burrito.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @suministro = Suministro.new(params[:suministro])\n\n respond_to do |format|\n if @suministro.save\n format.html { redirect_to(@suministro, :notice => 'Suministro was successfully created.') }\n format.xml { render :xml => @suministro, :status => :created, :location => @suministro }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @suministro.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @brave_burst = BraveBurst.new(brave_burst_params)\n\n respond_to do |format|\n if @brave_burst.save\n format.html { redirect_to @brave_burst, notice: 'Brave burst was successfully created.' }\n format.json { render action: 'show', status: :created, location: @brave_burst }\n else\n format.html { render action: 'new' }\n format.json { render json: @brave_burst.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @precio_boleto = PrecioBoleto.new(precio_boleto_params)\n\n respond_to do |format|\n if @precio_boleto.save\n format.html { redirect_to @precio_boleto, notice: 'Precio boleto was successfully created.' }\n format.json { render action: 'show', status: :created, location: @precio_boleto }\n else\n format.html { render action: 'new' }\n format.json { render json: @precio_boleto.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @bico = Bico.new(params[:bico])\n\n respond_to do |format|\n if @bico.save\n format.html { redirect_to @bico, notice: 'Bico was successfully created.' 
}\n format.json { render json: @bico, status: :created, location: @bico }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bico.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @bodega = Bodega.new(bodega_params)\n\n respond_to do |format|\n if @bodega.save\n format.html { redirect_to @bodega, notice: 'Se creo correctamemte la Bodega.' }\n format.json { render action: 'show', status: :created, location: @bodega }\n else\n format.html { render action: 'new' }\n format.json { render json: @bodega.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @barrio = Barrio.new(params[:barrio])\n\n respond_to do |format|\n if @barrio.save\n format.html { redirect_to @barrio, notice: 'Barrio was successfully created.' }\n format.json { render json: @barrio, status: :created, location: @barrio }\n else\n format.html { render action: \"new\" }\n format.json { render json: @barrio.errors, status: :unprocessable_entity }\n end\n end\n end", "def process(xml)\n timeout(TIMEOUT) do\n url = URI.parse(webservices_url)\n http = Net::HTTP.new(url.host, url.port)\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n http.start {\n request = Net::HTTP::Post.new(url.to_s)\n request.body = xml\n response = http.request(request)\n response.body\n }\n end\n end" ]
[ "0.6109481", "0.5931159", "0.5924494", "0.5912243", "0.5894569", "0.57990813", "0.5770801", "0.5735245", "0.5691109", "0.56860334", "0.56578946", "0.5609075", "0.55740684", "0.5571139", "0.5555524", "0.549797", "0.5491862", "0.5471097", "0.5470009", "0.54614395", "0.54555416", "0.54506654", "0.54289144", "0.5428735", "0.5427394", "0.5422074", "0.54176164", "0.5416364", "0.54076576", "0.5405854", "0.5377832", "0.53678584", "0.5351998", "0.533855", "0.5336891", "0.53094786", "0.53092957", "0.5299645", "0.5299298", "0.5299209", "0.52701133", "0.5259695", "0.52502555", "0.52448386", "0.52322364", "0.5231401", "0.5230528", "0.5226045", "0.5220716", "0.5217616", "0.5210686", "0.5208324", "0.5205509", "0.5205368", "0.5198513", "0.5198464", "0.5197095", "0.51949424", "0.51923376", "0.5189598", "0.51841277", "0.51836956", "0.51835996", "0.51787376", "0.5169217", "0.5169099", "0.51670766", "0.51669943", "0.51669025", "0.51669025", "0.516157", "0.5155878", "0.5155713", "0.51555425", "0.5153655", "0.5152326", "0.5145146", "0.51428276", "0.51281184", "0.51223963", "0.51219004", "0.51213795", "0.51182944", "0.5116902", "0.5116902", "0.5116902", "0.5116711", "0.51154166", "0.5108773", "0.51078963", "0.51077396", "0.51046896", "0.51025075", "0.51003706", "0.5098176", "0.5098005", "0.50965524", "0.5096309", "0.5093253", "0.5093206" ]
0.6218456
0
PUT /bairros/1 PUT /bairros/1.xml
def update
  @bairro = Bairro.find(params[:id])

  respond_to do |format|
    if @bairro.update_attributes(params[:bairro])
      flash[:notice] = "Bairro atualizado com sucesso."
      format.html { redirect_to(admin_bairros_path) }
      format.xml  { head :ok }
    else
      format.html { render :action => "edit" }
      format.xml  { render :xml => @bairro.errors, :status => :unprocessable_entity }
    end
  end
end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update opts = {}\n opts[:headers] ||= {}\n opts[:headers]['Content-Type'] ||= 'text/xml'\n post 'update', opts\n end", "def put(uri, xml)\r\n req = Net::HTTP::Put.new(uri)\r\n req[\"content-type\"] = \"application/xml\"\r\n req.body = xml\r\n request(req)\r\n end", "def rest_update(uri, method: Net::HTTP::Put)\n request = Net::HTTP::Get.new uri\n request.add_field(\"Accept\",\"application/xml\")\n auth_admin(request)\n \n Net::HTTP.start(uri.host, uri.port) do |http|\n response = http.request request\n response.value\n\n doc = REXML::Document.new response.body\n \n doc = strip_class_attributes(yield doc)\n \n request2 = method.new uri\n request2.content_type = 'application/xml'\n auth_admin(request2)\n\n request2.body=doc.to_s\n \n response2 = http.request request2\n response.value\n\n end\n \nend", "def update(id, name=\"Updated Name\", age=\"55\")\r\n xml_req =\r\n \"<?xml version='1.0' encoding='UTF-8'?>\r\n <person>\r\n <id type='integer'>#{id}</id>\r\n <name>#{name}</name>\r\n <age>#{age}</age> \r\n </person>\"\r\n request = Net::HTTP::Put.new(\"#{@url}/#{id}.xml\")\r\n request.add_field \"Content-Type\", \"application/xml\"\r\n request.body = xml_req\r\n http = Net::HTTP.new(@uri.host, @uri.port)\r\n response = http.request(request)\r\n # no response body will be returned\r\n case response\r\n when Net::HTTPSuccess\r\n return \"#{response.code} OK\"\r\n else\r\n return \"#{response.code} ERROR\"\r\n end\r\n end", "def update(id, name= \"Updated Name\")\n xml_req =\n \"<?xml version='1.0' encoding='UTF-8'?>\n <customer>\n <id type='integer'>#{id}</id>\n <name>#{name}</name>\n </customer>\"\n\n request = Net::HTTP::Put.new(\"#{@url}/#{id}.xml\")\n request.add_field \"Content-Type\", \"application/xml\"\n request.body = xml_req\n\n http = Net::HTTP.new(@uri.host, @uri.port)\n response = http.request(request)\n\n # no response body will be returned\n case response\n when Net::HTTPSuccess\n return \"#{response.code} OK\"\n else\n return \"#{response.code} ERROR\"\n end\n end", "def update\n @bairro_micro = BairroMicro.find(params[:id])\n\n respond_to do |format|\n if @bairro_micro.update_attributes(params[:bairro_micro])\n flash[:notice] = 'BairroMicro was successfully updated.'\n format.html { redirect_to(@bairro_micro) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @bairro_micro.errors, :status => :unprocessable_entity }\n end\n end\n end", "def test_put_invoices_1_xml\n @parameters = {:invoice => {:number => 'NewNumber'}}\n \n Redmine::ApiTest::Base.should_allow_api_authentication(:put,\n '/invoices/1.xml',\n {:invoice => {:number => 'NewNumber'}},\n {:success_code => :ok})\n \n assert_no_difference('Invoice.count') do\n put '/invoices/1.xml', @parameters, credentials('admin')\n end\n \n invoice = Invoice.find(1)\n assert_equal \"NewNumber\", invoice.number\n \n end", "def update\n @barrio = Barrio.find(params[:id])\n\n respond_to do |format|\n if @barrio.update_attributes(params[:barrio])\n format.html { redirect_to(@barrio, :notice => 'Barrio was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @barrio.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n doc = Nokogiri::XML(request.body.read)\n bNode = doc.xpath('elwak/benutzer')\n\n @benutzer = Benutzer.find(params[:id])\n \n #Sicherstellen, dass Benutzer synchronisiert wird auch wenn nur Objekt-Zuordnungen anders sind!\n @benutzer.updated_at = DateTime.now 
\n\n if bNode.xpath('objekt_zuordnungs').length > 0\n @benutzer.setze_objekt_zuordnungen(bNode.xpath('objekt_zuordnungs/objekt_id').map{|oz| oz.text.to_s.to_i})\n end\n if @benutzer.update(benutzer_params(bNode))\n success(nil)\n else\n error(@benutzer.errors)\n end\n end", "def update!(params)\n res = @client.put(path, nil, params, \"Content-Type\" => \"application/json\")\n @attributes = res.json if res.status == 201\n res\n end", "def update\n respond_to do |format|\n if @boolio.update( boolio_params )\n format.html { redirect_to @boolio, notice: 'Boolio was successfully updated.' }\n format.json { render :show, status: :ok, location: @boolio }\n else\n format.html { render :edit }\n format.json { render json: @boolio.errors, status: :unprocessable_entity }\n end\n end\n end", "def update(url, data)\n RestClient.put url, data, :content_type => :json\nend", "def update\n respond_to do |format|\n if @bairro.update(bairro_params)\n format.html { redirect_to @bairro, notice: 'Bairro atualizado com sucesso.' }\n format.json { render :show, status: :ok, location: @bairro }\n else\n format.html { render :edit }\n format.json { render json: @bairro.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n doc = Nokogiri::XML(request.body.read)\n cvNode = doc.xpath('elwak/checklisten_vorlage')\n\n update_params = {inaktiv: cvNode.xpath('inaktiv').text.to_s.to_bool}\n respond_to do |format|\n if @checklisten_vorlage.update(update_params)\n format.xml {render :xml => '<?xml version=\"1.0\" encoding=\"UTF-8\"?><success />'}\n else\n format.xml {render :xml => '<?xml version=\"1.0\" encoding=\"UTF-8\"?><error />'}\n end\n end\n end", "def update\n @bap = Bap.find(params[:id])\n\n respond_to do |format|\n if @bap.update_attributes(params[:bap])\n format.html { redirect_to(@bap, :notice => 'Bap was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @bap.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @osoba = Osoba.find(params[:id])\n\n if @osoba.update(params[:osoba])\n head :no_content\n else\n render json: @osoba.errors, status: :unprocessable_entity\n end\n end", "def put(path = '/files/', params = {})\n request :put, path, params\n end", "def api_xml(path,method=:get,options={})\n xml_message(amee,\"/data\"+path,method,options)\n end", "def test_should_update_link_via_API_XML\r\n get \"/logout\"\r\n put \"/links/1.xml\", :link => {:user_id => 1,\r\n :title => 'API Link 1',\r\n :url => 'http://www.api.com'}\r\n assert_response 401\r\n end", "def update\n @asambleista = Asambleista.find(params[:id])\n\n respond_to do |format|\n if @asambleista.update_attributes(params[:asambleista])\n flash[:notice] = 'Asambleista se ha actualizado con exito.'\n format.html { redirect_to(admin_asambleistas_path) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @asambleista.errors, :status => :unprocessable_entity }\n end\n end\n end", "def test_putpoi_update_valid\n nd = create(:node)\n cs_id = nd.changeset.id\n user = nd.changeset.user\n amf_content \"putpoi\", \"/1\", [\"#{user.email}:test\", cs_id, nd.version, nd.id, nd.lon, nd.lat, nd.tags, nd.visible]\n post :amf_write\n assert_response :success\n amf_parse_response\n result = amf_result(\"/1\")\n\n assert_equal 5, result.size\n assert_equal 0, result[0]\n assert_equal \"\", result[1]\n assert_equal nd.id, result[2]\n assert_equal nd.id, result[3]\n assert_equal nd.version + 
1, result[4]\n\n # Now try to update again, with a different lat/lon, using the updated version number\n lat = nd.lat + 0.1\n lon = nd.lon - 0.1\n amf_content \"putpoi\", \"/2\", [\"#{user.email}:test\", cs_id, nd.version + 1, nd.id, lon, lat, nd.tags, nd.visible]\n post :amf_write\n assert_response :success\n amf_parse_response\n result = amf_result(\"/2\")\n\n assert_equal 5, result.size\n assert_equal 0, result[0]\n assert_equal \"\", result[1]\n assert_equal nd.id, result[2]\n assert_equal nd.id, result[3]\n assert_equal nd.version + 2, result[4]\n end", "def test_should_update_project_via_API_XML\r\n get \"/logout\"\r\n put \"/projects/1.xml\", :project => {:user_id => 1,\r\n :url => 'http://www.apiproject.com',\r\n :name => 'API Project',\r\n :description => 'API Project Desc' }\r\n assert_response 401\r\n end", "def update_xml\n self.xml= dumpRouteAsXml\n self.save\n end", "def update_object_xml(object_type, id, xml)\n @client.update_business_object_by_public_id({\n :busObNameOrId => object_type,\n :busObPublicId => id,\n :updateXml => xml\n })\n return last_error\n end", "def update\n @bingo = Bingo.find(params[:id])\n\n respond_to do |format|\n if @bingo.update_attributes(params[:bingo])\n format.html { redirect_to(@bingo, :notice => 'Bingo was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @bingo.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @bom = Bom.find(params[:id])\n\n respond_to do |format|\n if @bom.update_attributes(params[:bom])\n format.html { redirect_to(@bom, :notice => 'Bom was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @bom.errors, :status => :unprocessable_entity }\n end\n end\n end", "def put!\n request! :put\n end", "def update\n connection.put(element_path, to_xml)\n end", "def update\n @sabio = Sabio.find(params[:id])\n\n respond_to do |format|\n if @sabio.update_attributes(params[:sabio])\n format.html { redirect_to @sabio, notice: 'El Sabio fue actualizado.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @sabio.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @barrio = Barrio.find(params[:id])\n\n respond_to do |format|\n if @barrio.update_attributes(params[:barrio])\n format.html { redirect_to @barrio, :notice => 'Barrio ha sido actualizado.' 
}\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @barrio.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @rubro.update_attributes(params[:rubro])\n format.html { redirect_to(@rubro, :notice => \"Se actualizó el rubro #{@rubro.nombre}.\") }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @rubro.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @bus = Bus.find(params[:id])\n\n respond_to do |format|\n if @bus.update_attributes(params[:bus])\n format.html { redirect_to(@bus, :notice => 'Bus was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @bus.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update_aos_version(args = {}) \n id = args['id']\n temp_path = \"/aosversions.json/{aosVersionId}\"\n path = temp_path\nargs.keys.each do |key|\n if (key == \"aosversionId\")\n args.delete(key)\n path = temp_path.gsub(\"{#{key}}\", id)\n end\nend\n puts \" PATH : #{path}\"\n put(path, args)\nend", "def update\n @browsenodeid = Browsenodeid.find(params[:id])\n\n respond_to do |format|\n if @browsenodeid.update_attributes(browsenodeid_params)\n format.html { redirect_to(@browsenodeid, :notice => 'Browsenodeid was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @browsenodeid.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @obra = Obra.find(params[:id])\n\n respond_to do |format|\n if @obra.update_attributes(params[:obra])\n format.html { redirect_to @obra, notice: 'Obra was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @obra.errors, status: :unprocessable_entity }\n end\n end\n end", "def put(path, params={})\n RestClient.put request_base+path, params\n end", "def update\n @barrio = Barrio.find(params[:id])\n\n respond_to do |format|\n if @barrio.update_attributes(params[:barrio])\n format.html { redirect_to @barrio, notice: 'Barrio was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @barrio.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @bonificacion = Bonificacion.find(params[:id])\n\n respond_to do |format|\n if @bonificacion.update_attributes(params[:bonificacion])\n format.html { redirect_to(@bonificacion, :notice => 'Bonificacion was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @bonificacion.errors, :status => :unprocessable_entity }\n end\n end\n end", "def put(*args)\n request :put, *args\n end", "def update\n @boat = Boat.find(params[:id])\n\n respond_to do |format|\n if @boat.update_attributes(params[:boat])\n format.html { redirect_to(@boat, :notice => 'Boat was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @boat.errors, :status => :unprocessable_entity }\n end\n end\n end", "def rm_update path, data, msg\n\n re = rm_request path, data, 'PUT'\n puts re.body\n chk (re.code!='200'), msg + \"\\n#{re.code} #{re.msg}\\n\\n\"\n return true\n\nend", "def update\n @inventario = Inventario.find(params[:id])\n @foto = @inventario.foto\n \n @service = InventarioService.new(@inventario, @foto)\n respond_to do |format|\n\n if @inventario.update_attributes(params[:inventario],params[:foto_file])\n format.html { redirect_to(@inventario, :notice => 'Inventario was successfully updated.') }\n format.xml { head :ok }\n else\n\t @foto = @service.foto\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @inventario.errors, :status => :unprocessable_entity }\n end\n end\n end", "def restobooking\n @buchung = Buchung.find(params[:id])\n @buchung.status='B' \n \n respond_to do |format|\n if @buchung.update_attributes(params[:buchung])\n format.html { redirect_to @buchung, notice: 'Buchung wurde erfolgreich geaendert.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @buchung.errors, status: :unprocessable_entity }\n end\n end \n end", "def update\n @relatestagiario = Relatestagiario.find(params[:id])\n\n respond_to do |format|\n if @relatestagiario.update_attributes(params[:relatestagiario])\n flash[:notice] = 'RELATÓRIO SALVO COM SUCESSO.'\n format.html { redirect_to(@relatestagiario) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @relatestagiario.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @arbitro = Arbitro.find(params[:id])\n\n respond_to do |format|\n if @arbitro.update_attributes(params[:arbitro])\n format.html { redirect_to @arbitro, notice: 'El Arbitro se ha modificado correctamente.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @arbitro.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n format.xml { head :method_not_allowed }\n format.json { head :method_not_allowed }\n end\n end", "def put(path, body = nil, ctype = 'application/json')\n make_call(mk_conn(path, 'Content-Type': ctype,\n 'Accept': 'application/json'),\n :put, nil, body.to_json)\n end", "def update\n @roaster = Roaster.find(params[:id])\n\n respond_to do |format|\n if @roaster.update_attributes(params[:roaster])\n flash[:notice] = 'Roaster was successfully updated.'\n format.html { redirect_to(@roaster) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @roaster.errors, :status => :unprocessable_entity }\n end\n end\n end", "def put_datastream(pid, dsID, xml)\n uri = URI.parse(@fedora + '/objects/' + pid + '/datastreams/' + dsID ) \n RestClient.put(uri.to_s, xml, :content_type => \"application/xml\")\n rescue => e\n e.response \n end", "def update\n # returning connection.put(element_path(prefix_options), to_xml, self.class.headers) do |response|\n returning connection.put(element_path(prefix_options), to_ssj, self.class.headers) do |response|\n load_attributes_from_response(response)\n end\n end", "def put(path, params = {})\n request(:put, path, params)\n end", "def put(path, params = {})\n request(:put, path, params)\n end", "def put(path, params = {})\n request(:put, path, params)\n end", "def put(path, parameters = {})\n request(:put, path, parameters)\n end", "def update\n @node_rack = @object\n\n respond_to do |format|\n if @node_rack.update_attributes(params[:node_rack])\n flash[:notice] = 'NodeRack was successfully updated.'\n format.html { redirect_to node_rack_url(@node_rack) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @node_rack.errors.to_xml, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @basin = Basin.find(params[:id])\n\n respond_to do |format|\n if @basin.update_attributes(params[:basin])\n format.html { redirect_to @basin, notice: 'Basin was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @basin.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @registro_bovino = RegistroBovino.find(params[:id])\n\n respond_to do |format|\n if @registro_bovino.update_attributes(params[:registro_bovino])\n #format.html { redirect_to @registro_bovino, notice: 'Registro bovino was successfully updated.' 
}\n format.html { redirect_to action: \"index\" }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @registro_bovino.errors, status: :unprocessable_entity }\n end\n end\n end", "def test_should_update_invite_via_API_XML\r\n get \"/logout\"\r\n put \"/invites/1.xml\", :invite => {:message => 'API Invite 1',\r\n :accepted => false,\r\n :email => '[email protected]',\r\n :user_id => 1 }\r\n assert_response 401\r\n end", "def test_put\n header 'Content-Type', 'application/json'\n\n data = File.read 'sample-traces/0.json'\n post('/traces', data, 'CONTENT_TYPE': 'application/json')\n\n contents = last_response.body\n contents_id = contents['_id']\n\n data = File.read 'sample-traces/1.json'\n put(\"/traces/#{contents_id}\", data, 'CONTENT_TYPE': 'application/json')\n contents = last_response.body\n\n assert_equal contents_id, contents['_id']\n end", "def update\n respond_to do |format|\n if @busca.update(busca_params)\n format.html { redirect_to @busca, notice: 'Busca was successfully updated.' }\n format.json { render :show, status: :ok, location: @busca }\n else\n format.html { render :edit }\n format.json { render json: @busca.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @boc.update(boc_params)\n format.html { redirect_to @boc, notice: 'Boc was successfully updated.' }\n format.json { render :show, status: :ok, location: @boc }\n else\n format.html { render :edit }\n format.json { render json: @boc.errors, status: :unprocessable_entity }\n end\n end\n end", "def update options={}\n client.put(\"/#{id}\", options)\n end", "def update\n @brag = Brag.find(params[:id])\n\n respond_to do |format|\n if @brag.update_attributes(params[:brag])\n format.html { redirect_to @brag, notice: 'Brag was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @brag.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @nostro = Nostro.find(params[:id])\n\n respond_to do |format|\n if @nostro.update_attributes(params[:nostro])\n flash[:notice] = 'Nostro was successfully updated.'\n format.html { redirect_to(@nostro) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @nostro.errors, :status => :unprocessable_entity }\n end\n end\n end", "def post(buffer)\n connection.post(\"#{configuration.path}/update\", buffer, {'Content-type' => 'text/xml;charset=utf-8'})\n end", "def put(path, params={})\n request(:put, path, params)\n end", "def put(path, params={})\n request(:put, path, params)\n end", "def put(path, params={})\n request(:put, path, params)\n end", "def put(path, params={})\n request(:put, path, params)\n end", "def put(path, params={})\n request(:put, path, params)\n end", "def put(path, params={})\n request(:put, path, params)\n end", "def put(path, params={})\n request(:put, path, params)\n end", "def put(path, params={})\n request(:put, path, params)\n end", "def update\n @boleto = Boleto.find(params[:id])\n\n respond_to do |format|\n if @boleto.update_attributes(params[:boleto])\n format.html { redirect_to [:admin, @boleto], notice: t(:updated, :name=>\"Boleto\") }\n else\n format.html { render action: \"edit\" }\n end\n end\n end", "def update\n @suministro = Suministro.find(params[:id])\n\n respond_to do |format|\n if @suministro.update_attributes(params[:suministro])\n format.html { redirect_to(@suministro, :notice => 'Suministro was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @suministro.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update_volumes(username, token, workset_name, volume_ids)\n\n #<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n #<volumes xmlns=\"http://registry.htrc.i3.illinois.edu/entities/workset\">\n # <volume>\n # <id>9999999</id>\n # </volume>\n # <volume>\n # <id>3333333</id>\n # </volume>\n # </volumes>\n volumes_xml =\n \"<?xml version=\\\"1.0\\\" encoding=\\\"UTF-8\\\" standalone=\\\"yes\\\"?>\" +\n \"<volumes xmlns=\\\"http://registry.htrc.i3.illinois.edu/entities/workset\\\">\";\n\n for id in volume_ids\n volumes_xml += \"<volume><id>#{id}</id></volume>\"\n end\n volumes_xml += \"</volumes>\"\n\n\n # curl -v --data @new_volumes.xml -X PUT \\\n # -H \"Content-Type: application/vnd.htrc-volume+xml\" \\\n # -H \"Accept: application/vnd.htrc-volume+xml\" \\\n # http://localhost:9763/ExtensionAPI-0.1.0/services/worksets/workset1/volumes?user=fred\n\n url = URI.parse(\"#{APP_CONFIG['registry_url']}/worksets/#{workset_name}/volumes\")\n http = Net::HTTP.new(url.host, url.port)\n if Rails.env.development?\n http.set_debug_output($stdout)\n end\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n\n request = Net::HTTP::Put.new(url.request_uri)\n request[\"Content-Type\"] = \"application/vnd.htrc-volume+xml\"\n request.add_field(\"Authorization\", \"Bearer #{token}\")\n\n request.body = volumes_xml\n response = http.request(request)\n\n #xml = response.body\n\n case response\n when Net::HTTPUnauthorized then\n raise Exceptions::SessionExpiredError.new(\"Session expired. 
Please login again\")\n when Net::HTTPSuccess then\n # Do nothing\n else\n raise Exceptions::SystemError.new(\"Error retrieving worksets (HTTP #{response.code})\")\n end\n\n end", "def update\n @ambito = Ambito.find(params[:id])\n\n respond_to do |format|\n if @ambito.update_attributes(params[:ambito])\n format.html { redirect_to(@ambito, :notice => 'Ambito was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @ambito.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @bowl = Bowl.find(params[:id])\n\n respond_to do |format|\n if @bowl.update_attributes(params[:bowl])\n format.html { redirect_to(@bowl, :notice => 'Bowl was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @bowl.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @carro = Carro.find(params[:id])\n\n respond_to do |format|\n if @carro.update_attributes(params[:carro])\n format.html { redirect_to(@carro, :notice => 'Carro atualizado com sucesso.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @carro.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @box = Box.find(params[:id])\n\n respond_to do |format|\n if @box.update_attributes(params[:box])\n #format.html { redirect_to(@box, :notice => 'Box was successfully updated.') }\n format.html {redirect_to my_boxes_path}\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @box.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @aauto = Aauto.find(params[:id])\n\n respond_to do |format|\n if @aauto.update_attributes(params[:aauto])\n format.html { redirect_to(@aauto, :notice => 'Aauto was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @aauto.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @bla = Bla.find(params[:id])\n\n respond_to do |format|\n if @bla.update_attributes(params[:bla])\n format.html { redirect_to @bla, :notice => 'Bla was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @bla.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @node = Node.scopied.find(params[:id])\n\n respond_to do |format|\n if @node.update_attributes(params[:node])\n format.html { redirect_to(@node, :notice => 'Node was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @node.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n put :update\n end", "def update_tenant_circle(args = {}) \n put(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend", "def put(path, params = {})\n request(:put, path, params)\n end", "def put(path, params = {})\n request(:put, path, params)\n end", "def update\n respond_to do |format|\n if @robo.update(robo_params)\n format.html { redirect_to @robo, notice: 'Robo was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @robo }\n else\n format.html { render :edit }\n format.json { render json: @robo.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @bloque = Bloque.find(params[:id])\n\n if @bloque.update(params[:bloque])\n head :no_content\n else\n render json: @bloque.errors, status: :unprocessable_entity\n end\n end", "def create_update_volumes(username, token, workset_name, volume_ids)\n\n #<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n #<volumes xmlns=\"http://registry.htrc.i3.illinois.edu/entities/workset\">\n # <volume>\n # <id>9999999</id>\n # </volume>\n # <volume>\n # <id>3333333</id>\n # </volume>\n # </volumes>\n volumes_xml =\n \"<?xml version=\\\"1.0\\\" encoding=\\\"UTF-8\\\" standalone=\\\"yes\\\"?>\" +\n \"<volumes xmlns=\\\"http://registry.htrc.i3.illinois.edu/entities/workset\\\">\";\n\n for id in volume_ids\n volumes_xml += \"<volume><id>#{id}</id></volume>\"\n end\n volumes_xml += \"</volumes>\"\n\n\n # curl -v --data @new_volumes.xml -X PUT \\\n # -H \"Content-Type: application/vnd.htrc-volume+xml\" \\\n # -H \"Accept: application/vnd.htrc-volume+xml\" \\\n # http://localhost:9763/ExtensionAPI-0.1.0/services/worksets/workset1/volumes?user=fred\n\n url = URI.parse(\"#{APP_CONFIG['registry_url']}/worksets/#{workset_name}\")\n http = Net::HTTP.new(url.host, url.port)\n if Rails.env.development?\n http.set_debug_output($stdout)\n end\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n\n request = Net::HTTP::Put.new(url.path)\n request[\"Content-Type\"] = \"application/vnd.htrc-volume+xml\"\n request.add_field(\"Authorization\", \"Bearer #{token}\")\n\n request.body = volumes_xml\n response = http.request(request)\n\n #xml = response.body\n\n case response\n when Net::HTTPUnauthorized then\n raise Exceptions::SessionExpiredError.new(\"Session expired. 
Please login again\")\n when Net::HTTPSuccess then\n # Do nothing\n else\n raise Exceptions::SystemError.new(\"Error retrieving worksets (HTTP #{response.code})\")\n end\n end", "def api_put(path, data = {})\n api_request(:put, path, :data => data)\n end", "def update\n @nossos_servico = NossosServico.find(params[:id])\n\n respond_to do |format|\n if @nossos_servico.update_attributes(params[:nossos_servico])\n format.html { redirect_to(@nossos_servico, :notice => 'Nossos servico was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @nossos_servico.errors, :status => :unprocessable_entity }\n end\n end\n end", "def edit_axis2XML(carbon_home,http_port,https_port) \n\n\tFile.open(File.join(carbon_home , 'conf','axis2.xml')) do |config_file|\n\t\t# Open the document and edit the port (axis2.xml)\n\t\tconfig = Document.new(config_file)\n\t\t\n\t\tconfig.root.elements[25].elements[1].text=http_port\n\t\tconfig.root.elements[26].elements[1].text=https_port\n\t\n\t\t\n\t\t# Write the result to a new file.\n\t\tformatter = REXML::Formatters::Default.new\n\t\tFile.open(File.join(carbon_home , 'conf','result_axis2.xml'), 'w') do |result|\n\t\tformatter.write(config, result)\n\t\tend\n\tend \n\tFile.delete(File.join(carbon_home , 'conf','axis2.xml'))\n\tFile.rename( File.join(carbon_home , 'conf','result_axis2.xml'),File.join(carbon_home , 'conf','axis2.xml') )\n\nend", "def update\n respond_to do |format|\n if @ba.update(ba_params)\n format.html { redirect_to @ba, notice: 'ベストアンサーを更新しました' }\n format.json { render :show, status: :ok, location: @ba }\n else\n format.html { render :edit }\n format.json { render json: @ba.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @aniversario = Aniversario.find(params[:id])\n\n respond_to do |format|\n if @aniversario.update_attributes(params[:aniversario])\n format.html { redirect_to(@aniversario, :notice => 'Aniversario was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @aniversario.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @sabor = Sabor.find(params[:id])\n\n respond_to do |format|\n if @sabor.update_attributes(params[:sabor])\n format.html { redirect_to @sabor, notice: 'Sabor was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @sabor.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @bitacora = Bitacora.find(params[:id])\n\n respond_to do |format|\n if @bitacora.update_attributes(params[:bitacora])\n format.html { redirect_to @bitacora, notice: 'Bitacora was successfully updated.' 
}\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bitacora.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\r\n @razdel1 = Razdel1.find(params[:id])\r\n\r\n respond_to do |format|\r\n if @razdel1.update_attributes(params[:razdel1])\r\n flash[:notice] = 'razdel1 was successfully updated.'\r\n format.html { redirect_to(@razdel1) }\r\n format.xml { head :ok } \r\n else\r\n format.html { render :action => \"edit\" }\r\n format.xml { render :xml => @razdel1.errors, :status => :unprocessable_entity }\r\n end\r\n end\r\n end", "def update\n respond_to do |format|\n if @intranet_boleto.update(intranet_boleto_params)\n format.html { redirect_to @intranet_boleto, notice: \"Boleto was successfully updated.\" }\n format.json { render :show, status: :ok, location: @intranet_boleto }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @intranet_boleto.errors, status: :unprocessable_entity }\n end\n end\n end", "def put(path, params={})\n request(:put, path, params)\n end" ]
[ "0.6471276", "0.6349391", "0.61486113", "0.6087704", "0.6031485", "0.5948117", "0.5842212", "0.5778172", "0.5773976", "0.56518084", "0.5553332", "0.55451053", "0.55291426", "0.55029553", "0.54884577", "0.54634005", "0.54506475", "0.54460955", "0.54380375", "0.5436617", "0.5432839", "0.5418186", "0.5417449", "0.5396499", "0.53929853", "0.5390268", "0.5386663", "0.5382465", "0.5378992", "0.53785527", "0.53613985", "0.5359608", "0.53547513", "0.5349351", "0.5334407", "0.53265256", "0.5318482", "0.5312743", "0.5305777", "0.53043276", "0.53019464", "0.5293373", "0.52884257", "0.5262285", "0.5252003", "0.52506596", "0.52449346", "0.52400285", "0.52344906", "0.5231598", "0.52208525", "0.52208525", "0.52208525", "0.5216585", "0.52091527", "0.5207671", "0.52053237", "0.5200154", "0.5188129", "0.5187136", "0.5179839", "0.5179393", "0.51677376", "0.5167256", "0.5166807", "0.51662844", "0.51662844", "0.51662844", "0.51662844", "0.51662844", "0.51662844", "0.51662844", "0.51662844", "0.5164277", "0.5163801", "0.5162534", "0.5161", "0.5160355", "0.5158502", "0.51569766", "0.51565975", "0.51502585", "0.5146706", "0.51384336", "0.5132952", "0.5129116", "0.5129116", "0.51287663", "0.5126173", "0.51249397", "0.51178735", "0.51132303", "0.51105875", "0.510689", "0.5104553", "0.5103903", "0.5102643", "0.5102496", "0.50978863", "0.50965625" ]
0.61298263
3
DELETE /bairros/1 DELETE /bairros/1.xml
def destroy @bairro = Bairro.find(params[:id]) @bairro.destroy respond_to do |format| format.html { redirect_to(admin_bairros_path) } format.xml { head :ok } end end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def destroy\n RestClient.delete \"#{REST_API_URI}/contents/#{id}.xml\" \n self\n end", "def delete()\n response = send_post_request(@xml_api_delete_path)\n response.is_a?(Net::HTTPSuccess) or response.is_a?(Net::HTTPRedirection)\n end", "def destroy\n @browsenodeid = Browsenodeid.find(params[:id])\n @browsenodeid.destroy\n\n respond_to do |format|\n format.html { redirect_to(browsenodeids_url) }\n format.xml { head :ok }\n end\n end", "def delete()\n response = send_post_request(@xml_api_delete_path)\n response.is_a?(Net::HTTPSuccess) or response.is_a?(Net::HTTPRedirection)\n end", "def netdev_resxml_delete( xml )\n top = netdev_resxml_top( xml )\n par = top.instance_variable_get(:@parent)\n par['delete'] = 'delete'\n end", "def destroy\n @relatestagiario = Relatestagiario.find(params[:id])\n @relatestagiario.destroy\n\n respond_to do |format|\n format.html { redirect_to(relatestagiarios_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @bixo = Bixo.find(params[:id])\n @bixo.destroy\n\n respond_to do |format|\n format.html { redirect_to(bixos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @bairro_micro = BairroMicro.find(params[:id])\n @bairro_micro.destroy\n\n respond_to do |format|\n format.html { redirect_to(bairro_micros_url) }\n format.xml { head :ok }\n end\n end", "def delete\n client.delete(\"/#{id}\")\n end", "def destroy\n @rubro.destroy\n\n respond_to do |format|\n format.html { redirect_to(rubros_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @reclamacao = Reclamacao.find(params[:id])\n @reclamacao.destroy\n\n respond_to do |format|\n format.html { redirect_to(reclamacaos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @barrio = Barrio.find(params[:id])\n @barrio.destroy\n\n respond_to do |format|\n format.html { redirect_to(barrios_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @remocao = Remocao.find(params[:id])\n @remocao.destroy\n\n respond_to do |format|\n format.html { redirect_to(remocaos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @bonificacion = Bonificacion.find(params[:id])\n @bonificacion.destroy\n\n respond_to do |format|\n format.html { redirect_to(bonificacions_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @bap = Bap.find(params[:id])\n @bap.destroy\n\n respond_to do |format|\n format.html { redirect_to(baps_url) }\n format.xml { head :ok }\n end\n end", "def destroy\r\n @razdel1 = Razdel1.find(params[:id])\r\n @razdel1.destroy\r\n\r\n respond_to do |format|\r\n format.html { redirect_to(razdel1s_url) }\r\n format.xml { head :ok }\r\n end\r\n end", "def destroy\n @aisle = Aisle.find(params[:id])\n @aisle.destroy\n\n respond_to do |format|\n format.html { redirect_to(aisles_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @asambleista = Asambleista.find(params[:id])\n @asambleista.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_asambleistas_path) }\n format.xml { head :ok }\n end\n end", "def delete(id)\n request = Net::HTTP::Delete.new(\"#{@url}/#{id}.xml\")\n http = Net::HTTP.new(@uri.host, @uri.port)\n response = http.request(request)\n\n # no response body will be returned\n case response\n when Net::HTTPSuccess\n return \"#{response.code} OK\"\n else\n return \"#{response.code} ERROR\"\n end\n end", "def destroy\n @child_dupa2 = ChildDupa2.find(params[:id])\n @child_dupa2.destroy\n\n respond_to do |format|\n format.html { redirect_to(child_dupa2s_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @bail = 
Bail.find(params[:id])\n# @bail.destroy\n\n respond_to do |format|\n format.html { redirect_to(bails_url) }\n format.xml { head :ok }\n end\n end", "def delete(path)\n RestClient.delete request_base+path\n end", "def destroy\n @recurso = Recurso.find(params[:id])\n @recurso.destroy\n\n respond_to do |format|\n format.html { redirect_to(recursos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @receita = Receita.find(params[:id])\n @receita.destroy\n\n respond_to do |format|\n format.html { redirect_to(receitas_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @aauto = Aauto.find(params[:id])\n @aauto.destroy\n\n respond_to do |format|\n format.html { redirect_to(aautos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @arquivo = Arquivo.find(params[:id])\n\n @comentarios = Comentario.where(:comentavel_id => @arquivo.id)\n\n if @comentarios\n @comentarios.delete_all\n end\n\n @arquivo.destroy\n\n respond_to do |format|\n format.html { redirect_to arquivos_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @abonne = Abonne.find(params[:id])\n @abonne.destroy\n\n respond_to do |format|\n format.html { redirect_to(abonnes_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @distribuidora = Distribuidora.find(params[:id])\n @distribuidora.destroy\n\n respond_to do |format|\n format.html { redirect_to(distribuidoras_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @acre = Acre.find(params[:id])\n @acre.destroy\n\n respond_to do |format|\n format.html { redirect_to(acres_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @recebimento = Recebimento.find(params[:id])\n @recebimento.destroy\n\n respond_to do |format|\n format.html { redirect_to(recebimentos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @orc_ficha = OrcFicha.find(params[:id])\n @orc_ficha.destroy\n\n respond_to do |format|\n format.html { redirect_to(orc_fichas_url) }\n format.xml { head :ok }\n end\n end", "def delete(options={})\n connection.delete(\"/\", @name)\n end", "def destroy\n @suministro = Suministro.find(params[:id])\n @suministro.destroy\n\n respond_to do |format|\n format.html { redirect_to(suministros_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @reputacao_carona = ReputacaoCarona.find(params[:id])\n @reputacao_carona.destroy\n\n respond_to do |format|\n format.html { redirect_to(reputacao_caronas_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @direccion = Direccion.find(params[:id])\n @direccion.destroy\n\n respond_to do |format|\n format.html { redirect_to(direccions_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @cabasiento = Cabasiento.find(params[:id])\n @cabasiento.destroy\n\n respond_to do |format|\n format.html { redirect_to(cabasientos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @ambito = Ambito.find(params[:id])\n @ambito.destroy\n\n respond_to do |format|\n format.html { redirect_to(ambitos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @dossier = Dossier.find(params[:id])\n @dossier.destroy\n\n respond_to do |format|\n format.html { redirect_to(\"/\") }\n format.xml { head :ok }\n end\n end", "def delete_data(index_name)\n uri = @client.make_uri(\"/#{index_name}/update/\")\n req = HTTP::Post.new(uri)\n req.content_type = 'text/xml'\n req.body = '<delete><query>*:*</query></delete>'\n response = @client.send_http(req, true, ['200'])\n end", "def delete\n start { |connection| connection.request http :Delete }\n end", "def 
destroy\n @bom = Bom.find(params[:id])\n @bom.destroy\n\n respond_to do |format|\n format.html { redirect_to(boms_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @feria2010observacion = Feria2010observacion.find(params[:id])\n @feria2010observacion.destroy\n\n respond_to do |format|\n format.html { redirect_to(feria2010observaciones_url) }\n format.xml { head :ok }\n end\n end", "def delete(path, params = {})\n debug_log \"DELETE #{@host}#{path} params:#{params}\"\n res = connection.delete path, params\n debug_log \"Response status:#{res.status}, body:#{res.body}\"\n res\n end", "def destroy\n @cuenta = Cuenta.find(params[:id])\n @cuenta.destroy\n\n respond_to do |format|\n format.html { redirect_to(cuentas_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @cuenta = Cuenta.find(params[:id])\n @cuenta.destroy\n\n respond_to do |format|\n format.html { redirect_to(cuentas_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @cuenta = Cuenta.find(params[:id])\n @cuenta.destroy\n\n respond_to do |format|\n format.html { redirect_to(cuentas_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @asistencia = Asistencia.find(params[:id])\n @asistencia.destroy\n\n respond_to do |format|\n format.html { redirect_to(asistencias_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @bdig = Bdig.find(params[:id])\n @bdig.destroy\n\n respond_to do |format|\n format.html { redirect_to(bdigs_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @carro = Carro.find(params[:id])\n @carro.destroy\n\n respond_to do |format|\n format.html { redirect_to(carros_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @reclamo = Reclamo.find(params[:id])\n @reclamo.destroy\n\n respond_to do |format|\n format.html { redirect_to(reclamos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @documento = Documento.find(params[:id])\n @documento.destroy\n\n respond_to do |format|\n format.html { redirect_to(documentos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @genbank_file.destroy\n\n respond_to do |format|\n format.xml { head :ok }\n format.json { head :ok }\n end\n end", "def destroy\n @fb_adresa = FbAdresa.find(params[:id])\n @fb_adresa.destroy\n\n respond_to do |format|\n format.html { redirect_to(fb_adresas_url) }\n format.xml { head :ok }\n end\n end", "def delete(path)\n request(:delete, path)\n end", "def destroy\n @bingo = Bingo.find(params[:id])\n @bingo.destroy\n\n respond_to do |format|\n format.html { redirect_to(bingos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @domino = Domino.find(params[:id])\n @domino.destroy\n\n respond_to do |format|\n format.html { redirect_to(dominos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @documento = @externo.documentos.find(params[:id])\n @documento.destroy\n\n respond_to do |format|\n format.html { redirect_to(documentos_url(@externo)) }\n format.xml { head :ok }\n end\n end", "def destroy\n @reputacao_veiculo = ReputacaoVeiculo.find(params[:id])\n @reputacao_veiculo.destroy\n\n respond_to do |format|\n format.html { redirect_to(reputacao_veiculos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @leilao = Leilao.find(params[:id])\n @leilao.destroy\n\n respond_to do |format|\n format.html { redirect_to(leilaos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\r\n @asignacion = Asignacion.find(params[:id])\r\n @asignacion.destroy\r\n\r\n respond_to do |format|\r\n format.html { redirect_to(asignacions_url) }\r\n format.xml 
{ head :ok }\r\n end\r\n end", "def delete path\n make_request(path, \"delete\", {})\n end", "def destroy\n @tipo_restaurante = TipoRestaurante.find(params[:id])\n @tipo_restaurante.destroy\n\n respond_to do |format|\n format.html { redirect_to(tipo_restaurantes_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @estudiante = Estudiante.find(params[:id])\n @estudiante.destroy\n\n respond_to do |format|\n format.html { redirect_to(estudiantes_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @estudiante = Estudiante.find(params[:id])\n @estudiante.destroy\n\n respond_to do |format|\n format.html { redirect_to(estudiantes_url) }\n format.xml { head :ok }\n end\n end", "def destroy1\n @todo = Todo.find(params[:id])\n @todo.destroy\n\n respond_to do |format|\n format.html { redirect_to(todos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @iguanasactualizacion = Iguanasactualizacion.find(params[:id])\n @iguanasactualizacion.destroy\n\n respond_to do |format|\n format.html { redirect_to(iguanasactualizaciones_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @contrato = Contrato.find(params[:id])\n @contrato.destroy\n\n respond_to do |format|\n format.html { redirect_to(contratos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @nostro = Nostro.find(params[:id])\n @nostro.destroy\n\n respond_to do |format|\n format.html { redirect_to(nostros_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @coleccionista = Coleccionista.find(params[:id])\n @coleccionista.destroy\n\n respond_to do |format|\n format.html { redirect_to(coleccionistas_url) }\n format.xml { head :ok }\n end\n end", "def delete(path)\n make_call(mk_conn(path), :delete)\n end", "def destroy\n arquivo = Arquivo.find(@pregoestitulosgrafico.arquivo_id)\n\n File.delete(arquivo.caminho)\n\n pregoestitulo = Pregoestitulo.find(@pregoestitulosgrafico.pregoestitulo_id)\n \n @pregoestitulosgrafico.destroy\n respond_to do |format|\n format.html { redirect_to pregoestitulo, notice: 'Arquivo excluído com sucesso.' 
}\n format.json { head :no_content }\n end\n end", "def deletes_to(path,opts={},&block) #:nodoc: \n crud_to(:delete,path,opts[:params] || {},opts,&block)\n end", "def destroy\n @aviso = Aviso.find(params[:id])\n @aviso.destroy\n\n respond_to do |format|\n format.html { redirect_to(avisos_url) }\n format.xml { head :ok }\n end\n end", "def delete(container_name, file_name)\n validate_path_elements(container_name, file_name)\n\n client.request(\n method: :delete,\n path: \"#{container_name}/#{file_name}\",\n expected: 204\n )\n end", "def destroy\n @banana = Banana.find(params[:id])\n @banana.destroy\n\n respond_to do |format|\n format.html { redirect_to(bananas_url) }\n format.xml { head :ok }\n end\n end", "def delete(path = '/files/', params = {})\n request :delete, path, params\n end", "def delete\n Iterable.request(conf, base_path).delete\n end", "def destroy\n @dato = Dato.find(params[:id])\n @dato.destroy\n\n respond_to do |format|\n format.html { redirect_to(datos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @dato = Dato.find(params[:id])\n @dato.destroy\n\n respond_to do |format|\n format.html { redirect_to(datos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @tipo_conta = TipoConta.find(params[:id])\n @tipo_conta.destroy\n\n respond_to do |format|\n format.html { redirect_to(tipo_contas_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @bogey = Bogey.find(params[:id])\n @bogey.destroy\n\n respond_to do |format|\n format.html { redirect_to(bogeys_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @tipo_de_documento = TipoDeDocumento.find(params[:id])\n @tipo_de_documento.destroy\n\n respond_to do |format|\n format.html { redirect_to(tipos_de_documento_url) }\n format.xml { head :ok }\n end\n end", "def delete_aos_version(args = {}) \n delete(\"/aosversions.json/#{args[:aosVersionId]}\", args)\nend", "def destroy\n @backup = Backup.find(params[:id])\n @backup.destroy\n\n respond_to do |format|\n format.html { redirect_to(backups_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @regiaos = Regiao.find(params[:id])\n @regiaos.destroy\n\n respond_to do |format|\n format.html { redirect_to(homes_path) }\n format.xml { head :ok }\n end\n end", "def delete\n execute_request('DELETE') do |uri, headers|\n HTTP.http_client.delete(uri, header: headers)\n end\n end", "def destroy\n @pagina = Pagina.find(params[:id])\n @pagina.destroy\n\n respond_to do |format|\n format.html { redirect_to(paginas_url) }\n format.xml { head :ok }\n end\n end", "def destroy \n Link.connection.execute(\"delete from links where id in (#{params[:id].join(',')})\") unless params[:id].blank?\n respond_to do |format|\n format.html { redirect_to(links_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @bodega.destroy\n respond_to do |format|\n format.html { redirect_to bodegas_url, notice: 'Bodega fue eliminada exitosamente' }\n format.json { head :no_content }\n end\n end", "def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end", "def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end", "def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end", "def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end", "def delete_all(xpath); end", "def destroy\n @zebra = Zebra.find(params[:id])\n @zebra.destroy\n\n respond_to do |format|\n format.html { redirect_to(zebras_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @aplicacion = Aplicacion.find(params[:id])\n @aplicacion.destroy\n\n 
respond_to do |format|\n format.html { redirect_to(aplicacions_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @roaster = Roaster.find(params[:id])\n @roaster.destroy\n\n respond_to do |format|\n format.html { redirect_to(roasters_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @vicariato = Vicariato.find(params[:id])\n @vicariato.destroy\n\n respond_to do |format|\n format.html { redirect_to(vicariatos_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @feefile = Feefile.find(params[:id])\n directory= \"uploads\"\n path =File.join(directory,@feefile.feefilename)\n File.delete(path)\n @feefile.destroy\n \n\n respond_to do |format|\n format.html { redirect_to(feefiles_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @elemento = Elemento.find(params[:id])\n @elemento.destroy\n\n respond_to do |format|\n format.html { redirect_to(elementos_url) }\n format.xml { head :ok }\n end\n end" ]
[ "0.6918107", "0.6720777", "0.66734356", "0.6633984", "0.6571922", "0.65668035", "0.6537507", "0.6418858", "0.64137757", "0.63924545", "0.63758075", "0.63695467", "0.6343922", "0.6339396", "0.6337218", "0.63289523", "0.6299543", "0.62928206", "0.62815976", "0.6270704", "0.62667733", "0.62615764", "0.6259575", "0.6251509", "0.6244444", "0.62394583", "0.6235836", "0.6233695", "0.6224263", "0.6214216", "0.6209351", "0.61952585", "0.61908245", "0.6190062", "0.61882603", "0.61827844", "0.61807317", "0.6176387", "0.6172367", "0.6168711", "0.6162958", "0.6156248", "0.6154156", "0.61517394", "0.61517394", "0.61517394", "0.6151113", "0.614751", "0.6147042", "0.6146095", "0.6137262", "0.61353266", "0.61320037", "0.61289716", "0.6126987", "0.61266077", "0.6121176", "0.61084646", "0.6105271", "0.6103235", "0.60982704", "0.60975325", "0.60970503", "0.6096788", "0.60955757", "0.6094024", "0.6089858", "0.6089687", "0.60880566", "0.6087833", "0.6084367", "0.6081665", "0.6081563", "0.6081029", "0.6080494", "0.60750926", "0.6072466", "0.6067036", "0.6067036", "0.60611326", "0.60606194", "0.60596687", "0.6059076", "0.60589576", "0.6056881", "0.6055185", "0.60551757", "0.60549426", "0.60539484", "0.6053444", "0.6053444", "0.6053444", "0.6053444", "0.6051079", "0.6050828", "0.6050077", "0.60479444", "0.60455036", "0.60439736", "0.6041127" ]
0.6927857
0