file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–39k) | suffix (large_string, lengths 0–36.1k) | middle (large_string, lengths 0–29.4k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
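Each row below is a fill-in-the-middle (FIM) example: the original source file equals `prefix + middle + suffix`, and `fim_type` records how the hole was chosen (one of `conditional_block`, `identifier_name`, `identifier_body`, `random_line_split`). As a minimal sketch of how such a row is typically consumed, assuming a plain `dict` per row and illustrative sentinel strings (the sentinels are an assumption for illustration; they come from the tokenizer, not from this dataset):

```python
# Sketch only: reassemble and format one FIM row of this table.
# The sentinel tokens below are hypothetical; real FIM setups register
# their own special tokens with the tokenizer.
FIM_PREFIX, FIM_SUFFIX, FIM_MIDDLE = "<fim_prefix>", "<fim_suffix>", "<fim_middle>"

def reconstruct_file(row: dict) -> str:
    # The original file is recovered by splicing the middle back into the hole.
    return row["prefix"] + row["middle"] + row["suffix"]

def to_psm_example(row: dict) -> str:
    # Prefix-Suffix-Middle (PSM) ordering: the model is shown the prefix and
    # suffix, then trained to generate the missing middle span.
    return (FIM_PREFIX + row["prefix"]
            + FIM_SUFFIX + row["suffix"]
            + FIM_MIDDLE + row["middle"])
```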
datalake_samples_access_control_recursive_async.py
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: datalake_samples_access_control_recursive_async.py
DESCRIPTION:
This sample demonstrates recursive set/get access control on directories.
USAGE:
python datalake_samples_access_control_recursive_async.py
Set the environment variables with your own values before running the sample:
1) STORAGE_ACCOUNT_NAME - the storage account name
2) STORAGE_ACCOUNT_KEY - the storage account key
"""
import os
import random
import uuid
import asyncio
from azure.core.exceptions import AzureError
from azure.storage.filedatalake.aio import (
DataLakeServiceClient,
)
# TODO: rerun after test account is fixed
async def recursive_access_control_sample(filesystem_client):
# create a parent directory
dir_name = "testdir"
print("Creating a directory named '{}'.".format(dir_name))
directory_client = await filesystem_client.create_directory(dir_name)
# populate the directory with some child files
await create_child_files(directory_client, 35)
# get and display the permissions of the parent directory
acl_props = await directory_client.get_access_control()
print("Permissions of directory '{}' are {}.".format(dir_name, acl_props['permissions']))
# set the permissions of the entire directory tree recursively
# update/remove acl operations are performed the same way
acl = 'user::rwx,group::r-x,other::rwx'
failed_entries = []
# the progress callback is invoked each time a batch is completed
async def progress_callback(acl_changes):
print(("In this batch: {} directories and {} files were processed successfully, {} failures were counted. " +
"In total, {} directories and {} files were processed successfully, {} failures were counted.")
.format(acl_changes.batch_counters.directories_successful, acl_changes.batch_counters.files_successful,
acl_changes.batch_counters.failure_count, acl_changes.aggregate_counters.directories_successful,
acl_changes.aggregate_counters.files_successful, acl_changes.aggregate_counters.failure_count))
# keep track of failed entries if there are any
failed_entries.append(acl_changes.batch_failures)
# illustrate the operation by using a small batch_size
try:
acl_change_result = await directory_client.set_access_control_recursive(acl=acl,
progress_hook=progress_callback,
batch_size=5)
except AzureError as error:
        # if the error carries a continuation_token, you can restart the operation from where it stopped
        if error.continuation_token:
            acl_change_result = \
                await directory_client.set_access_control_recursive(acl=acl,
                                                                    continuation_token=error.continuation_token,
                                                                    progress_hook=progress_callback,
                                                                    batch_size=5)
        else:
            # without a continuation token the operation cannot be resumed; re-raise so that
            # acl_change_result is never referenced before assignment
            raise
print("Summary: {} directories and {} files were updated successfully, {} failures were counted."
.format(acl_change_result.counters.directories_successful, acl_change_result.counters.files_successful,
acl_change_result.counters.failure_count))
# if an error was encountered, a continuation token would be returned if the operation can be resumed
if acl_change_result.continuation is not None:
print("The operation can be resumed by passing the continuation token {} again into the access control method."
.format(acl_change_result.continuation))
# get and display the permissions of the parent directory again
acl_props = await directory_client.get_access_control()
print("New permissions of directory '{}' and its children are {}.".format(dir_name, acl_props['permissions']))
async def create_child_files(directory_client, num_child_files):
import itertools
async def create_file():
# generate a random name
file_name = str(uuid.uuid4()).replace('-', '')
file_client = directory_client.get_file_client(file_name)
await file_client.create_file()
futures = [asyncio.ensure_future(create_file()) for _ in itertools.repeat(None, num_child_files)]
await asyncio.wait(futures)
print("Created {} files under the directory '{}'.".format(num_child_files, directory_client.path_name))
async def run():
account_name = os.getenv('STORAGE_ACCOUNT_NAME', "")
account_key = os.getenv('STORAGE_ACCOUNT_KEY', "")
# set up the service client with the credentials from the environment variables
service_client = DataLakeServiceClient(account_url="{}://{}.dfs.core.windows.net".format(
"https",
account_name
), credential=account_key)
async with service_client:
        # generate a random name for testing purposes
fs_name = "testfs{}".format(random.randint(1, 1000))
print("Generating a test filesystem named '{}'.".format(fs_name))
# create the filesystem
filesystem_client = await service_client.create_file_system(file_system=fs_name)
# invoke the sample code
try:
await recursive_access_control_sample(filesystem_client)
finally:
# clean up the demo filesystem
await filesystem_client.delete_file_system()
if __name__ == '__main__':
|
|
loop = asyncio.get_event_loop()
loop.run_until_complete(run())
|
conditional_block
|
datalake_samples_access_control_recursive_async.py
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: datalake_samples_access_control_recursive_async.py
DESCRIPTION:
This sample demonstrates recursive set/get access control on directories.
USAGE:
python datalake_samples_access_control_recursive_async.py
Set the environment variables with your own values before running the sample:
1) STORAGE_ACCOUNT_NAME - the storage account name
2) STORAGE_ACCOUNT_KEY - the storage account key
"""
import os
import random
import uuid
import asyncio
from azure.core.exceptions import AzureError
from azure.storage.filedatalake.aio import (
DataLakeServiceClient,
)
# TODO: rerun after test account is fixed
async def recursive_access_control_sample(filesystem_client):
# create a parent directory
dir_name = "testdir"
print("Creating a directory named '{}'.".format(dir_name))
directory_client = await filesystem_client.create_directory(dir_name)
# populate the directory with some child files
await create_child_files(directory_client, 35)
# get and display the permissions of the parent directory
acl_props = await directory_client.get_access_control()
print("Permissions of directory '{}' are {}.".format(dir_name, acl_props['permissions']))
# set the permissions of the entire directory tree recursively
# update/remove acl operations are performed the same way
acl = 'user::rwx,group::r-x,other::rwx'
failed_entries = []
# the progress callback is invoked each time a batch is completed
async def progress_callback(acl_changes):
print(("In this batch: {} directories and {} files were processed successfully, {} failures were counted. " +
"In total, {} directories and {} files were processed successfully, {} failures were counted.")
.format(acl_changes.batch_counters.directories_successful, acl_changes.batch_counters.files_successful,
acl_changes.batch_counters.failure_count, acl_changes.aggregate_counters.directories_successful,
acl_changes.aggregate_counters.files_successful, acl_changes.aggregate_counters.failure_count))
# keep track of failed entries if there are any
failed_entries.append(acl_changes.batch_failures)
# illustrate the operation by using a small batch_size
try:
acl_change_result = await directory_client.set_access_control_recursive(acl=acl,
progress_hook=progress_callback,
batch_size=5)
except AzureError as error:
        # if the error carries a continuation_token, you can restart the operation from where it stopped
        if error.continuation_token:
            acl_change_result = \
                await directory_client.set_access_control_recursive(acl=acl,
                                                                    continuation_token=error.continuation_token,
                                                                    progress_hook=progress_callback,
                                                                    batch_size=5)
        else:
            # without a continuation token the operation cannot be resumed; re-raise so that
            # acl_change_result is never referenced before assignment
            raise
print("Summary: {} directories and {} files were updated successfully, {} failures were counted."
.format(acl_change_result.counters.directories_successful, acl_change_result.counters.files_successful,
acl_change_result.counters.failure_count))
# if an error was encountered, a continuation token would be returned if the operation can be resumed
if acl_change_result.continuation is not None:
print("The operation can be resumed by passing the continuation token {} again into the access control method."
.format(acl_change_result.continuation))
# get and display the permissions of the parent directory again
acl_props = await directory_client.get_access_control()
print("New permissions of directory '{}' and its children are {}.".format(dir_name, acl_props['permissions']))
async def
|
(directory_client, num_child_files):
import itertools
async def create_file():
# generate a random name
file_name = str(uuid.uuid4()).replace('-', '')
file_client = directory_client.get_file_client(file_name)
await file_client.create_file()
futures = [asyncio.ensure_future(create_file()) for _ in itertools.repeat(None, num_child_files)]
await asyncio.wait(futures)
print("Created {} files under the directory '{}'.".format(num_child_files, directory_client.path_name))
async def run():
account_name = os.getenv('STORAGE_ACCOUNT_NAME', "")
account_key = os.getenv('STORAGE_ACCOUNT_KEY', "")
# set up the service client with the credentials from the environment variables
service_client = DataLakeServiceClient(account_url="{}://{}.dfs.core.windows.net".format(
"https",
account_name
), credential=account_key)
async with service_client:
        # generate a random name for testing purposes
fs_name = "testfs{}".format(random.randint(1, 1000))
print("Generating a test filesystem named '{}'.".format(fs_name))
# create the filesystem
filesystem_client = await service_client.create_file_system(file_system=fs_name)
# invoke the sample code
try:
await recursive_access_control_sample(filesystem_client)
finally:
# clean up the demo filesystem
await filesystem_client.delete_file_system()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(run())
|
create_child_files
|
identifier_name
|
datalake_samples_access_control_recursive_async.py
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: datalake_samples_access_control_recursive_async.py
DESCRIPTION:
This sample demonstrates recursive set/get access control on directories.
USAGE:
python datalake_samples_access_control_recursive_async.py
Set the environment variables with your own values before running the sample:
1) STORAGE_ACCOUNT_NAME - the storage account name
2) STORAGE_ACCOUNT_KEY - the storage account key
"""
import os
import random
import uuid
import asyncio
from azure.core.exceptions import AzureError
from azure.storage.filedatalake.aio import (
DataLakeServiceClient,
)
# TODO: rerun after test account is fixed
async def recursive_access_control_sample(filesystem_client):
# create a parent directory
dir_name = "testdir"
print("Creating a directory named '{}'.".format(dir_name))
directory_client = await filesystem_client.create_directory(dir_name)
# populate the directory with some child files
await create_child_files(directory_client, 35)
# get and display the permissions of the parent directory
acl_props = await directory_client.get_access_control()
print("Permissions of directory '{}' are {}.".format(dir_name, acl_props['permissions']))
# set the permissions of the entire directory tree recursively
# update/remove acl operations are performed the same way
acl = 'user::rwx,group::r-x,other::rwx'
failed_entries = []
# the progress callback is invoked each time a batch is completed
async def progress_callback(acl_changes):
print(("In this batch: {} directories and {} files were processed successfully, {} failures were counted. " +
"In total, {} directories and {} files were processed successfully, {} failures were counted.")
.format(acl_changes.batch_counters.directories_successful, acl_changes.batch_counters.files_successful,
acl_changes.batch_counters.failure_count, acl_changes.aggregate_counters.directories_successful,
acl_changes.aggregate_counters.files_successful, acl_changes.aggregate_counters.failure_count))
# keep track of failed entries if there are any
failed_entries.append(acl_changes.batch_failures)
# illustrate the operation by using a small batch_size
try:
acl_change_result = await directory_client.set_access_control_recursive(acl=acl,
progress_hook=progress_callback,
batch_size=5)
except AzureError as error:
        # if the error carries a continuation_token, you can restart the operation from where it stopped
        if error.continuation_token:
            acl_change_result = \
                await directory_client.set_access_control_recursive(acl=acl,
                                                                    continuation_token=error.continuation_token,
                                                                    progress_hook=progress_callback,
                                                                    batch_size=5)
        else:
            # without a continuation token the operation cannot be resumed; re-raise so that
            # acl_change_result is never referenced before assignment
            raise
print("Summary: {} directories and {} files were updated successfully, {} failures were counted."
.format(acl_change_result.counters.directories_successful, acl_change_result.counters.files_successful,
acl_change_result.counters.failure_count))
# if an error was encountered, a continuation token would be returned if the operation can be resumed
if acl_change_result.continuation is not None:
print("The operation can be resumed by passing the continuation token {} again into the access control method."
.format(acl_change_result.continuation))
# get and display the permissions of the parent directory again
acl_props = await directory_client.get_access_control()
print("New permissions of directory '{}' and its children are {}.".format(dir_name, acl_props['permissions']))
async def create_child_files(directory_client, num_child_files):
import itertools
async def create_file():
# generate a random name
|
futures = [asyncio.ensure_future(create_file()) for _ in itertools.repeat(None, num_child_files)]
await asyncio.wait(futures)
print("Created {} files under the directory '{}'.".format(num_child_files, directory_client.path_name))
async def run():
account_name = os.getenv('STORAGE_ACCOUNT_NAME', "")
account_key = os.getenv('STORAGE_ACCOUNT_KEY', "")
# set up the service client with the credentials from the environment variables
service_client = DataLakeServiceClient(account_url="{}://{}.dfs.core.windows.net".format(
"https",
account_name
), credential=account_key)
async with service_client:
        # generate a random name for testing purposes
fs_name = "testfs{}".format(random.randint(1, 1000))
print("Generating a test filesystem named '{}'.".format(fs_name))
# create the filesystem
filesystem_client = await service_client.create_file_system(file_system=fs_name)
# invoke the sample code
try:
await recursive_access_control_sample(filesystem_client)
finally:
# clean up the demo filesystem
await filesystem_client.delete_file_system()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(run())
|
file_name = str(uuid.uuid4()).replace('-', '')
file_client = directory_client.get_file_client(file_name)
await file_client.create_file()
|
identifier_body
|
datalake_samples_access_control_recursive_async.py
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: datalake_samples_access_control_recursive_async.py
DESCRIPTION:
This sample demonstrates recursive set/get access control on directories.
USAGE:
python datalake_samples_access_control_recursive_async.py
Set the environment variables with your own values before running the sample:
1) STORAGE_ACCOUNT_NAME - the storage account name
2) STORAGE_ACCOUNT_KEY - the storage account key
"""
import os
import random
import uuid
import asyncio
from azure.core.exceptions import AzureError
from azure.storage.filedatalake.aio import (
DataLakeServiceClient,
)
# TODO: rerun after test account is fixed
async def recursive_access_control_sample(filesystem_client):
# create a parent directory
dir_name = "testdir"
print("Creating a directory named '{}'.".format(dir_name))
directory_client = await filesystem_client.create_directory(dir_name)
# populate the directory with some child files
await create_child_files(directory_client, 35)
# get and display the permissions of the parent directory
acl_props = await directory_client.get_access_control()
print("Permissions of directory '{}' are {}.".format(dir_name, acl_props['permissions']))
# set the permissions of the entire directory tree recursively
# update/remove acl operations are performed the same way
acl = 'user::rwx,group::r-x,other::rwx'
failed_entries = []
# the progress callback is invoked each time a batch is completed
async def progress_callback(acl_changes):
print(("In this batch: {} directories and {} files were processed successfully, {} failures were counted. " +
"In total, {} directories and {} files were processed successfully, {} failures were counted.")
.format(acl_changes.batch_counters.directories_successful, acl_changes.batch_counters.files_successful,
acl_changes.batch_counters.failure_count, acl_changes.aggregate_counters.directories_successful,
acl_changes.aggregate_counters.files_successful, acl_changes.aggregate_counters.failure_count))
# keep track of failed entries if there are any
failed_entries.append(acl_changes.batch_failures)
# illustrate the operation by using a small batch_size
try:
acl_change_result = await directory_client.set_access_control_recursive(acl=acl,
progress_hook=progress_callback,
batch_size=5)
except AzureError as error:
        # if the error carries a continuation_token, you can restart the operation from where it stopped
        if error.continuation_token:
            acl_change_result = \
                await directory_client.set_access_control_recursive(acl=acl,
                                                                    continuation_token=error.continuation_token,
                                                                    progress_hook=progress_callback,
                                                                    batch_size=5)
        else:
            # without a continuation token the operation cannot be resumed; re-raise so that
            # acl_change_result is never referenced before assignment
            raise
print("Summary: {} directories and {} files were updated successfully, {} failures were counted."
.format(acl_change_result.counters.directories_successful, acl_change_result.counters.files_successful,
acl_change_result.counters.failure_count))
# if an error was encountered, a continuation token would be returned if the operation can be resumed
if acl_change_result.continuation is not None:
print("The operation can be resumed by passing the continuation token {} again into the access control method."
.format(acl_change_result.continuation))
# get and display the permissions of the parent directory again
acl_props = await directory_client.get_access_control()
print("New permissions of directory '{}' and its children are {}.".format(dir_name, acl_props['permissions']))
async def create_child_files(directory_client, num_child_files):
import itertools
async def create_file():
# generate a random name
file_name = str(uuid.uuid4()).replace('-', '')
file_client = directory_client.get_file_client(file_name)
await file_client.create_file()
futures = [asyncio.ensure_future(create_file()) for _ in itertools.repeat(None, num_child_files)]
await asyncio.wait(futures)
print("Created {} files under the directory '{}'.".format(num_child_files, directory_client.path_name))
async def run():
account_name = os.getenv('STORAGE_ACCOUNT_NAME', "")
account_key = os.getenv('STORAGE_ACCOUNT_KEY', "")
# set up the service client with the credentials from the environment variables
service_client = DataLakeServiceClient(account_url="{}://{}.dfs.core.windows.net".format(
"https",
account_name
), credential=account_key)
async with service_client:
|
# create the filesystem
filesystem_client = await service_client.create_file_system(file_system=fs_name)
# invoke the sample code
try:
await recursive_access_control_sample(filesystem_client)
finally:
# clean up the demo filesystem
await filesystem_client.delete_file_system()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(run())
|
        # generate a random name for testing purposes
fs_name = "testfs{}".format(random.randint(1, 1000))
print("Generating a test filesystem named '{}'.".format(fs_name))
|
random_line_split
|
puntata.rs
|
use crate::die::Die;
use std::{collections::HashSet, fmt};
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Puntata {
value: Die,
count: i32,
}
impl Puntata {
pub fn new(count: i32, value: i32) -> Self {
Puntata {
value: Die::new(value),
count,
}
}
pub fn new_lama(count: i32) -> Self {
Puntata {
value: Die::new_lama(),
count,
}
}
pub fn get_value(self) -> i32 {
self.value.get_value()
}
pub fn get_count(self) -> i32 {
self.count
}
pub fn is_lama(self) -> bool {
self.value.is_lama()
}
pub fn with_count(self, count: i32) -> Self {
Puntata {
value: Die::new(self.value.get_value()),
count,
}
}
}
impl fmt::Display for Puntata {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
if self.is_lama() {
write!(fmt, "Puntata di {} Lama", self.count)
} else {
write!(fmt, "Puntata di {} {}", self.count, self.value.get_value())
}
}
}
pub fn least_gt_puntate(p: Puntata, is_palifico: bool) -> Vec<Puntata> {
let mut puntate = vec![];
if !p.is_lama() {
for i in (p.value.get_value() + 1)..7 {
puntate.push(Puntata::new(p.count, i));
}
for i in 2..7 {
puntate.push(Puntata::new(p.count + 1, i));
}
puntate.push(Puntata::new_lama((p.count + 1) / 2));
} else {
puntate.push(Puntata::new_lama(p.count + 1));
for i in 2..7 {
puntate.push(Puntata::new(p.count * 2 + 1, i));
}
}
if is_palifico {
puntate.retain(|pv| pv.value == p.value)
}
puntate
}
pub fn
|
(total_dices: i32, p: Puntata, is_palifico: bool) -> Vec<Puntata> {
let mut v = (p.count..total_dices)
.flat_map(|v| {
let px = p.with_count(v);
least_gt_puntate(px, is_palifico)
})
.collect::<HashSet<_>>();
if !is_palifico || p.is_lama() {
for i in p.count..=total_dices {
v.insert(Puntata::new_lama(i));
}
}
v.remove(&p);
v.insert(p);
v.into_iter().collect()
}
|
all_gt_puntate
|
identifier_name
|
puntata.rs
|
use crate::die::Die;
use std::{collections::HashSet, fmt};
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Puntata {
value: Die,
count: i32,
}
impl Puntata {
pub fn new(count: i32, value: i32) -> Self {
Puntata {
value: Die::new(value),
count,
}
}
pub fn new_lama(count: i32) -> Self {
Puntata {
value: Die::new_lama(),
count,
}
}
pub fn get_value(self) -> i32 {
self.value.get_value()
}
pub fn get_count(self) -> i32 {
self.count
}
pub fn is_lama(self) -> bool {
self.value.is_lama()
}
pub fn with_count(self, count: i32) -> Self {
Puntata {
value: Die::new(self.value.get_value()),
count,
}
}
}
impl fmt::Display for Puntata {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
if self.is_lama() {
write!(fmt, "Puntata di {} Lama", self.count)
} else {
write!(fmt, "Puntata di {} {}", self.count, self.value.get_value())
}
}
}
pub fn least_gt_puntate(p: Puntata, is_palifico: bool) -> Vec<Puntata> {
let mut puntate = vec![];
if !p.is_lama() {
for i in (p.value.get_value() + 1)..7 {
puntate.push(Puntata::new(p.count, i));
}
for i in 2..7 {
puntate.push(Puntata::new(p.count + 1, i));
}
puntate.push(Puntata::new_lama((p.count + 1) / 2));
} else {
puntate.push(Puntata::new_lama(p.count + 1));
for i in 2..7 {
puntate.push(Puntata::new(p.count * 2 + 1, i));
}
}
if is_palifico {
puntate.retain(|pv| pv.value == p.value)
}
puntate
}
pub fn all_gt_puntate(total_dices: i32, p: Puntata, is_palifico: bool) -> Vec<Puntata> {
let mut v = (p.count..total_dices)
.flat_map(|v| {
let px = p.with_count(v);
least_gt_puntate(px, is_palifico)
})
.collect::<HashSet<_>>();
if !is_palifico || p.is_lama() {
for i in p.count..=total_dices {
v.insert(Puntata::new_lama(i));
}
}
v.remove(&p);
|
v.into_iter().collect()
}
|
v.insert(p);
|
random_line_split
|
puntata.rs
|
use crate::die::Die;
use std::{collections::HashSet, fmt};
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Puntata {
value: Die,
count: i32,
}
impl Puntata {
pub fn new(count: i32, value: i32) -> Self {
Puntata {
value: Die::new(value),
count,
}
}
pub fn new_lama(count: i32) -> Self {
Puntata {
value: Die::new_lama(),
count,
}
}
pub fn get_value(self) -> i32 {
self.value.get_value()
}
pub fn get_count(self) -> i32 {
self.count
}
pub fn is_lama(self) -> bool {
self.value.is_lama()
}
pub fn with_count(self, count: i32) -> Self {
Puntata {
value: Die::new(self.value.get_value()),
count,
}
}
}
impl fmt::Display for Puntata {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
if self.is_lama() {
write!(fmt, "Puntata di {} Lama", self.count)
} else {
write!(fmt, "Puntata di {} {}", self.count, self.value.get_value())
}
}
}
pub fn least_gt_puntate(p: Puntata, is_palifico: bool) -> Vec<Puntata> {
let mut puntate = vec![];
if !p.is_lama()
|
else {
puntate.push(Puntata::new_lama(p.count + 1));
for i in 2..7 {
puntate.push(Puntata::new(p.count * 2 + 1, i));
}
}
if is_palifico {
puntate.retain(|pv| pv.value == p.value)
}
puntate
}
pub fn all_gt_puntate(total_dices: i32, p: Puntata, is_palifico: bool) -> Vec<Puntata> {
let mut v = (p.count..total_dices)
.flat_map(|v| {
let px = p.with_count(v);
least_gt_puntate(px, is_palifico)
})
.collect::<HashSet<_>>();
if !is_palifico || p.is_lama() {
for i in p.count..=total_dices {
v.insert(Puntata::new_lama(i));
}
}
v.remove(&p);
v.insert(p);
v.into_iter().collect()
}
|
{
for i in (p.value.get_value() + 1)..7 {
puntate.push(Puntata::new(p.count, i));
}
for i in 2..7 {
puntate.push(Puntata::new(p.count + 1, i));
}
puntate.push(Puntata::new_lama((p.count + 1) / 2));
}
|
conditional_block
|
puntata.rs
|
use crate::die::Die;
use std::{collections::HashSet, fmt};
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Puntata {
value: Die,
count: i32,
}
impl Puntata {
pub fn new(count: i32, value: i32) -> Self {
Puntata {
value: Die::new(value),
count,
}
}
pub fn new_lama(count: i32) -> Self
|
pub fn get_value(self) -> i32 {
self.value.get_value()
}
pub fn get_count(self) -> i32 {
self.count
}
pub fn is_lama(self) -> bool {
self.value.is_lama()
}
pub fn with_count(self, count: i32) -> Self {
Puntata {
value: Die::new(self.value.get_value()),
count,
}
}
}
impl fmt::Display for Puntata {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
if self.is_lama() {
write!(fmt, "Puntata di {} Lama", self.count)
} else {
write!(fmt, "Puntata di {} {}", self.count, self.value.get_value())
}
}
}
pub fn least_gt_puntate(p: Puntata, is_palifico: bool) -> Vec<Puntata> {
let mut puntate = vec![];
if !p.is_lama() {
for i in (p.value.get_value() + 1)..7 {
puntate.push(Puntata::new(p.count, i));
}
for i in 2..7 {
puntate.push(Puntata::new(p.count + 1, i));
}
puntate.push(Puntata::new_lama((p.count + 1) / 2));
} else {
puntate.push(Puntata::new_lama(p.count + 1));
for i in 2..7 {
puntate.push(Puntata::new(p.count * 2 + 1, i));
}
}
if is_palifico {
puntate.retain(|pv| pv.value == p.value)
}
puntate
}
pub fn all_gt_puntate(total_dices: i32, p: Puntata, is_palifico: bool) -> Vec<Puntata> {
let mut v = (p.count..total_dices)
.flat_map(|v| {
let px = p.with_count(v);
least_gt_puntate(px, is_palifico)
})
.collect::<HashSet<_>>();
if !is_palifico || p.is_lama() {
for i in p.count..=total_dices {
v.insert(Puntata::new_lama(i));
}
}
v.remove(&p);
v.insert(p);
v.into_iter().collect()
}
|
{
Puntata {
value: Die::new_lama(),
count,
}
}
|
identifier_body
|
webviewElement.ts
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { FindInPageOptions, OnBeforeRequestListenerDetails, OnHeadersReceivedListenerDetails, Response, WebContents, WebviewTag } from 'electron';
import { addDisposableListener } from 'vs/base/browser/dom';
import { ThrottledDelayer } from 'vs/base/common/async';
import { Emitter, Event } from 'vs/base/common/event';
import { once } from 'vs/base/common/functional';
import { Disposable, DisposableStore, IDisposable, toDisposable } from 'vs/base/common/lifecycle';
import { isMacintosh } from 'vs/base/common/platform';
import { URI } from 'vs/base/common/uri';
import * as modes from 'vs/editor/common/modes';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { IEnvironmentService } from 'vs/platform/environment/common/environment';
import { IFileService } from 'vs/platform/files/common/files';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { ITunnelService } from 'vs/platform/remote/common/tunnel';
import { ITelemetryService } from 'vs/platform/telemetry/common/telemetry';
import { BaseWebview, WebviewMessageChannels } from 'vs/workbench/contrib/webview/browser/baseWebviewElement';
import { Webview, WebviewContentOptions, WebviewExtensionDescription, WebviewOptions } from 'vs/workbench/contrib/webview/browser/webview';
import { WebviewPortMappingManager } from 'vs/workbench/contrib/webview/common/portMapping';
import { WebviewResourceScheme } from 'vs/workbench/contrib/webview/common/resourceLoader';
import { WebviewThemeDataProvider } from 'vs/workbench/contrib/webview/common/themeing';
import { registerFileProtocol } from 'vs/workbench/contrib/webview/electron-browser/webviewProtocols';
import { IWorkbenchEnvironmentService } from 'vs/workbench/services/environment/common/environmentService';
import { WebviewFindDelegate, WebviewFindWidget } from '../browser/webviewFindWidget';
class WebviewTagHandle extends Disposable {
private _webContents: undefined | WebContents | 'destroyed';
public constructor(
public readonly webview: WebviewTag,
) {
super();
this._register(addDisposableListener(this.webview, 'destroyed', () => {
this._webContents = 'destroyed';
}));
this._register(addDisposableListener(this.webview, 'did-start-loading', once(() => {
const contents = this.webContents;
if (contents) {
this._onFirstLoad.fire(contents);
this._register(toDisposable(() => {
contents.removeAllListeners();
}));
}
})));
}
private readonly _onFirstLoad = this._register(new Emitter<WebContents>());
public readonly onFirstLoad = this._onFirstLoad.event;
public get webContents(): WebContents | undefined {
if (this._webContents === 'destroyed') {
return undefined;
}
if (this._webContents)
|
this._webContents = this.webview.getWebContents();
return this._webContents;
}
}
type OnBeforeRequestDelegate = (details: OnBeforeRequestListenerDetails) => Promise<Response | undefined>;
type OnHeadersReceivedDelegate = (details: OnHeadersReceivedListenerDetails) => { cancel: boolean; } | undefined;
class WebviewSession extends Disposable {
private readonly _onBeforeRequestDelegates: Array<OnBeforeRequestDelegate> = [];
private readonly _onHeadersReceivedDelegates: Array<OnHeadersReceivedDelegate> = [];
public constructor(
webviewHandle: WebviewTagHandle,
) {
super();
this._register(webviewHandle.onFirstLoad(contents => {
contents.session.webRequest.onBeforeRequest(async (details, callback) => {
for (const delegate of this._onBeforeRequestDelegates) {
const result = await delegate(details);
if (typeof result !== 'undefined') {
callback(result);
return;
}
}
callback({});
});
contents.session.webRequest.onHeadersReceived((details, callback) => {
for (const delegate of this._onHeadersReceivedDelegates) {
const result = delegate(details);
if (typeof result !== 'undefined') {
callback(result);
return;
}
}
callback({ cancel: false });
});
}));
}
public onBeforeRequest(delegate: OnBeforeRequestDelegate) {
this._onBeforeRequestDelegates.push(delegate);
}
public onHeadersReceived(delegate: OnHeadersReceivedDelegate) {
this._onHeadersReceivedDelegates.push(delegate);
}
}
class WebviewProtocolProvider extends Disposable {
private _resolve!: () => void;
private _reject!: () => void;
public readonly ready: Promise<void>;
constructor(
handle: WebviewTagHandle,
getExtensionLocation: () => URI | undefined,
getLocalResourceRoots: () => ReadonlyArray<URI>,
fileService: IFileService,
) {
super();
this.ready = new Promise((resolve, reject) => {
this._resolve = resolve;
this._reject = reject;
});
this._register(handle.onFirstLoad(contents => {
try {
registerFileProtocol(contents, WebviewResourceScheme, fileService, getExtensionLocation(), getLocalResourceRoots);
this._resolve();
} catch {
this._reject();
}
}));
}
}
class WebviewPortMappingProvider extends Disposable {
constructor(
session: WebviewSession,
getExtensionLocation: () => URI | undefined,
mappings: () => ReadonlyArray<modes.IWebviewPortMapping>,
tunnelService: ITunnelService,
) {
super();
const manager = this._register(new WebviewPortMappingManager(getExtensionLocation, mappings, tunnelService));
session.onBeforeRequest(async details => {
const redirect = await manager.getRedirect(details.url);
return redirect ? { redirectURL: redirect } : undefined;
});
}
}
class WebviewKeyboardHandler {
private readonly _webviews = new Set<WebviewTagHandle>();
private readonly _isUsingNativeTitleBars: boolean;
constructor(configurationService: IConfigurationService) {
this._isUsingNativeTitleBars = configurationService.getValue<string>('window.titleBarStyle') === 'native';
}
public add(
webviewHandle: WebviewTagHandle,
): IDisposable {
this._webviews.add(webviewHandle);
const disposables = new DisposableStore();
if (this.shouldToggleMenuShortcutsEnablement) {
disposables.add(webviewHandle.onFirstLoad(() => {
this.setIgnoreMenuShortcutsForWebview(webviewHandle, true);
}));
}
disposables.add(addDisposableListener(webviewHandle.webview, 'ipc-message', (event) => {
switch (event.channel) {
case 'did-focus':
this.setIgnoreMenuShortcuts(true);
break;
case 'did-blur':
this.setIgnoreMenuShortcuts(false);
return;
}
}));
return toDisposable(() => {
disposables.dispose();
this._webviews.delete(webviewHandle);
});
}
private get shouldToggleMenuShortcutsEnablement() {
return isMacintosh || this._isUsingNativeTitleBars;
}
private setIgnoreMenuShortcuts(value: boolean) {
for (const webview of this._webviews) {
this.setIgnoreMenuShortcutsForWebview(webview, value);
}
}
private setIgnoreMenuShortcutsForWebview(webview: WebviewTagHandle, value: boolean) {
if (this.shouldToggleMenuShortcutsEnablement) {
const contents = webview.webContents;
if (!contents?.isDestroyed()) {
contents?.setIgnoreMenuShortcuts(value);
}
}
}
}
export class ElectronWebviewBasedWebview extends BaseWebview<WebviewTag> implements Webview, WebviewFindDelegate {
private static _webviewKeyboardHandler: WebviewKeyboardHandler | undefined;
private static getWebviewKeyboardHandler(configService: IConfigurationService) {
if (!this._webviewKeyboardHandler) {
this._webviewKeyboardHandler = new WebviewKeyboardHandler(configService);
}
return this._webviewKeyboardHandler;
}
private _webviewFindWidget: WebviewFindWidget | undefined;
private _findStarted: boolean = false;
public extension: WebviewExtensionDescription | undefined;
private readonly _protocolProvider: WebviewProtocolProvider;
private readonly _domReady: Promise<void>;
private readonly _focusDelayer = this._register(new ThrottledDelayer(10));
private _elementFocusImpl!: (options?: FocusOptions | undefined) => void;
constructor(
id: string,
options: WebviewOptions,
contentOptions: WebviewContentOptions,
private readonly _webviewThemeDataProvider: WebviewThemeDataProvider,
@IInstantiationService instantiationService: IInstantiationService,
@IFileService fileService: IFileService,
@ITunnelService tunnelService: ITunnelService,
@ITelemetryService telemetryService: ITelemetryService,
@IEnvironmentService environementService: IEnvironmentService,
@IWorkbenchEnvironmentService workbenchEnvironmentService: IWorkbenchEnvironmentService,
@IConfigurationService configurationService: IConfigurationService,
) {
super(id, options, contentOptions, _webviewThemeDataProvider, telemetryService, environementService, workbenchEnvironmentService);
const webviewAndContents = this._register(new WebviewTagHandle(this.element!));
const session = this._register(new WebviewSession(webviewAndContents));
this._protocolProvider = new WebviewProtocolProvider(webviewAndContents,
() => this.extension?.location,
() => (this.content.options.localResourceRoots || []),
fileService);
this._register(this._protocolProvider);
this._register(new WebviewPortMappingProvider(
session,
() => this.extension ? this.extension.location : undefined,
() => (this.content.options.portMapping || []),
tunnelService,
));
this._register(ElectronWebviewBasedWebview.getWebviewKeyboardHandler(configurationService).add(webviewAndContents));
this._domReady = new Promise(resolve => {
const subscription = this._register(this.on(WebviewMessageChannels.webviewReady, () => {
subscription.dispose();
resolve();
}));
});
this._register(addDisposableListener(this.element!, 'console-message', function (e: { level: number; message: string; line: number; sourceId: string; }) {
console.log(`[Embedded Page] ${e.message}`);
}));
this._register(addDisposableListener(this.element!, 'dom-ready', () => {
// Workaround for https://github.com/electron/electron/issues/14474
if (this.element && (this.focused || document.activeElement === this.element)) {
this.element.blur();
this.element.focus();
}
}));
this._register(addDisposableListener(this.element!, 'crashed', () => {
console.error('embedded page crashed');
}));
this._register(this.on('synthetic-mouse-event', (rawEvent: any) => {
if (!this.element) {
return;
}
const bounds = this.element.getBoundingClientRect();
try {
window.dispatchEvent(new MouseEvent(rawEvent.type, {
...rawEvent,
clientX: rawEvent.clientX + bounds.left,
clientY: rawEvent.clientY + bounds.top,
}));
return;
} catch {
// CustomEvent was treated as MouseEvent so don't do anything - https://github.com/microsoft/vscode/issues/78915
return;
}
}));
this._register(this.on('did-set-content', () => {
if (this.element) {
this.element.style.flex = '';
this.element.style.width = '100%';
this.element.style.height = '100%';
}
}));
this._register(addDisposableListener(this.element!, 'devtools-opened', () => {
this._send('devtools-opened');
}));
if (options.enableFindWidget) {
this._webviewFindWidget = this._register(instantiationService.createInstance(WebviewFindWidget, this));
this._register(addDisposableListener(this.element!, 'found-in-page', e => {
this._hasFindResult.fire(e.result.matches > 0);
}));
this.styledFindWidget();
}
}
protected createElement(options: WebviewOptions) {
const element = document.createElement('webview');
this._elementFocusImpl = element.focus.bind(element);
element.focus = () => {
this.doFocus();
};
element.setAttribute('partition', `webview${Date.now()}`);
element.setAttribute('webpreferences', 'contextIsolation=yes');
element.className = `webview ${options.customClasses || ''}`;
element.style.flex = '0 1';
element.style.width = '0';
element.style.height = '0';
element.style.outline = '0';
element.preload = require.toUrl('./pre/electron-index.js');
element.src = 'data:text/html;charset=utf-8,%3C%21DOCTYPE%20html%3E%0D%0A%3Chtml%20lang%3D%22en%22%20style%3D%22width%3A%20100%25%3B%20height%3A%20100%25%22%3E%0D%0A%3Chead%3E%0D%0A%3Ctitle%3EVirtual%20Document%3C%2Ftitle%3E%0D%0A%3C%2Fhead%3E%0D%0A%3Cbody%20style%3D%22margin%3A%200%3B%20overflow%3A%20hidden%3B%20width%3A%20100%25%3B%20height%3A%20100%25%22%20role%3D%22document%22%3E%0D%0A%3C%2Fbody%3E%0D%0A%3C%2Fhtml%3E';
return element;
}
protected readonly extraContentOptions = {};
public mountTo(parent: HTMLElement) {
if (!this.element) {
return;
}
if (this._webviewFindWidget) {
parent.appendChild(this._webviewFindWidget.getDomNode()!);
}
parent.appendChild(this.element);
}
protected async postMessage(channel: string, data?: any): Promise<void> {
await Promise.all([
this._protocolProvider.ready,
this._domReady,
]);
this.element?.send(channel, data);
}
public focus(): void {
this.doFocus();
// Handle focus change programmatically (do not rely on event from <webview>)
this.handleFocusChange(true);
}
private doFocus() {
if (!this.element) {
return;
}
// Workaround for https://github.com/microsoft/vscode/issues/75209
// Electron's webview.focus is async so for a sequence of actions such as:
//
// 1. Open webview
		// 2. Show quick pick from command palette
//
// We end up focusing the webview after showing the quick pick, which causes
// the quick pick to instantly dismiss.
//
		// Work around this by debouncing the focus and making sure we are not focused on an input
// when we try to re-focus.
this._focusDelayer.trigger(async () => {
if (!this.focused || !this.element) {
return;
}
if (document.activeElement?.tagName === 'INPUT') {
return;
}
try {
this._elementFocusImpl();
} catch {
// noop
}
this._send('focus');
});
}
protected style(): void {
super.style();
this.styledFindWidget();
}
private styledFindWidget() {
this._webviewFindWidget?.updateTheme(this._webviewThemeDataProvider.getTheme());
}
private readonly _hasFindResult = this._register(new Emitter<boolean>());
public readonly hasFindResult: Event<boolean> = this._hasFindResult.event;
public startFind(value: string, options?: FindInPageOptions) {
if (!value || !this.element) {
return;
}
// ensure options is defined without modifying the original
options = options || {};
// FindNext must be false for a first request
const findOptions: FindInPageOptions = {
forward: options.forward,
findNext: false,
matchCase: options.matchCase,
medialCapitalAsWordStart: options.medialCapitalAsWordStart
};
this._findStarted = true;
this.element.findInPage(value, findOptions);
}
/**
* Webviews expose a stateful find API.
* Successive calls to find will move forward or backward through onFindResults
* depending on the supplied options.
*
* @param value The string to search for. Empty strings are ignored.
*/
public find(value: string, previous: boolean): void {
if (!this.element) {
return;
}
// Searching with an empty value will throw an exception
if (!value) {
return;
}
const options = { findNext: true, forward: !previous };
if (!this._findStarted) {
this.startFind(value, options);
return;
}
this.element.findInPage(value, options);
}
public stopFind(keepSelection?: boolean): void {
this._hasFindResult.fire(false);
if (!this.element) {
return;
}
this._findStarted = false;
this.element.stopFindInPage(keepSelection ? 'keepSelection' : 'clearSelection');
}
public showFind() {
this._webviewFindWidget?.reveal();
}
public hideFind() {
this._webviewFindWidget?.hide();
}
public runFindAction(previous: boolean) {
this._webviewFindWidget?.find(previous);
}
public selectAll() {
this.element?.selectAll();
}
public copy() {
this.element?.copy();
}
public paste() {
this.element?.paste();
}
public cut() {
this.element?.cut();
}
public undo() {
this.element?.undo();
}
public redo() {
this.element?.redo();
}
protected on<T = unknown>(channel: WebviewMessageChannels | string, handler: (data: T) => void): IDisposable {
if (!this.element) {
return Disposable.None;
}
return addDisposableListener(this.element, 'ipc-message', (event) => {
if (!this.element) {
return;
}
if (event.channel === channel && event.args && event.args.length) {
handler(event.args[0]);
}
});
}
}
|
{
return this._webContents;
}
|
conditional_block
|
webviewElement.ts
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { FindInPageOptions, OnBeforeRequestListenerDetails, OnHeadersReceivedListenerDetails, Response, WebContents, WebviewTag } from 'electron';
import { addDisposableListener } from 'vs/base/browser/dom';
import { ThrottledDelayer } from 'vs/base/common/async';
import { Emitter, Event } from 'vs/base/common/event';
import { once } from 'vs/base/common/functional';
import { Disposable, DisposableStore, IDisposable, toDisposable } from 'vs/base/common/lifecycle';
import { isMacintosh } from 'vs/base/common/platform';
import { URI } from 'vs/base/common/uri';
import * as modes from 'vs/editor/common/modes';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { IEnvironmentService } from 'vs/platform/environment/common/environment';
import { IFileService } from 'vs/platform/files/common/files';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { ITunnelService } from 'vs/platform/remote/common/tunnel';
import { ITelemetryService } from 'vs/platform/telemetry/common/telemetry';
import { BaseWebview, WebviewMessageChannels } from 'vs/workbench/contrib/webview/browser/baseWebviewElement';
import { Webview, WebviewContentOptions, WebviewExtensionDescription, WebviewOptions } from 'vs/workbench/contrib/webview/browser/webview';
import { WebviewPortMappingManager } from 'vs/workbench/contrib/webview/common/portMapping';
import { WebviewResourceScheme } from 'vs/workbench/contrib/webview/common/resourceLoader';
import { WebviewThemeDataProvider } from 'vs/workbench/contrib/webview/common/themeing';
import { registerFileProtocol } from 'vs/workbench/contrib/webview/electron-browser/webviewProtocols';
import { IWorkbenchEnvironmentService } from 'vs/workbench/services/environment/common/environmentService';
import { WebviewFindDelegate, WebviewFindWidget } from '../browser/webviewFindWidget';
class WebviewTagHandle extends Disposable {
private _webContents: undefined | WebContents | 'destroyed';
public constructor(
public readonly webview: WebviewTag,
) {
super();
this._register(addDisposableListener(this.webview, 'destroyed', () => {
this._webContents = 'destroyed';
}));
this._register(addDisposableListener(this.webview, 'did-start-loading', once(() => {
const contents = this.webContents;
if (contents) {
this._onFirstLoad.fire(contents);
this._register(toDisposable(() => {
contents.removeAllListeners();
}));
}
})));
}
private readonly _onFirstLoad = this._register(new Emitter<WebContents>());
public readonly onFirstLoad = this._onFirstLoad.event;
public get webContents(): WebContents | undefined {
if (this._webContents === 'destroyed') {
return undefined;
}
if (this._webContents) {
return this._webContents;
}
this._webContents = this.webview.getWebContents();
return this._webContents;
}
}
type OnBeforeRequestDelegate = (details: OnBeforeRequestListenerDetails) => Promise<Response | undefined>;
type OnHeadersReceivedDelegate = (details: OnHeadersReceivedListenerDetails) => { cancel: boolean; } | undefined;
class WebviewSession extends Disposable {
private readonly _onBeforeRequestDelegates: Array<OnBeforeRequestDelegate> = [];
private readonly _onHeadersReceivedDelegates: Array<OnHeadersReceivedDelegate> = [];
public constructor(
webviewHandle: WebviewTagHandle,
) {
super();
this._register(webviewHandle.onFirstLoad(contents => {
contents.session.webRequest.onBeforeRequest(async (details, callback) => {
for (const delegate of this._onBeforeRequestDelegates) {
const result = await delegate(details);
if (typeof result !== 'undefined') {
callback(result);
return;
}
}
callback({});
});
contents.session.webRequest.onHeadersReceived((details, callback) => {
for (const delegate of this._onHeadersReceivedDelegates) {
const result = delegate(details);
if (typeof result !== 'undefined') {
callback(result);
return;
}
}
callback({ cancel: false });
});
}));
}
public onBeforeRequest(delegate: OnBeforeRequestDelegate) {
this._onBeforeRequestDelegates.push(delegate);
}
public onHeadersReceived(delegate: OnHeadersReceivedDelegate) {
this._onHeadersReceivedDelegates.push(delegate);
}
}
class WebviewProtocolProvider extends Disposable {
private _resolve!: () => void;
private _reject!: () => void;
public readonly ready: Promise<void>;
constructor(
handle: WebviewTagHandle,
getExtensionLocation: () => URI | undefined,
getLocalResourceRoots: () => ReadonlyArray<URI>,
fileService: IFileService,
) {
super();
this.ready = new Promise((resolve, reject) => {
this._resolve = resolve;
this._reject = reject;
});
this._register(handle.onFirstLoad(contents => {
try {
registerFileProtocol(contents, WebviewResourceScheme, fileService, getExtensionLocation(), getLocalResourceRoots);
this._resolve();
} catch {
this._reject();
}
}));
}
}
class WebviewPortMappingProvider extends Disposable {
constructor(
session: WebviewSession,
getExtensionLocation: () => URI | undefined,
mappings: () => ReadonlyArray<modes.IWebviewPortMapping>,
tunnelService: ITunnelService,
) {
super();
const manager = this._register(new WebviewPortMappingManager(getExtensionLocation, mappings, tunnelService));
session.onBeforeRequest(async details => {
const redirect = await manager.getRedirect(details.url);
return redirect ? { redirectURL: redirect } : undefined;
});
}
}
class WebviewKeyboardHandler {
private readonly _webviews = new Set<WebviewTagHandle>();
private readonly _isUsingNativeTitleBars: boolean;
constructor(configurationService: IConfigurationService) {
this._isUsingNativeTitleBars = configurationService.getValue<string>('window.titleBarStyle') === 'native';
}
public add(
webviewHandle: WebviewTagHandle,
): IDisposable {
this._webviews.add(webviewHandle);
const disposables = new DisposableStore();
if (this.shouldToggleMenuShortcutsEnablement) {
disposables.add(webviewHandle.onFirstLoad(() => {
this.setIgnoreMenuShortcutsForWebview(webviewHandle, true);
}));
}
disposables.add(addDisposableListener(webviewHandle.webview, 'ipc-message', (event) => {
switch (event.channel) {
case 'did-focus':
this.setIgnoreMenuShortcuts(true);
break;
case 'did-blur':
this.setIgnoreMenuShortcuts(false);
return;
}
}));
return toDisposable(() => {
disposables.dispose();
this._webviews.delete(webviewHandle);
});
}
private get shouldToggleMenuShortcutsEnablement() {
return isMacintosh || this._isUsingNativeTitleBars;
}
|
}
private setIgnoreMenuShortcutsForWebview(webview: WebviewTagHandle, value: boolean) {
if (this.shouldToggleMenuShortcutsEnablement) {
const contents = webview.webContents;
if (!contents?.isDestroyed()) {
contents?.setIgnoreMenuShortcuts(value);
}
}
}
}
export class ElectronWebviewBasedWebview extends BaseWebview<WebviewTag> implements Webview, WebviewFindDelegate {
private static _webviewKeyboardHandler: WebviewKeyboardHandler | undefined;
private static getWebviewKeyboardHandler(configService: IConfigurationService) {
if (!this._webviewKeyboardHandler) {
this._webviewKeyboardHandler = new WebviewKeyboardHandler(configService);
}
return this._webviewKeyboardHandler;
}
private _webviewFindWidget: WebviewFindWidget | undefined;
private _findStarted: boolean = false;
public extension: WebviewExtensionDescription | undefined;
private readonly _protocolProvider: WebviewProtocolProvider;
private readonly _domReady: Promise<void>;
private readonly _focusDelayer = this._register(new ThrottledDelayer(10));
private _elementFocusImpl!: (options?: FocusOptions | undefined) => void;
constructor(
id: string,
options: WebviewOptions,
contentOptions: WebviewContentOptions,
private readonly _webviewThemeDataProvider: WebviewThemeDataProvider,
@IInstantiationService instantiationService: IInstantiationService,
@IFileService fileService: IFileService,
@ITunnelService tunnelService: ITunnelService,
@ITelemetryService telemetryService: ITelemetryService,
@IEnvironmentService environementService: IEnvironmentService,
@IWorkbenchEnvironmentService workbenchEnvironmentService: IWorkbenchEnvironmentService,
@IConfigurationService configurationService: IConfigurationService,
) {
super(id, options, contentOptions, _webviewThemeDataProvider, telemetryService, environementService, workbenchEnvironmentService);
const webviewAndContents = this._register(new WebviewTagHandle(this.element!));
const session = this._register(new WebviewSession(webviewAndContents));
this._protocolProvider = new WebviewProtocolProvider(webviewAndContents,
() => this.extension?.location,
() => (this.content.options.localResourceRoots || []),
fileService);
this._register(this._protocolProvider);
this._register(new WebviewPortMappingProvider(
session,
() => this.extension ? this.extension.location : undefined,
() => (this.content.options.portMapping || []),
tunnelService,
));
this._register(ElectronWebviewBasedWebview.getWebviewKeyboardHandler(configurationService).add(webviewAndContents));
this._domReady = new Promise(resolve => {
const subscription = this._register(this.on(WebviewMessageChannels.webviewReady, () => {
subscription.dispose();
resolve();
}));
});
this._register(addDisposableListener(this.element!, 'console-message', function (e: { level: number; message: string; line: number; sourceId: string; }) {
console.log(`[Embedded Page] ${e.message}`);
}));
this._register(addDisposableListener(this.element!, 'dom-ready', () => {
// Workaround for https://github.com/electron/electron/issues/14474
if (this.element && (this.focused || document.activeElement === this.element)) {
this.element.blur();
this.element.focus();
}
}));
this._register(addDisposableListener(this.element!, 'crashed', () => {
console.error('embedded page crashed');
}));
this._register(this.on('synthetic-mouse-event', (rawEvent: any) => {
if (!this.element) {
return;
}
const bounds = this.element.getBoundingClientRect();
try {
window.dispatchEvent(new MouseEvent(rawEvent.type, {
...rawEvent,
clientX: rawEvent.clientX + bounds.left,
clientY: rawEvent.clientY + bounds.top,
}));
return;
} catch {
// CustomEvent was treated as MouseEvent so don't do anything - https://github.com/microsoft/vscode/issues/78915
return;
}
}));
this._register(this.on('did-set-content', () => {
if (this.element) {
this.element.style.flex = '';
this.element.style.width = '100%';
this.element.style.height = '100%';
}
}));
this._register(addDisposableListener(this.element!, 'devtools-opened', () => {
this._send('devtools-opened');
}));
if (options.enableFindWidget) {
this._webviewFindWidget = this._register(instantiationService.createInstance(WebviewFindWidget, this));
this._register(addDisposableListener(this.element!, 'found-in-page', e => {
this._hasFindResult.fire(e.result.matches > 0);
}));
this.styledFindWidget();
}
}
protected createElement(options: WebviewOptions) {
const element = document.createElement('webview');
this._elementFocusImpl = element.focus.bind(element);
element.focus = () => {
this.doFocus();
};
element.setAttribute('partition', `webview${Date.now()}`);
element.setAttribute('webpreferences', 'contextIsolation=yes');
element.className = `webview ${options.customClasses || ''}`;
element.style.flex = '0 1';
element.style.width = '0';
element.style.height = '0';
element.style.outline = '0';
element.preload = require.toUrl('./pre/electron-index.js');
element.src = 'data:text/html;charset=utf-8,%3C%21DOCTYPE%20html%3E%0D%0A%3Chtml%20lang%3D%22en%22%20style%3D%22width%3A%20100%25%3B%20height%3A%20100%25%22%3E%0D%0A%3Chead%3E%0D%0A%3Ctitle%3EVirtual%20Document%3C%2Ftitle%3E%0D%0A%3C%2Fhead%3E%0D%0A%3Cbody%20style%3D%22margin%3A%200%3B%20overflow%3A%20hidden%3B%20width%3A%20100%25%3B%20height%3A%20100%25%22%20role%3D%22document%22%3E%0D%0A%3C%2Fbody%3E%0D%0A%3C%2Fhtml%3E';
return element;
}
protected readonly extraContentOptions = {};
public mountTo(parent: HTMLElement) {
if (!this.element) {
return;
}
if (this._webviewFindWidget) {
parent.appendChild(this._webviewFindWidget.getDomNode()!);
}
parent.appendChild(this.element);
}
protected async postMessage(channel: string, data?: any): Promise<void> {
await Promise.all([
this._protocolProvider.ready,
this._domReady,
]);
this.element?.send(channel, data);
}
public focus(): void {
this.doFocus();
// Handle focus change programmatically (do not rely on event from <webview>)
this.handleFocusChange(true);
}
private doFocus() {
if (!this.element) {
return;
}
// Workaround for https://github.com/microsoft/vscode/issues/75209
// Electron's webview.focus is async so for a sequence of actions such as:
//
// 1. Open webview
		// 2. Show quick pick from command palette
//
// We end up focusing the webview after showing the quick pick, which causes
// the quick pick to instantly dismiss.
//
		// Work around this by debouncing the focus and making sure we are not focused on an input
// when we try to re-focus.
this._focusDelayer.trigger(async () => {
if (!this.focused || !this.element) {
return;
}
if (document.activeElement?.tagName === 'INPUT') {
return;
}
try {
this._elementFocusImpl();
} catch {
// noop
}
this._send('focus');
});
}
protected style(): void {
super.style();
this.styledFindWidget();
}
private styledFindWidget() {
this._webviewFindWidget?.updateTheme(this._webviewThemeDataProvider.getTheme());
}
private readonly _hasFindResult = this._register(new Emitter<boolean>());
public readonly hasFindResult: Event<boolean> = this._hasFindResult.event;
public startFind(value: string, options?: FindInPageOptions) {
if (!value || !this.element) {
return;
}
// ensure options is defined without modifying the original
options = options || {};
// FindNext must be false for a first request
const findOptions: FindInPageOptions = {
forward: options.forward,
findNext: false,
matchCase: options.matchCase,
medialCapitalAsWordStart: options.medialCapitalAsWordStart
};
this._findStarted = true;
this.element.findInPage(value, findOptions);
}
/**
* Webviews expose a stateful find API.
* Successive calls to find will move forward or backward through onFindResults
* depending on the supplied options.
*
* @param value The string to search for. Empty strings are ignored.
*/
public find(value: string, previous: boolean): void {
if (!this.element) {
return;
}
// Searching with an empty value will throw an exception
if (!value) {
return;
}
const options = { findNext: true, forward: !previous };
if (!this._findStarted) {
this.startFind(value, options);
return;
}
this.element.findInPage(value, options);
}
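// Illustrative driver for the stateful find API above (not part of the
// original source; `webview` stands for an instance of this class):
//
//   webview.startFind('needle');    // first request, findNext must be false
//   webview.find('needle', false);  // advance to the next match
//   webview.find('needle', true);   // step back to the previous match
//   webview.stopFind(true);         // stop searching, keep the selection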
public stopFind(keepSelection?: boolean): void {
this._hasFindResult.fire(false);
if (!this.element) {
return;
}
this._findStarted = false;
this.element.stopFindInPage(keepSelection ? 'keepSelection' : 'clearSelection');
}
public showFind() {
this._webviewFindWidget?.reveal();
}
public hideFind() {
this._webviewFindWidget?.hide();
}
public runFindAction(previous: boolean) {
this._webviewFindWidget?.find(previous);
}
public selectAll() {
this.element?.selectAll();
}
public copy() {
this.element?.copy();
}
public paste() {
this.element?.paste();
}
public cut() {
this.element?.cut();
}
public undo() {
this.element?.undo();
}
public redo() {
this.element?.redo();
}
protected on<T = unknown>(channel: WebviewMessageChannels | string, handler: (data: T) => void): IDisposable {
if (!this.element) {
return Disposable.None;
}
return addDisposableListener(this.element, 'ipc-message', (event) => {
if (!this.element) {
return;
}
if (event.channel === channel && event.args && event.args.length) {
handler(event.args[0]);
}
});
}
}
|
private setIgnoreMenuShortcuts(value: boolean) {
for (const webview of this._webviews) {
this.setIgnoreMenuShortcutsForWebview(webview, value);
}
|
random_line_split
|
webviewElement.ts
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { FindInPageOptions, OnBeforeRequestListenerDetails, OnHeadersReceivedListenerDetails, Response, WebContents, WebviewTag } from 'electron';
import { addDisposableListener } from 'vs/base/browser/dom';
import { ThrottledDelayer } from 'vs/base/common/async';
import { Emitter, Event } from 'vs/base/common/event';
import { once } from 'vs/base/common/functional';
import { Disposable, DisposableStore, IDisposable, toDisposable } from 'vs/base/common/lifecycle';
import { isMacintosh } from 'vs/base/common/platform';
import { URI } from 'vs/base/common/uri';
import * as modes from 'vs/editor/common/modes';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { IEnvironmentService } from 'vs/platform/environment/common/environment';
import { IFileService } from 'vs/platform/files/common/files';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { ITunnelService } from 'vs/platform/remote/common/tunnel';
import { ITelemetryService } from 'vs/platform/telemetry/common/telemetry';
import { BaseWebview, WebviewMessageChannels } from 'vs/workbench/contrib/webview/browser/baseWebviewElement';
import { Webview, WebviewContentOptions, WebviewExtensionDescription, WebviewOptions } from 'vs/workbench/contrib/webview/browser/webview';
import { WebviewPortMappingManager } from 'vs/workbench/contrib/webview/common/portMapping';
import { WebviewResourceScheme } from 'vs/workbench/contrib/webview/common/resourceLoader';
import { WebviewThemeDataProvider } from 'vs/workbench/contrib/webview/common/themeing';
import { registerFileProtocol } from 'vs/workbench/contrib/webview/electron-browser/webviewProtocols';
import { IWorkbenchEnvironmentService } from 'vs/workbench/services/environment/common/environmentService';
import { WebviewFindDelegate, WebviewFindWidget } from '../browser/webviewFindWidget';
class WebviewTagHandle extends Disposable {
private _webContents: undefined | WebContents | 'destroyed';
public constructor(
public readonly webview: WebviewTag,
) {
super();
this._register(addDisposableListener(this.webview, 'destroyed', () => {
this._webContents = 'destroyed';
}));
this._register(addDisposableListener(this.webview, 'did-start-loading', once(() => {
const contents = this.webContents;
if (contents) {
this._onFirstLoad.fire(contents);
this._register(toDisposable(() => {
contents.removeAllListeners();
}));
}
})));
}
private readonly _onFirstLoad = this._register(new Emitter<WebContents>());
public readonly onFirstLoad = this._onFirstLoad.event;
public get webContents(): WebContents | undefined {
if (this._webContents === 'destroyed') {
return undefined;
}
if (this._webContents) {
return this._webContents;
}
this._webContents = this.webview.getWebContents();
return this._webContents;
}
}
type OnBeforeRequestDelegate = (details: OnBeforeRequestListenerDetails) => Promise<Response | undefined>;
type OnHeadersReceivedDelegate = (details: OnHeadersReceivedListenerDetails) => { cancel: boolean; } | undefined;
class WebviewSession extends Disposable {
private readonly _onBeforeRequestDelegates: Array<OnBeforeRequestDelegate> = [];
private readonly _onHeadersReceivedDelegates: Array<OnHeadersReceivedDelegate> = [];
public constructor(
webviewHandle: WebviewTagHandle,
) {
super();
this._register(webviewHandle.onFirstLoad(contents => {
contents.session.webRequest.onBeforeRequest(async (details, callback) => {
for (const delegate of this._onBeforeRequestDelegates) {
const result = await delegate(details);
if (typeof result !== 'undefined') {
callback(result);
return;
}
}
callback({});
});
contents.session.webRequest.onHeadersReceived((details, callback) => {
for (const delegate of this._onHeadersReceivedDelegates) {
const result = delegate(details);
if (typeof result !== 'undefined') {
callback(result);
return;
}
}
callback({ cancel: false });
});
}));
}
public onBeforeRequest(delegate: OnBeforeRequestDelegate) {
this._onBeforeRequestDelegates.push(delegate);
}
public onHeadersReceived(delegate: OnHeadersReceivedDelegate) {
this._onHeadersReceivedDelegates.push(delegate);
}
}
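// Illustrative delegate registration (an assumption, not from the original
// source): delegates run in order and the first non-undefined result wins,
// so a simple blocking rule could look like:
//
//   session.onBeforeRequest(async details =>
//     details.url.startsWith('http:') ? { cancel: true } : undefined);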
class WebviewProtocolProvider extends Disposable {
private _resolve!: () => void;
private _reject!: () => void;
public readonly ready: Promise<void>;
constructor(
handle: WebviewTagHandle,
getExtensionLocation: () => URI | undefined,
getLocalResourceRoots: () => ReadonlyArray<URI>,
fileService: IFileService,
) {
super();
this.ready = new Promise((resolve, reject) => {
this._resolve = resolve;
this._reject = reject;
});
this._register(handle.onFirstLoad(contents => {
try {
registerFileProtocol(contents, WebviewResourceScheme, fileService, getExtensionLocation(), getLocalResourceRoots);
this._resolve();
} catch {
this._reject();
}
}));
}
}
class WebviewPortMappingProvider extends Disposable {
constructor(
session: WebviewSession,
getExtensionLocation: () => URI | undefined,
mappings: () => ReadonlyArray<modes.IWebviewPortMapping>,
tunnelService: ITunnelService,
) {
super();
const manager = this._register(new WebviewPortMappingManager(getExtensionLocation, mappings, tunnelService));
session.onBeforeRequest(async details => {
const redirect = await manager.getRedirect(details.url);
return redirect ? { redirectURL: redirect } : undefined;
});
}
}
class WebviewKeyboardHandler {
private readonly _webviews = new Set<WebviewTagHandle>();
private readonly _isUsingNativeTitleBars: boolean;
constructor(configurationService: IConfigurationService) {
this._isUsingNativeTitleBars = configurationService.getValue<string>('window.titleBarStyle') === 'native';
}
public add(
webviewHandle: WebviewTagHandle,
): IDisposable {
this._webviews.add(webviewHandle);
const disposables = new DisposableStore();
if (this.shouldToggleMenuShortcutsEnablement) {
disposables.add(webviewHandle.onFirstLoad(() => {
this.setIgnoreMenuShortcutsForWebview(webviewHandle, true);
}));
}
disposables.add(addDisposableListener(webviewHandle.webview, 'ipc-message', (event) => {
switch (event.channel) {
case 'did-focus':
this.setIgnoreMenuShortcuts(true);
break;
case 'did-blur':
this.setIgnoreMenuShortcuts(false);
return;
}
}));
return toDisposable(() => {
disposables.dispose();
this._webviews.delete(webviewHandle);
});
}
private get shouldToggleMenuShortcutsEnablement() {
return isMacintosh || this._isUsingNativeTitleBars;
}
private setIgnoreMenuShortcuts(value: boolean) {
for (const webview of this._webviews) {
this.setIgnoreMenuShortcutsForWebview(webview, value);
}
}
private setIgnoreMenuShortcutsForWebview(webview: WebviewTagHandle, value: boolean) {
if (this.shouldToggleMenuShortcutsEnablement) {
const contents = webview.webContents;
if (!contents?.isDestroyed()) {
contents?.setIgnoreMenuShortcuts(value);
}
}
}
}
export class ElectronWebviewBasedWebview extends BaseWebview<WebviewTag> implements Webview, WebviewFindDelegate {
private static _webviewKeyboardHandler: WebviewKeyboardHandler | undefined;
private static getWebviewKeyboardHandler(configService: IConfigurationService) {
if (!this._webviewKeyboardHandler) {
this._webviewKeyboardHandler = new WebviewKeyboardHandler(configService);
}
return this._webviewKeyboardHandler;
}
private _webviewFindWidget: WebviewFindWidget | undefined;
private _findStarted: boolean = false;
public extension: WebviewExtensionDescription | undefined;
private readonly _protocolProvider: WebviewProtocolProvider;
private readonly _domReady: Promise<void>;
private readonly _focusDelayer = this._register(new ThrottledDelayer(10));
private _elementFocusImpl!: (options?: FocusOptions | undefined) => void;
constructor(
id: string,
options: WebviewOptions,
contentOptions: WebviewContentOptions,
private readonly _webviewThemeDataProvider: WebviewThemeDataProvider,
@IInstantiationService instantiationService: IInstantiationService,
@IFileService fileService: IFileService,
@ITunnelService tunnelService: ITunnelService,
@ITelemetryService telemetryService: ITelemetryService,
@IEnvironmentService environmentService: IEnvironmentService,
@IWorkbenchEnvironmentService workbenchEnvironmentService: IWorkbenchEnvironmentService,
@IConfigurationService configurationService: IConfigurationService,
) {
super(id, options, contentOptions, _webviewThemeDataProvider, telemetryService, environmentService, workbenchEnvironmentService);
const webviewAndContents = this._register(new WebviewTagHandle(this.element!));
const session = this._register(new WebviewSession(webviewAndContents));
this._protocolProvider = new WebviewProtocolProvider(webviewAndContents,
() => this.extension?.location,
() => (this.content.options.localResourceRoots || []),
fileService);
this._register(this._protocolProvider);
this._register(new WebviewPortMappingProvider(
session,
() => this.extension ? this.extension.location : undefined,
() => (this.content.options.portMapping || []),
tunnelService,
));
this._register(ElectronWebviewBasedWebview.getWebviewKeyboardHandler(configurationService).add(webviewAndContents));
this._domReady = new Promise(resolve => {
const subscription = this._register(this.on(WebviewMessageChannels.webviewReady, () => {
subscription.dispose();
resolve();
}));
});
this._register(addDisposableListener(this.element!, 'console-message', function (e: { level: number; message: string; line: number; sourceId: string; }) {
console.log(`[Embedded Page] ${e.message}`);
}));
this._register(addDisposableListener(this.element!, 'dom-ready', () => {
// Workaround for https://github.com/electron/electron/issues/14474
if (this.element && (this.focused || document.activeElement === this.element)) {
this.element.blur();
this.element.focus();
}
}));
this._register(addDisposableListener(this.element!, 'crashed', () => {
console.error('embedded page crashed');
}));
this._register(this.on('synthetic-mouse-event', (rawEvent: any) => {
if (!this.element) {
return;
}
const bounds = this.element.getBoundingClientRect();
try {
window.dispatchEvent(new MouseEvent(rawEvent.type, {
...rawEvent,
clientX: rawEvent.clientX + bounds.left,
clientY: rawEvent.clientY + bounds.top,
}));
return;
} catch {
// CustomEvent was treated as MouseEvent so don't do anything - https://github.com/microsoft/vscode/issues/78915
return;
}
}));
this._register(this.on('did-set-content', () => {
if (this.element) {
this.element.style.flex = '';
this.element.style.width = '100%';
this.element.style.height = '100%';
}
}));
this._register(addDisposableListener(this.element!, 'devtools-opened', () => {
this._send('devtools-opened');
}));
if (options.enableFindWidget) {
this._webviewFindWidget = this._register(instantiationService.createInstance(WebviewFindWidget, this));
this._register(addDisposableListener(this.element!, 'found-in-page', e => {
this._hasFindResult.fire(e.result.matches > 0);
}));
this.styledFindWidget();
}
}
protected createElement(options: WebviewOptions) {
const element = document.createElement('webview');
this._elementFocusImpl = element.focus.bind(element);
element.focus = () => {
this.doFocus();
};
element.setAttribute('partition', `webview${Date.now()}`);
element.setAttribute('webpreferences', 'contextIsolation=yes');
element.className = `webview ${options.customClasses || ''}`;
element.style.flex = '0 1';
element.style.width = '0';
element.style.height = '0';
element.style.outline = '0';
element.preload = require.toUrl('./pre/electron-index.js');
element.src = 'data:text/html;charset=utf-8,%3C%21DOCTYPE%20html%3E%0D%0A%3Chtml%20lang%3D%22en%22%20style%3D%22width%3A%20100%25%3B%20height%3A%20100%25%22%3E%0D%0A%3Chead%3E%0D%0A%3Ctitle%3EVirtual%20Document%3C%2Ftitle%3E%0D%0A%3C%2Fhead%3E%0D%0A%3Cbody%20style%3D%22margin%3A%200%3B%20overflow%3A%20hidden%3B%20width%3A%20100%25%3B%20height%3A%20100%25%22%20role%3D%22document%22%3E%0D%0A%3C%2Fbody%3E%0D%0A%3C%2Fhtml%3E';
return element;
}
protected readonly extraContentOptions = {};
public mountTo(parent: HTMLElement) {
if (!this.element) {
return;
}
if (this._webviewFindWidget) {
parent.appendChild(this._webviewFindWidget.getDomNode()!);
}
parent.appendChild(this.element);
}
protected async postMessage(channel: string, data?: any): Promise<void> {
await Promise.all([
this._protocolProvider.ready,
this._domReady,
]);
this.element?.send(channel, data);
}
public focus(): void {
this.doFocus();
// Handle focus change programmatically (do not rely on event from <webview>)
this.handleFocusChange(true);
}
private doFocus() {
if (!this.element) {
return;
}
// Workaround for https://github.com/microsoft/vscode/issues/75209
// Electron's webview.focus is async so for a sequence of actions such as:
//
// 1. Open webview
// 2. Show quick pick from command palette
//
// We end up focusing the webview after showing the quick pick, which causes
// the quick pick to instantly dismiss.
//
// Work around this by debouncing the focus and making sure we are not focused on an input
// when we try to re-focus.
this._focusDelayer.trigger(async () => {
if (!this.focused || !this.element) {
return;
}
if (document.activeElement?.tagName === 'INPUT') {
return;
}
try {
this._elementFocusImpl();
} catch {
// noop
}
this._send('focus');
});
}
protected style(): void {
super.style();
this.styledFindWidget();
}
private styledFindWidget() {
this._webviewFindWidget?.updateTheme(this._webviewThemeDataProvider.getTheme());
}
private readonly _hasFindResult = this._register(new Emitter<boolean>());
public readonly hasFindResult: Event<boolean> = this._hasFindResult.event;
public startFind(value: string, options?: FindInPageOptions) {
if (!value || !this.element) {
return;
}
// ensure options is defined without modifying the original
options = options || {};
// FindNext must be false for a first request
const findOptions: FindInPageOptions = {
forward: options.forward,
findNext: false,
matchCase: options.matchCase,
medialCapitalAsWordStart: options.medialCapitalAsWordStart
};
this._findStarted = true;
this.element.findInPage(value, findOptions);
}
/**
* Webviews expose a stateful find API.
* Successive calls to find will move forward or backward through onFindResults
* depending on the supplied options.
*
* @param value The string to search for. Empty strings are ignored.
*/
public find(value: string, previous: boolean): void {
if (!this.element) {
return;
}
// Searching with an empty value will throw an exception
if (!value) {
return;
}
const options = { findNext: true, forward: !previous };
if (!this._findStarted) {
this.startFind(value, options);
return;
}
this.element.findInPage(value, options);
}
public stopFind(keepSelection?: boolean): void {
this._hasFindResult.fire(false);
if (!this.element) {
return;
}
this._findStarted = false;
this.element.stopFindInPage(keepSelection ? 'keepSelection' : 'clearSelection');
}
public showFind() {
this._webviewFindWidget?.reveal();
}
public hideFind() {
this._webviewFindWidget?.hide();
}
public runFindAction(previous: boolean) {
this._webviewFindWidget?.find(previous);
}
public selectAll() {
this.element?.selectAll();
}
public copy() {
this.element?.copy();
}
public paste() {
this.element?.paste();
}
public
|
() {
this.element?.cut();
}
public undo() {
this.element?.undo();
}
public redo() {
this.element?.redo();
}
protected on<T = unknown>(channel: WebviewMessageChannels | string, handler: (data: T) => void): IDisposable {
if (!this.element) {
return Disposable.None;
}
return addDisposableListener(this.element, 'ipc-message', (event) => {
if (!this.element) {
return;
}
if (event.channel === channel && event.args && event.args.length) {
handler(event.args[0]);
}
});
}
}
|
cut
|
identifier_name
|
test_securecookies.py
|
#coding: utf-8
import unittest
import bottle
from bottle import tob, touni
class TestSecureCookies(unittest.TestCase):
def setUp(self):
self.data = dict(a=5, b=touni('υηι¢σ∂є'), c=[1,2,3,4,tob('bytestring')])
self.key = tob('secret')
def testDeEncode(self):
cookie = bottle.cookie_encode(self.data, self.key)
decoded = bottle.cookie_decode(cookie, self.key)
self.assertEqual(self.data, decoded)
decoded = bottle.cookie_decode(cookie+tob('x'), self.key)
self.assertEqual(None, decoded)
def testIsEncoded(self):
cookie = bottle.cookie_encode(self.data, self.key)
self.assertTrue(bottle.cookie_is_encoded(cookie))
self.assertFalse(bottle.cookie_is_encoded(tob('some string')))
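# Illustrative round trip outside the test harness (same bottle helpers as
# above; the key and payload are examples only):
#   signed = bottle.cookie_encode({'user': 'alice'}, tob('secret'))
#   assert bottle.cookie_decode(signed, tob('secret')) == {'user': 'alice'}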
class TestSecureCookiesInBottle(unittest.TestCase):
def setUp(self):
self.data = dict(a=5, b=touni('υηι¢σ∂є'), c=[1,2,3,4,tob('bytestring')])
self.secret = tob('secret')
bottle.app.push()
bottle.response.bind()
def tearDown(self):
bottle.app.pop()
def get_pairs(self):
for k, v in bottle.response.headerlist:
if k == 'Set-Cookie':
key, value = v.split(';')[0].split('=', 1)
yield key.lower().strip(), value.strip()
def set_pairs(self, pairs):
header = ','.join(['%s=%s' % (k, v) for k, v in pairs])
bottle.request.bind({'HTTP_COOKIE': header})
def testValid(self):
bottle.response.set_cookie('key', self.data, secret=self.secret)
pairs = self.get_pairs()
self.set_pairs(pairs)
result = bottle.request.get_cookie('key', secret=self.secret)
self.assertEqual(self.data, result)
|
pairs = self.get_pairs()
self.set_pairs([(k+'xxx', v) for (k, v) in pairs])
result = bottle.request.get_cookie('key', secret=self.secret)
self.assertEqual(None, result)
if __name__ == '__main__': #pragma: no cover
unittest.main()
|
def testWrongKey(self):
bottle.response.set_cookie('key', self.data, secret=self.secret)
|
random_line_split
|
test_securecookies.py
|
#coding: utf-8
import unittest
import bottle
from bottle import tob, touni
class TestSecureCookies(unittest.TestCase):
def setUp(self):
self.data = dict(a=5, b=touni('υηι¢σ∂є'), c=[1,2,3,4,tob('bytestring')])
self.key = tob('secret')
def testDeEncode(self):
cookie = bottle.cookie_encode(self.data, self.key)
decoded = bottle.cookie_decode(cookie, self.key)
self.assertEqual(self.data, decoded)
decoded = bottle.cookie_decode(cookie+tob('x'), self.key)
self.assertEqual(None, decoded)
def testIsEncoded(self):
cookie = bottle.cookie_encode(self.data, self.key)
self.assertTrue(bottle.cookie_is_encoded(cookie))
self.assertFalse(bottle.cookie_is_encoded(tob('some string')))
class TestSecureCookiesInBottle(unittest.TestCase):
def setUp(self):
self.dat
|
own(self):
bottle.app.pop()
def get_pairs(self):
for k, v in bottle.response.headerlist:
if k == 'Set-Cookie':
key, value = v.split(';')[0].split('=', 1)
yield key.lower().strip(), value.strip()
def set_pairs(self, pairs):
header = ','.join(['%s=%s' % (k, v) for k, v in pairs])
bottle.request.bind({'HTTP_COOKIE': header})
def testValid(self):
bottle.response.set_cookie('key', self.data, secret=self.secret)
pairs = self.get_pairs()
self.set_pairs(pairs)
result = bottle.request.get_cookie('key', secret=self.secret)
self.assertEqual(self.data, result)
def testWrongKey(self):
bottle.response.set_cookie('key', self.data, secret=self.secret)
pairs = self.get_pairs()
self.set_pairs([(k+'xxx', v) for (k, v) in pairs])
result = bottle.request.get_cookie('key', secret=self.secret)
self.assertEqual(None, result)
if __name__ == '__main__': #pragma: no cover
unittest.main()
|
a = dict(a=5, b=touni('υηι¢σ∂є'), c=[1,2,3,4,tob('bytestring')])
self.secret = tob('secret')
bottle.app.push()
bottle.response.bind()
def tear_d
|
identifier_body
|
test_securecookies.py
|
#coding: utf-8
import unittest
import bottle
from bottle import tob, touni
class TestSecureCookies(unittest.TestCase):
def setUp(self):
self.data = dict(a=5, b=touni('υηι¢σ∂є'), c=[1,2,3,4,tob('bytestring')])
self.key = tob('secret')
def testDeEncode(self):
cookie = bottle.cookie_encode(self.data, self.key)
decoded = bottle.cookie_decode(cookie, self.key)
self.assertEqual(self.data, decoded)
decoded = bottle.cookie_decode(cookie+tob('x'), self.key)
self.assertEqual(None, decoded)
def testIsEncoded(self):
cookie = bottle.cookie_encode(self.data, self.key)
self.assertTrue(bottle.cookie_is_encoded(cookie))
self.assertFalse(bottle.cookie_is_encoded(tob('some string')))
class TestSecureCookiesInBottle(unittest.TestCase):
def setUp(self):
self.data = dict(a=5, b=touni('υηι¢σ∂є'), c=[1,2,3,4,tob('bytestring')])
self.secret = tob('secret')
bottle.app.push()
bottle.response.bind()
def tearDown(self):
|
bottle.app.pop()
def get_pairs(self):
for k, v in bottle.response.headerlist:
if k == 'Set-Cookie':
key, value = v.split(';')[0].split('=', 1)
yield key.lower().strip(), value.strip()
def set_pairs(self, pairs):
header = ','.join(['%s=%s' % (k, v) for k, v in pairs])
bottle.request.bind({'HTTP_COOKIE': header})
def testValid(self):
bottle.response.set_cookie('key', self.data, secret=self.secret)
pairs = self.get_pairs()
self.set_pairs(pairs)
result = bottle.request.get_cookie('key', secret=self.secret)
self.assertEqual(self.data, result)
def testWrongKey(self):
bottle.response.set_cookie('key', self.data, secret=self.secret)
pairs = self.get_pairs()
self.set_pairs([(k+'xxx', v) for (k, v) in pairs])
result = bottle.request.get_cookie('key', secret=self.secret)
self.assertEqual(None, result)
if __name__ == '__main__': #pragma: no cover
unittest.main()
|
identifier_name
|
|
test_securecookies.py
|
#coding: utf-8
import unittest
import bottle
from bottle import tob, touni
class TestSecureCookies(unittest.TestCase):
def setUp(self):
self.data = dict(a=5, b=touni('υηι¢σ∂є'), c=[1,2,3,4,tob('bytestring')])
self.key = tob('secret')
def testDeEncode(self):
cookie = bottle.cookie_encode(self.data, self.key)
decoded = bottle.cookie_decode(cookie, self.key)
self.assertEqual(self.data, decoded)
decoded = bottle.cookie_decode(cookie+tob('x'), self.key)
self.assertEqual(None, decoded)
def testIsEncoded(self):
cookie = bottle.cookie_encode(self.data, self.key)
self.assertTrue(bottle.cookie_is_encoded(cookie))
self.assertFalse(bottle.cookie_is_encoded(tob('some string')))
class TestSecureCookiesInBottle(unittest.TestCase):
def setUp(self):
self.data = dict(a=5, b=touni('υηι¢σ∂є'), c=[1,2,3,4,tob('bytestring')])
self.secret = tob('secret')
bottle.app.push()
bottle.response.bind()
def tearDown(self):
bottle.app.pop()
def get_pairs(self):
for k, v in bottle.response.headerlist:
if k == 'Set-Cookie':
key, value = v.s
|
t_pairs(self, pairs):
header = ','.join(['%s=%s' % (k, v) for k, v in pairs])
bottle.request.bind({'HTTP_COOKIE': header})
def testValid(self):
bottle.response.set_cookie('key', self.data, secret=self.secret)
pairs = self.get_pairs()
self.set_pairs(pairs)
result = bottle.request.get_cookie('key', secret=self.secret)
self.assertEqual(self.data, result)
def testWrongKey(self):
bottle.response.set_cookie('key', self.data, secret=self.secret)
pairs = self.get_pairs()
self.set_pairs([(k+'xxx', v) for (k, v) in pairs])
result = bottle.request.get_cookie('key', secret=self.secret)
self.assertEqual(None, result)
if __name__ == '__main__': #pragma: no cover
unittest.main()
|
plit(';')[0].split('=', 1)
yield key.lower().strip(), value.strip()
def se
|
conditional_block
|
settings_base.py
|
"""private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from lib.settings_base import (
CACHE_PREFIX, ES_INDEXES, KNOWN_PROXIES, LOGGING, CSP_SCRIPT_SRC,
CSP_FRAME_SRC)
from .. import splitstrip
import private_base as private
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
REDIRECT_SECRET_KEY = private.REDIRECT_SECRET_KEY
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'mysql_pool'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'mysql_pool'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
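# Note (illustrative, not from the original file): dj_database_url.parse turns
# a URL such as 'mysql://user:pw@host:3306/dbname' into a Django-style dict
# with ENGINE/NAME/USER/PASSWORD/HOST/PORT keys; the ENGINE entries are then
# overridden above to route through the pooled 'mysql_pool' backend.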
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
DATABASE_POOL_ARGS = {
'max_overflow': 10,
'pool_size': 5,
'recycle': 30
}
SLAVE_DATABASES = ['slave']
CACHES = {
'default': {
'BACKEND': 'caching.backends.memcached.CacheClass',
#'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
#'BACKEND': 'memcachepool.cache.UMemcacheCache',
'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
'TIMEOUT': 500,
'KEY_PREFIX': CACHE_PREFIX,
},
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
# Celery
BROKER_URL = private.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
MIRROR_STAGE_PATH = private.NETAPP_STORAGE_ROOT + '/public-staging'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTION_ICONS_PATH = UPLOADS_PATH + '/collection_icons'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
HERA = []
LOGGING['loggers'].update({
'z.task': {'level': logging.DEBUG},
'z.hera': {'level': logging.INFO},
'z.redis': {'level': logging.DEBUG},
'z.pool': {'level': logging.ERROR},
})
REDIS_BACKEND = private.REDIS_BACKENDS_CACHE
REDIS_BACKENDS = {
'cache': private.REDIS_BACKENDS_CACHE,
'cache_slave': private.REDIS_BACKENDS_CACHE_SLAVE,
'master': private.REDIS_BACKENDS_MASTER,
'slave': private.REDIS_BACKENDS_SLAVE,
}
CACHE_MACHINE_USE_REDIS = True
RECAPTCHA_PUBLIC_KEY = private.RECAPTCHA_PUBLIC_KEY
RECAPTCHA_PRIVATE_KEY = private.RECAPTCHA_PRIVATE_KEY
RECAPTCHA_URL = (
'https://www.google.com/recaptcha/api/challenge?k=%s' %
|
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
PERF_THRESHOLD = 20
SPIDERMONKEY = '/usr/bin/tracemonkey'
# Remove DetectMobileMiddleware from middleware in production.
detect = 'mobility.middleware.DetectMobileMiddleware'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'marketplace-identity-stage'
BUILDER_SECRET_KEY = private.BUILDER_SECRET_KEY
BUILDER_VERSIONS_URL = (
"https://builder-addons.allizom.org/repackage/sdk-versions/")
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_identity_stage' % v) for k, v in ES_INDEXES.items())
BUILDER_UPGRADE_URL = "https://builder-addons.allizom.org/repackage/rebuild/"
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
GRAPHITE_HOST = private.GRAPHITE_HOST
GRAPHITE_PORT = private.GRAPHITE_PORT
GRAPHITE_PREFIX = private.GRAPHITE_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = True
KNOWN_PROXIES += ['10.2.83.105',
'10.2.83.106',
'10.2.83.107',
'10.8.83.200',
'10.8.83.201',
'10.8.83.202',
'10.8.83.203',
'10.8.83.204',
'10.8.83.210',
'10.8.83.211',
'10.8.83.212',
'10.8.83.213',
'10.8.83.214',
'10.8.83.215',
'10.8.83.251',
'10.8.83.252',
'10.8.83.253',
]
NEW_FEATURES = True
PERF_TEST_URL = (
'http://talos-addon-master1.amotest.scl1.mozilla.com/trigger/trigger.cgi')
REDIRECT_URL = 'https://outgoing.allizom.org/v1/'
CLEANCSS_BIN = 'cleancss'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 240
LESS_PREPROCESS = True
XSENDFILE_HEADER = 'X-Accel-Redirect'
ALLOW_SELF_REVIEWS = True
GEOIP_URL = 'http://geo.marketplace.allizom.org'
API_THROTTLE = False
CSP_SCRIPT_SRC = CSP_SCRIPT_SRC + ("https://firefoxos.anosrep.org",)
CSP_FRAME_SRC = CSP_FRAME_SRC + ("https://firefoxos.anosrep.org",)
AES_KEYS = private.AES_KEYS
|
RECAPTCHA_PUBLIC_KEY)
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
PACKAGER_PATH = os.path.join(TMP_PATH, 'packager')
|
random_line_split
|
htmldataelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLDataElementBinding;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::htmlelement::HTMLElement;
use dom::node::Node;
use util::str::DOMString;
#[dom_struct]
pub struct HTMLDataElement {
htmlelement: HTMLElement
}
impl HTMLDataElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLDataElement {
HTMLDataElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLDataElement>
|
}
|
{
let element = HTMLDataElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLDataElementBinding::Wrap)
}
|
identifier_body
|
htmldataelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLDataElementBinding;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::htmlelement::HTMLElement;
use dom::node::Node;
use util::str::DOMString;
#[dom_struct]
pub struct HTMLDataElement {
htmlelement: HTMLElement
}
impl HTMLDataElement {
fn
|
(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLDataElement {
HTMLDataElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLDataElement> {
let element = HTMLDataElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLDataElementBinding::Wrap)
}
}
|
new_inherited
|
identifier_name
|
htmldataelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLDataElementBinding;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::htmlelement::HTMLElement;
use dom::node::Node;
use util::str::DOMString;
#[dom_struct]
|
htmlelement: HTMLElement
}
impl HTMLDataElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLDataElement {
HTMLDataElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLDataElement> {
let element = HTMLDataElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLDataElementBinding::Wrap)
}
}
|
pub struct HTMLDataElement {
|
random_line_split
|
get.js
|
JsonRecordApp.controller(getCtrl, ['$scope', '$http', function($scope, $http){
var testArray = new Array();
var errorChar = 1;
for (var i = 10001; i < 10101; i++) {
var query = jQuery.ajax({
url: jsonUrl + jsonName + i + ".json",
type: "GET",
async: false,
dataType: 'json',
success: function(result){
testArray.push(result);
$scope.jsonData = testArray;
}
});
if (query.status == "404")
|
// $http.get(jsonUrl + jsonName + i + ".json").then(function(data){
// testArray.push(data.data);
// $scope.jsonData = testArray;
// console.log(testArray);
// }).catch(function(){
// errorChar = 0;
// });
}
var testJson = {
"id" : 100010,
"Name" : "testsName"
}
// var postQuery=$http.post("http://localhost:8080" + jsonUrl + jsonName + "10001.json", testJson).then(function(data){
// // var postQuery=$http.post(jsonUrl + jsonName + "test.json", testJson).then(function(data){
// console.log(data);
// }).catch(err => console.log(err));
jQuery.ajax({
url: jsonUrl + jsonName + "10001.json",
type: "POST",
data: JSON.stringify(testJson), // send the payload as the request body; 'context' only sets 'this' in callbacks
contentType: "application/json",
async: false,
dataType: 'json',
success: function(result){
console.log(result);
}
})
}]);
|
{ break;}
|
conditional_block
|
get.js
|
JsonRecordApp.controller(getCtrl, ['$scope', '$http', function($scope, $http){
var testArray = new Array();
var errorChar = 1;
for (var i = 10001; i < 10101; i++) {
var query = jQuery.ajax({
url: jsonUrl + jsonName + i + ".json",
type: "GET",
async: false,
dataType: 'json',
success: function(result){
testArray.push(result);
$scope.jsonData = testArray;
}
});
if (query.status == "404") { break; }
// $http.get(jsonUrl + jsonName + i + ".json").then(function(data){
// testArray.push(data.data);
// $scope.jsonData = testArray;
// console.log(testArray);
// }).catch(function(){
// errorChar = 0;
// });
}
var testJson = {
"id" : 100010,
"Name" : "testsName"
}
// var postQuery=$http.post("http://localhost:8080" + jsonUrl + jsonName + "10001.json", testJson).then(function(data){
// // var postQuery=$http.post(jsonUrl + jsonName + "test.json", testJson).then(function(data){
// console.log(data);
// }).catch(err => console.log(err));
jQuery.ajax({
url: jsonUrl + jsonName + "10001.json",
type: "POST",
data: JSON.stringify(testJson), // send the payload as the request body; 'context' only sets 'this' in callbacks
|
contentType: "application/json",
async: false,
dataType: 'json',
success: function(result){
console.log(result);
}
})
}]);
|
random_line_split
|
|
usage.js
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* Describes network resource usage.
*
*/
class Usage {
/**
* Create a Usage.
* @member {string} [id] Resource identifier.
* @member {number} currentValue The current value of the usage.
* @member {number} limit The limit of usage.
* @member {object} name The name of the type of usage.
* @member {string} [name.value] A string describing the resource name.
* @member {string} [name.localizedValue] A localized string describing the
* resource name.
*/
constructor() {
}
/**
* Defines the metadata of Usage
*
* @returns {object} metadata of Usage
*
*/
mapper() {
return {
required: false,
serializedName: 'Usage',
type: {
name: 'Composite',
className: 'Usage',
modelProperties: {
id: {
required: false,
readOnly: true,
serializedName: 'id',
type: {
name: 'String'
}
},
unit: {
required: true,
isConstant: true,
serializedName: 'unit',
defaultValue: 'Count',
type: {
name: 'String'
}
},
currentValue: {
required: true,
serializedName: 'currentValue',
type: {
name: 'Number'
}
},
limit: {
required: true,
serializedName: 'limit',
type: {
name: 'Number'
}
},
|
serializedName: 'name',
type: {
name: 'Composite',
className: 'UsageName'
}
}
}
}
};
}
}
module.exports = Usage;
|
name: {
required: true,
|
random_line_split
|
usage.js
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* Describes network resource usage.
*
*/
class
|
{
/**
* Create a Usage.
* @member {string} [id] Resource identifier.
* @member {number} currentValue The current value of the usage.
* @member {number} limit The limit of usage.
* @member {object} name The name of the type of usage.
* @member {string} [name.value] A string describing the resource name.
* @member {string} [name.localizedValue] A localized string describing the
* resource name.
*/
constructor() {
}
/**
* Defines the metadata of Usage
*
* @returns {object} metadata of Usage
*
*/
mapper() {
return {
required: false,
serializedName: 'Usage',
type: {
name: 'Composite',
className: 'Usage',
modelProperties: {
id: {
required: false,
readOnly: true,
serializedName: 'id',
type: {
name: 'String'
}
},
unit: {
required: true,
isConstant: true,
serializedName: 'unit',
defaultValue: 'Count',
type: {
name: 'String'
}
},
currentValue: {
required: true,
serializedName: 'currentValue',
type: {
name: 'Number'
}
},
limit: {
required: true,
serializedName: 'limit',
type: {
name: 'Number'
}
},
name: {
required: true,
serializedName: 'name',
type: {
name: 'Composite',
className: 'UsageName'
}
}
}
}
};
}
}
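// Illustrative pairing (an assumption, not generated code): an AutoRest
// client would feed this mapper to its serializer when deserializing a raw
// service payload, e.g.
//   const raw = { currentValue: 1, limit: 10, unit: 'Count', name: { value: 'x' } };
//   // serializer.deserialize(new Usage().mapper(), raw, 'usage') -> Usage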
module.exports = Usage;
|
Usage
|
identifier_name
|
indexOf.js
|
/**
* Lo-Dash 2.4.1 (Custom Build) <http://lodash.com/>
* Build: `lodash modularize exports="amd" -o ./compat/`
* Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/>
* Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE>
* Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
* Available under MIT license <http://lodash.com/license>
*/
define(['../internals/baseIndexOf', './sortedIndex'], function(baseIndexOf, sortedIndex) {
/* Native method shortcuts for methods with the same name as other `lodash` methods */
var nativeMax = Math.max;
/**
* Gets the index at which the first occurrence of `value` is found using
* strict equality for comparisons, i.e. `===`. If the array is already sorted
* providing `true` for `fromIndex` will run a faster binary search.
*
* @static
* @memberOf _
* @category Arrays
* @param {Array} array The array to search.
* @param {*} value The value to search for.
* @param {boolean|number} [fromIndex=0] The index to search from or `true`
* to perform a binary search on a sorted array.
* @returns {number} Returns the index of the matched value or `-1`.
* @example
*
* _.indexOf([1, 2, 3, 1, 2, 3], 2);
* // => 1
*
* _.indexOf([1, 2, 3, 1, 2, 3], 2, 3);
* // => 4
*
* _.indexOf([1, 1, 2, 2, 3, 3], 2, true);
* // => 2
*/
function indexOf(array, value, fromIndex)
|
return indexOf;
});
|
{
if (typeof fromIndex == 'number') {
var length = array ? array.length : 0;
fromIndex = (fromIndex < 0 ? nativeMax(0, length + fromIndex) : fromIndex || 0);
} else if (fromIndex) {
var index = sortedIndex(array, value);
return array[index] === value ? index : -1;
}
return baseIndexOf(array, value, fromIndex);
}
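// Worked example for the negative fromIndex branch above (illustrative):
// _.indexOf([1, 2, 3, 1], 1, -2) clamps fromIndex to max(0, 4 - 2) = 2,
// so the scan starts at index 2 and finds the match at index 3.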
|
identifier_body
|
indexOf.js
|
/**
* Lo-Dash 2.4.1 (Custom Build) <http://lodash.com/>
* Build: `lodash modularize exports="amd" -o ./compat/`
* Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/>
* Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE>
* Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
* Available under MIT license <http://lodash.com/license>
*/
define(['../internals/baseIndexOf', './sortedIndex'], function(baseIndexOf, sortedIndex) {
/* Native method shortcuts for methods with the same name as other `lodash` methods */
var nativeMax = Math.max;
/**
* Gets the index at which the first occurrence of `value` is found using
* strict equality for comparisons, i.e. `===`. If the array is already sorted
* providing `true` for `fromIndex` will run a faster binary search.
*
* @static
* @memberOf _
* @category Arrays
* @param {Array} array The array to search.
* @param {*} value The value to search for.
* @param {boolean|number} [fromIndex=0] The index to search from or `true`
* to perform a binary search on a sorted array.
* @returns {number} Returns the index of the matched value or `-1`.
* @example
*
* _.indexOf([1, 2, 3, 1, 2, 3], 2);
* // => 1
*
* _.indexOf([1, 2, 3, 1, 2, 3], 2, 3);
* // => 4
*
* _.indexOf([1, 1, 2, 2, 3, 3], 2, true);
* // => 2
*/
function
|
(array, value, fromIndex) {
if (typeof fromIndex == 'number') {
var length = array ? array.length : 0;
fromIndex = (fromIndex < 0 ? nativeMax(0, length + fromIndex) : fromIndex || 0);
} else if (fromIndex) {
var index = sortedIndex(array, value);
return array[index] === value ? index : -1;
}
return baseIndexOf(array, value, fromIndex);
}
return indexOf;
});
|
indexOf
|
identifier_name
|
indexOf.js
|
/**
* Lo-Dash 2.4.1 (Custom Build) <http://lodash.com/>
* Build: `lodash modularize exports="amd" -o ./compat/`
* Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/>
* Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE>
* Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
* Available under MIT license <http://lodash.com/license>
*/
define(['../internals/baseIndexOf', './sortedIndex'], function(baseIndexOf, sortedIndex) {
/* Native method shortcuts for methods with the same name as other `lodash` methods */
var nativeMax = Math.max;
/**
* Gets the index at which the first occurrence of `value` is found using
|
* @category Arrays
* @param {Array} array The array to search.
* @param {*} value The value to search for.
* @param {boolean|number} [fromIndex=0] The index to search from or `true`
* to perform a binary search on a sorted array.
* @returns {number} Returns the index of the matched value or `-1`.
* @example
*
* _.indexOf([1, 2, 3, 1, 2, 3], 2);
* // => 1
*
* _.indexOf([1, 2, 3, 1, 2, 3], 2, 3);
* // => 4
*
* _.indexOf([1, 1, 2, 2, 3, 3], 2, true);
* // => 2
*/
function indexOf(array, value, fromIndex) {
if (typeof fromIndex == 'number') {
var length = array ? array.length : 0;
fromIndex = (fromIndex < 0 ? nativeMax(0, length + fromIndex) : fromIndex || 0);
} else if (fromIndex) {
var index = sortedIndex(array, value);
return array[index] === value ? index : -1;
}
return baseIndexOf(array, value, fromIndex);
}
return indexOf;
});
|
* strict equality for comparisons, i.e. `===`. If the array is already sorted
* providing `true` for `fromIndex` will run a faster binary search.
*
* @static
* @memberOf _
|
random_line_split
|
transport.rs
|
use std::io::{self, Read, Write};
use std::net::{Shutdown, SocketAddr};
use std::time::Duration;
use std::u32;
use byteorder::{BigEndian, ByteOrder};
use bytes::{BufMut, Bytes, BytesMut, IntoBuf};
use futures::{Async, Future, Poll};
use prost::{decode_length_delimiter, length_delimiter_len, Message};
use tokio::net::{ConnectFuture, TcpStream};
use pb::rpc::{ErrorStatusPb, RemoteMethodPb, RequestHeader, ResponseHeader};
use Error;
use Options;
use RequestBody;
use RpcError;
use RpcErrorCode;
const INITIAL_CAPACITY: usize = 8 * 1024;
const BACKPRESSURE_BOUNDARY: usize = INITIAL_CAPACITY;
pub type TransportResponse = (Bytes, Vec<BytesMut>);
/// `Transport` handles sending and receiving raw KRPC messages over a TCP stream.
///
/// The transport manages send and receive buffers, encoding and decoding of messages, message
/// framing, headers, and RPC errors.
///
/// The transport wraps a single TCP connection. When the TCP connection is shutdown or fails, the
/// transport should no longer be used. TCP connection shutdown is indicated by a fatal error being
/// returned from `poll_ready()`, `send()`, or `poll()`.
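///
/// A minimal connection sketch (illustrative only, not from the original
/// source; assumes a tokio 0.1 executor drives the returned future and that
/// `addr` and `options` are already in scope):
///
/// ```ignore
/// let transport_future = Transport::connect(addr, options);
/// // Polling `transport_future` to completion yields a connected `Transport`.
/// ```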
pub(crate) struct Transport {
addr: SocketAddr,
options: Options,
stream: TcpStream,
send_buf: BytesMut,
recv_buf: BytesMut,
request_header: RequestHeader,
response_header: ResponseHeader,
}
impl Transport {
/// Returns a future which will yield a new transport.
pub fn connect(addr: SocketAddr, options: Options) -> TransportNew {
let connect = TcpStream::connect(&addr);
TransportNew {
addr,
options,
connect,
}
}
/// Completes with `Async::Ready(())` when the transport is ready to send an RPC to the peer.
///
/// An error return indicates a fatal error.
pub fn poll_ready(&mut self) -> Poll<(), Error> {
let result = || -> Poll<(), Error> {
// If the buffer is already over 8KiB, then attempt to flush it. If after flushing it's
// *still* over 8KiB, then apply backpressure (reject the send).
if self.send_buf.len() >= BACKPRESSURE_BOUNDARY {
self.poll_flush()?;
if self.send_buf.len() >= BACKPRESSURE_BOUNDARY {
return Ok(Async::NotReady);
}
}
Ok(Async::Ready(()))
}();
if result.is_err() {
let _ = self.stream.shutdown(Shutdown::Both);
}
result
}
/// Sends an RPC to the peer.
///
/// This method does not provide backpressure, so callers should always check that `poll_ready`
/// indicates that there is send capacity available.
///
/// If a fatal error is returned the transport is shut down. If a non-fatal error is returned,
/// the RPC should be failed.
pub fn send(
&mut self,
call_id: i32,
service: &str,
method: &str,
required_feature_flags: &[u32],
body: &RequestBody,
timeout: Option<Duration>,
) -> Result<(), Error> {
let result = || -> Result<(), Error> {
// Set the header fields.
self.request_header.call_id = call_id;
{
let remote_method = self
.request_header
.remote_method
.get_or_insert(RemoteMethodPb::default());
remote_method.clear();
remote_method.service_name.push_str(service);
remote_method.method_name.push_str(method);
}
if let Some(timeout) = timeout {
self.request_header.timeout_millis = Some(duration_to_ms(timeout));
}
self.request_header.required_feature_flags.clear();
self.request_header
.required_feature_flags
.extend_from_slice(required_feature_flags);
let header_len = Message::encoded_len(&self.request_header);
let body_len = body.encoded_len();
let len = length_delimiter_len(header_len)
+ length_delimiter_len(body_len)
+ header_len
+ body_len;
if len > self.options.max_message_length as usize {
return Err(RpcError {
code: RpcErrorCode::ErrorInvalidRequest,
message: format!(
"RPC request exceeds maximum length ({}/{})",
len, self.options.max_message_length
),
unsupported_feature_flags: Vec::new(),
}.into());
}
self.send_buf.put_u32_be(len as u32);
Message::encode_length_delimited(&self.request_header, &mut self.send_buf).unwrap();
body.encode_length_delimited(&mut self.send_buf);
Ok(())
}();
if let Err(ref error) = result {
if error.is_fatal() {
let _ = self.stream.shutdown(Shutdown::Both);
}
}
result
}
/// Attempts to receive a response from the peer.
pub fn poll(&mut self) -> Poll<(i32, Result<TransportResponse, Error>), Error> {
self.poll_flush()?;
self.poll_recv()
}
/// Attempts to read a response from the TCP stream.
fn poll_recv(&mut self) -> Poll<(i32, Result<TransportResponse, Error>), Error> {
let result = || -> Poll<(i32, Result<TransportResponse, Error>), Error> {
// Read, or continue reading, an RPC response message from the socket into the receive
// buffer. Every RPC response is prefixed with a 4-byte length header.
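//
// Frame layout as decoded below (illustrative summary derived from this
// function): [u32 total_len][varint header_len][ResponseHeader]
// [varint main_msg_len][body][sidecar 0][sidecar 1]...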
if self.recv_buf.len() < 4 {
let needed = 4 - self.recv_buf.len();
try_ready!(self.poll_fill(needed));
}
let msg_len = BigEndian::read_u32(&self.recv_buf[..4]) as usize;
if msg_len > self.options.max_message_length as usize {
return Err(Error::Serialization(format!(
"RPC response exceeds maximum length ({}/{})",
msg_len, self.options.max_message_length
)));
}
if self.recv_buf.len() - 4 < msg_len {
let needed = msg_len + 4 - self.recv_buf.len();
try_ready!(self.poll_fill(needed));
}
let _ = self.recv_buf.split_to(4);
let mut buf = self.recv_buf.split_to(msg_len);
// Decode the header.
let header_len = {
let mut cursor = buf.clone().into_buf();
self.response_header.clear();
self.response_header.merge_length_delimited(&mut cursor)?;
cursor.position() as usize
};
buf.split_to(header_len);
let call_id = self.response_header.call_id;
if self.response_header.is_error() {
let error = Error::Rpc(ErrorStatusPb::decode_length_delimited(buf)?.into());
Ok(Async::Ready((call_id, Err(error))))
} else {
// KRPC inserts a len integer before the main message whose value is the length of
// the main message and sidecars. This is completely useless since this can be
// solved for easily using the header length and the overall length. Unfortunately
// stripping this integer is not trivial, since it's variable length. In order to
// know its width we are forced to read it. Who designed this crap?
//
// There's probably a way to solve for the width of the varint based on the
// remaining length of the buffer, but it's unfortunately not as simple as just
// calling length_delimiter_len since the buffer contains the varint itself.
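// For a concrete illustration (not part of the original comment): a main
// message plus sidecars spanning 300 bytes is prefixed by the varint 300,
// which is two bytes wide, so length_delimiter_len(300) == 2 and the
// split_to call below strips exactly those two bytes.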
let main_message_len = decode_length_delimiter(&mut (&buf).into_buf()).unwrap();
buf.split_to(length_delimiter_len(main_message_len));
let mut sidecars = Vec::new();
let body;
if self.response_header.sidecar_offsets.is_empty() {
body = buf.freeze();
} else {
let mut prev_offset = self.response_header.sidecar_offsets[0] as usize;
body = buf.split_to(prev_offset).freeze();
for &offset in &self.response_header.sidecar_offsets[1..] {
let offset = offset as usize;
sidecars.push(buf.split_to(offset - prev_offset));
prev_offset = offset;
}
sidecars.push(buf);
}
Ok(Async::Ready((call_id, Ok((body, sidecars)))))
}
}();
let is_fatal = match result {
Ok(Async::Ready((_, Err(ref error)))) => error.is_fatal(),
Err(_) => true,
_ => false,
};
if is_fatal {
let _ = self.stream.shutdown(Shutdown::Both);
}
result
}
/// Reads at least `at_least` bytes into the receive buffer.
///
/// Based on tokio-io's [`FramedRead`][1] and [`AsyncRead`][2].
/// [1]: https://github.com/tokio-rs/tokio-io/blob/master/src/framed_read.rs#L259-L294
/// [2]: https://github.com/tokio-rs/tokio-io/blob/0.1.3/src/lib.rs#L138-L157
fn poll_fill(&mut self, mut at_least: usize) -> Result<Async<()>, io::Error> {
self.recv_buf.reserve(at_least);
while at_least > 0 {
let n = unsafe {
let n = try_nb!(self.stream.read(self.recv_buf.bytes_mut()));
self.recv_buf.advance_mut(n);
n
};
at_least = at_least.saturating_sub(n);
if n == 0 {
return Err(io::Error::from(io::ErrorKind::UnexpectedEof));
}
}
Ok(Async::Ready(()))
}
/// Flushes bytes from send buffer to the stream.
///
/// Based on tokio-io's [`FramedWrite`][1].
/// [1]: https://github.com/tokio-rs/tokio-io/blob/0.1.3/src/framed_write.rs#L202-L225
///
/// An error return indicates a fatal error.
fn poll_flush(&mut self) -> Result<Async<()>, io::Error> {
while !self.send_buf.is_empty() {
let n = try_nb!(self.stream.write(&self.send_buf));
if n == 0 {
return Err(io::Error::from(io::ErrorKind::WriteZero));
}
let _ = self.send_buf.split_to(n);
}
Ok(Async::Ready(()))
}
pub fn addr(&self) -> &SocketAddr {
&self.addr
}
pub fn options(&self) -> &Options {
&self.options
}
pub fn send_buf_len(&self) -> usize {
self.send_buf.len()
}
pub fn recv_buf_len(&self) -> usize {
self.recv_buf.len()
}
}
/// Future returned by `Transport::connect` which will resolve to a `Transport` when the TCP stream
/// is connected.
pub(crate) struct TransportNew {
addr: SocketAddr,
options: Options,
connect: ConnectFuture,
}
impl Future for TransportNew {
type Item = Transport;
type Error = io::Error;
fn poll(&mut self) -> Result<Async<Transport>, io::Error> {
let stream = try_ready!(self.connect.poll());
stream.set_nodelay(self.options.nodelay)?;
// Write the connection header to the send buffer.
let mut send_buf = BytesMut::with_capacity(INITIAL_CAPACITY);
send_buf.put_slice(b"hrpc\x09\0\0");
Ok(Async::Ready(Transport {
addr: self.addr,
options: self.options.clone(),
stream,
send_buf,
recv_buf: BytesMut::with_capacity(INITIAL_CAPACITY),
request_header: RequestHeader::default(),
response_header: ResponseHeader::default(),
}))
}
}
/// Converts a duration to milliseconds.
fn
|
(duration: Duration) -> u32 {
let millis = duration
.as_secs()
.saturating_mul(1000)
.saturating_add(u64::from(duration.subsec_nanos()) / 1_000_000);
if millis > u64::from(u32::MAX) {
u32::MAX
} else {
millis as u32
}
}
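// Worked examples (illustrative): Duration::new(2, 500_000_000) -> 2500 ms;
// any duration above u32::MAX ms (roughly 49.7 days) saturates to u32::MAX.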
|
duration_to_ms
|
identifier_name
|
transport.rs
|
use std::io::{self, Read, Write};
use std::net::{Shutdown, SocketAddr};
use std::time::Duration;
use std::u32;
use byteorder::{BigEndian, ByteOrder};
use bytes::{BufMut, Bytes, BytesMut, IntoBuf};
use futures::{Async, Future, Poll};
use prost::{decode_length_delimiter, length_delimiter_len, Message};
use tokio::net::{ConnectFuture, TcpStream};
use pb::rpc::{ErrorStatusPb, RemoteMethodPb, RequestHeader, ResponseHeader};
use Error;
use Options;
use RequestBody;
use RpcError;
use RpcErrorCode;
const INITIAL_CAPACITY: usize = 8 * 1024;
const BACKPRESSURE_BOUNDARY: usize = INITIAL_CAPACITY;
pub type TransportResponse = (Bytes, Vec<BytesMut>);
/// `Transport` handles sending and receiving raw KRPC messages over a TCP stream.
///
/// The transport manages send and receive buffers, encoding and decoding of messages, message
/// framing, headers, and RPC errors.
///
/// The transport wraps a single TCP connection. When the TCP connection is shutdown or fails, the
/// transport should no longer be used. TCP connection shutdown is indicated by a fatal error being
/// returned from `poll_ready()`, `send()`, or `poll()`.
pub(crate) struct Transport {
addr: SocketAddr,
options: Options,
stream: TcpStream,
send_buf: BytesMut,
recv_buf: BytesMut,
request_header: RequestHeader,
response_header: ResponseHeader,
}
impl Transport {
/// Returns a future which will yield a new transport.
pub fn connect(addr: SocketAddr, options: Options) -> TransportNew {
let connect = TcpStream::connect(&addr);
TransportNew {
addr,
options,
connect,
}
}
/// Completes with `Async::Ready(())` when the transport is ready to send an RPC to the peer.
///
/// An error return indicates a fatal error.
pub fn poll_ready(&mut self) -> Poll<(), Error> {
let result = || -> Poll<(), Error> {
// If the buffer is already over 8KiB, then attempt to flush it. If after flushing it's
// *still* over 8KiB, then apply backpressure (reject the send).
if self.send_buf.len() >= BACKPRESSURE_BOUNDARY {
self.poll_flush()?;
if self.send_buf.len() >= BACKPRESSURE_BOUNDARY {
return Ok(Async::NotReady);
}
}
Ok(Async::Ready(()))
}();
if result.is_err() {
let _ = self.stream.shutdown(Shutdown::Both);
}
result
}
/// Sends an RPC to the peer.
///
/// This method does not provide backpressure, so callers should always check that `poll_ready`
/// indicates that there is send capacity available.
///
/// If a fatal error is returned the transport is shut down. If a non-fatal error is returned,
/// the RPC should be failed.
pub fn send(
&mut self,
call_id: i32,
service: &str,
method: &str,
required_feature_flags: &[u32],
body: &RequestBody,
timeout: Option<Duration>,
) -> Result<(), Error> {
let result = || -> Result<(), Error> {
// Set the header fields.
self.request_header.call_id = call_id;
{
let remote_method = self
.request_header
.remote_method
.get_or_insert(RemoteMethodPb::default());
remote_method.clear();
remote_method.service_name.push_str(service);
remote_method.method_name.push_str(method);
}
if let Some(timeout) = timeout {
self.request_header.timeout_millis = Some(duration_to_ms(timeout));
}
self.request_header.required_feature_flags.clear();
self.request_header
.required_feature_flags
.extend_from_slice(required_feature_flags);
let header_len = Message::encoded_len(&self.request_header);
let body_len = body.encoded_len();
let len = length_delimiter_len(header_len)
+ length_delimiter_len(body_len)
+ header_len
+ body_len;
if len > self.options.max_message_length as usize {
return Err(RpcError {
code: RpcErrorCode::ErrorInvalidRequest,
message: format!(
"RPC request exceeds maximum length ({}/{})",
len, self.options.max_message_length
),
unsupported_feature_flags: Vec::new(),
}.into());
}
self.send_buf.put_u32_be(len as u32);
Message::encode_length_delimited(&self.request_header, &mut self.send_buf).unwrap();
body.encode_length_delimited(&mut self.send_buf);
Ok(())
}();
if let Err(ref error) = result {
if error.is_fatal() {
let _ = self.stream.shutdown(Shutdown::Both);
}
}
result
}
/// Attempts to receive a response from the peer.
pub fn poll(&mut self) -> Poll<(i32, Result<TransportResponse, Error>), Error> {
self.poll_flush()?;
self.poll_recv()
}
/// Attempts to read a response from the TCP stream.
fn poll_recv(&mut self) -> Poll<(i32, Result<TransportResponse, Error>), Error> {
let result = || -> Poll<(i32, Result<TransportResponse, Error>), Error> {
// Read, or continue reading, an RPC response message from the socket into the receive
// buffer. Every RPC response is prefixed with a 4-byte length header.
if self.recv_buf.len() < 4 {
let needed = 4 - self.recv_buf.len();
try_ready!(self.poll_fill(needed));
}
let msg_len = BigEndian::read_u32(&self.recv_buf[..4]) as usize;
if msg_len > self.options.max_message_length as usize {
return Err(Error::Serialization(format!(
"RPC response exceeds maximum length ({}/{})",
msg_len, self.options.max_message_length
)));
}
if self.recv_buf.len() - 4 < msg_len {
let needed = msg_len + 4 - self.recv_buf.len();
try_ready!(self.poll_fill(needed));
}
let _ = self.recv_buf.split_to(4);
let mut buf = self.recv_buf.split_to(msg_len);
// Decode the header.
let header_len = {
let mut cursor = buf.clone().into_buf();
self.response_header.clear();
self.response_header.merge_length_delimited(&mut cursor)?;
cursor.position() as usize
};
buf.split_to(header_len);
let call_id = self.response_header.call_id;
if self.response_header.is_error() {
let error = Error::Rpc(ErrorStatusPb::decode_length_delimited(buf)?.into());
Ok(Async::Ready((call_id, Err(error))))
} else {
// KRPC inserts a length varint before the main message whose value is the
// length of the main message plus sidecars. This is redundant, since it can
// be derived from the header length and the overall length. Unfortunately,
// stripping this varint is not trivial: it is variable-width, so we are
// forced to decode it just to learn how many bytes to skip.
//
// There is probably a way to solve for the width of the varint from the
// remaining length of the buffer, but it is not as simple as calling
// length_delimiter_len, since the buffer contains the varint itself.
let main_message_len = decode_length_delimiter(&mut (&buf).into_buf()).unwrap();
buf.split_to(length_delimiter_len(main_message_len));
let mut sidecars = Vec::new();
let body;
if self.response_header.sidecar_offsets.is_empty() {
body = buf.freeze();
} else {
let mut prev_offset = self.response_header.sidecar_offsets[0] as usize;
body = buf.split_to(prev_offset).freeze();
for &offset in &self.response_header.sidecar_offsets[1..] {
let offset = offset as usize;
sidecars.push(buf.split_to(offset - prev_offset));
prev_offset = offset;
}
sidecars.push(buf);
}
Ok(Async::Ready((call_id, Ok((body, sidecars)))))
}
}();
let is_fatal = match result {
Ok(Async::Ready((_, Err(ref error)))) => error.is_fatal(),
Err(_) => true,
_ => false,
};
if is_fatal {
let _ = self.stream.shutdown(Shutdown::Both);
}
result
}
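// Sketch of the response frame decoded above, derived from the logic in
// `poll_recv` (not a normative description of the KRPC wire format):
//
//   [u32 total length (big-endian)]
//   [varint header length][ResponseHeader]
//   [varint main message length][body][sidecar 0][sidecar 1]...
//
// Sidecar offsets from the header are applied after the main-message varint
// has been stripped, i.e. they are relative to the start of the body.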
/// Reads at least `at_least` bytes into the receive buffer.
///
/// Based on tokio-io's [`FramedRead`][1] and [`AsyncRead`][2].
/// [1]: https://github.com/tokio-rs/tokio-io/blob/master/src/framed_read.rs#L259-L294
/// [2]: https://github.com/tokio-rs/tokio-io/blob/0.1.3/src/lib.rs#L138-L157
fn poll_fill(&mut self, mut at_least: usize) -> Result<Async<()>, io::Error>
|
/// Flushes bytes from send buffer to the stream.
///
/// Based on tokio-io's [`FramedWrite`][1].
/// [1]: https://github.com/tokio-rs/tokio-io/blob/0.1.3/src/framed_write.rs#L202-L225
///
/// An error return indicates a fatal error.
fn poll_flush(&mut self) -> Result<Async<()>, io::Error> {
while !self.send_buf.is_empty() {
let n = try_nb!(self.stream.write(&self.send_buf));
if n == 0 {
return Err(io::Error::from(io::ErrorKind::WriteZero));
}
let _ = self.send_buf.split_to(n);
}
Ok(Async::Ready(()))
}
pub fn addr(&self) -> &SocketAddr {
&self.addr
}
pub fn options(&self) -> &Options {
&self.options
}
pub fn send_buf_len(&self) -> usize {
self.send_buf.len()
}
pub fn recv_buf_len(&self) -> usize {
self.recv_buf.len()
}
}
/// Future returned by `Transport::connect` which will resolve to a `Transport` when the TCP stream
/// is connected.
pub(crate) struct TransportNew {
addr: SocketAddr,
options: Options,
connect: ConnectFuture,
}
impl Future for TransportNew {
type Item = Transport;
type Error = io::Error;
fn poll(&mut self) -> Result<Async<Transport>, io::Error> {
let stream = try_ready!(self.connect.poll());
stream.set_nodelay(self.options.nodelay)?;
// Write the connection header to the send buffer.
let mut send_buf = BytesMut::with_capacity(INITIAL_CAPACITY);
send_buf.put_slice(b"hrpc\x09\0\0");
Ok(Async::Ready(Transport {
addr: self.addr,
options: self.options.clone(),
stream,
send_buf,
recv_buf: BytesMut::with_capacity(INITIAL_CAPACITY),
request_header: RequestHeader::default(),
response_header: ResponseHeader::default(),
}))
}
}
/// Converts a duration to milliseconds.
fn duration_to_ms(duration: Duration) -> u32 {
let millis = duration
.as_secs()
.saturating_mul(1000)
.saturating_add(u64::from(duration.subsec_nanos()) / 1_000_000);
if millis > u64::from(u32::MAX) {
u32::MAX
} else {
millis as u32
}
}
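// Illustrative behavior (assumed; no such assertions exist in the original):
//
//   assert_eq!(duration_to_ms(Duration::from_millis(1500)), 1500);
//   // Anything past u32::MAX milliseconds (~49.7 days) saturates:
//   assert_eq!(duration_to_ms(Duration::from_secs(u64::MAX)), u32::MAX);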
|
{
self.recv_buf.reserve(at_least);
while at_least > 0 {
let n = unsafe {
let n = try_nb!(self.stream.read(self.recv_buf.bytes_mut()));
self.recv_buf.advance_mut(n);
n
};
at_least = at_least.saturating_sub(n);
if n == 0 {
return Err(io::Error::from(io::ErrorKind::UnexpectedEof));
}
}
Ok(Async::Ready(()))
}
|
identifier_body
|
transport.rs
|
use std::io::{self, Read, Write};
use std::net::{Shutdown, SocketAddr};
use std::time::Duration;
use std::u32;
use byteorder::{BigEndian, ByteOrder};
use bytes::{BufMut, Bytes, BytesMut, IntoBuf};
use futures::{Async, Future, Poll};
use prost::{decode_length_delimiter, length_delimiter_len, Message};
use tokio::net::{ConnectFuture, TcpStream};
use pb::rpc::{ErrorStatusPb, RemoteMethodPb, RequestHeader, ResponseHeader};
use Error;
use Options;
use RequestBody;
use RpcError;
use RpcErrorCode;
const INITIAL_CAPACITY: usize = 8 * 1024;
const BACKPRESSURE_BOUNDARY: usize = INITIAL_CAPACITY;
pub type TransportResponse = (Bytes, Vec<BytesMut>);
/// `Transport` handles sending and receiving raw KRPC messages over a TCP stream.
///
/// The transport manages send and receive buffers, encoding and decoding of messages, message
/// framing, headers, and RPC errors.
///
/// The transport wraps a single TCP connection. When the TCP connection is shut down or fails, the
/// transport should no longer be used. TCP connection shutdown is indicated by a fatal error being
/// returned from `poll_ready()`, `send()`, or `poll()`.
pub(crate) struct Transport {
addr: SocketAddr,
options: Options,
stream: TcpStream,
send_buf: BytesMut,
recv_buf: BytesMut,
request_header: RequestHeader,
response_header: ResponseHeader,
}
impl Transport {
/// Returns a future which will yield a new transport.
pub fn connect(addr: SocketAddr, options: Options) -> TransportNew {
let connect = TcpStream::connect(&addr);
TransportNew {
addr,
options,
connect,
}
}
/// Returns `Async::Ready` when the transport is ready to send an RPC to the peer.
///
/// An error return indicates a fatal error.
pub fn poll_ready(&mut self) -> Poll<(), Error> {
let result = || -> Poll<(), Error> {
// If the buffer is already at or above 8 KiB, attempt to flush it. If after flushing it is
// *still* at or above 8 KiB, apply backpressure (reject the send).
if self.send_buf.len() >= BACKPRESSURE_BOUNDARY {
self.poll_flush()?;
if self.send_buf.len() >= BACKPRESSURE_BOUNDARY {
return Ok(Async::NotReady);
}
}
Ok(Async::Ready(()))
}();
if result.is_err() {
let _ = self.stream.shutdown(Shutdown::Both);
}
result
}
/// Sends an RPC to the peer.
///
/// This method does not provide backpressure, so callers should always check that `poll_ready`
/// indicates that there is send capacity available.
///
/// If a fatal error is returned the transport is shut down. If a non-fatal error is returned,
/// the RPC should be failed.
pub fn send(
&mut self,
|
service: &str,
method: &str,
required_feature_flags: &[u32],
body: &RequestBody,
timeout: Option<Duration>,
) -> Result<(), Error> {
let result = || -> Result<(), Error> {
// Set the header fields.
self.request_header.call_id = call_id;
{
let remote_method = self
.request_header
.remote_method
.get_or_insert(RemoteMethodPb::default());
remote_method.clear();
remote_method.service_name.push_str(service);
remote_method.method_name.push_str(method);
}
if let Some(timeout) = timeout {
self.request_header.timeout_millis = Some(duration_to_ms(timeout));
}
self.request_header.required_feature_flags.clear();
self.request_header
.required_feature_flags
.extend_from_slice(required_feature_flags);
let header_len = Message::encoded_len(&self.request_header);
let body_len = body.encoded_len();
let len = length_delimiter_len(header_len)
+ length_delimiter_len(body_len)
+ header_len
+ body_len;
if len > self.options.max_message_length as usize {
return Err(RpcError {
code: RpcErrorCode::ErrorInvalidRequest,
message: format!(
"RPC request exceeds maximum length ({}/{})",
len, self.options.max_message_length
),
unsupported_feature_flags: Vec::new(),
}.into());
}
self.send_buf.put_u32_be(len as u32);
Message::encode_length_delimited(&self.request_header, &mut self.send_buf).unwrap();
body.encode_length_delimited(&mut self.send_buf);
Ok(())
}();
if let Err(ref error) = result {
if error.is_fatal() {
let _ = self.stream.shutdown(Shutdown::Both);
}
}
result
}
/// Attempts to receive a response from the peer.
pub fn poll(&mut self) -> Poll<(i32, Result<TransportResponse, Error>), Error> {
self.poll_flush()?;
self.poll_recv()
}
/// Attempts to read a response from the TCP stream.
fn poll_recv(&mut self) -> Poll<(i32, Result<TransportResponse, Error>), Error> {
let result = || -> Poll<(i32, Result<TransportResponse, Error>), Error> {
// Read, or continue reading, an RPC response message from the socket into the receive
// buffer. Every RPC response is prefixed with a 4-byte length header.
if self.recv_buf.len() < 4 {
let needed = 4 - self.recv_buf.len();
try_ready!(self.poll_fill(needed));
}
let msg_len = BigEndian::read_u32(&self.recv_buf[..4]) as usize;
if msg_len > self.options.max_message_length as usize {
return Err(Error::Serialization(format!(
"RPC response exceeds maximum length ({}/{})",
msg_len, self.options.max_message_length
)));
}
if self.recv_buf.len() - 4 < msg_len {
let needed = msg_len + 4 - self.recv_buf.len();
try_ready!(self.poll_fill(needed));
}
let _ = self.recv_buf.split_to(4);
let mut buf = self.recv_buf.split_to(msg_len);
// Decode the header.
let header_len = {
let mut cursor = buf.clone().into_buf();
self.response_header.clear();
self.response_header.merge_length_delimited(&mut cursor)?;
cursor.position() as usize
};
buf.split_to(header_len);
let call_id = self.response_header.call_id;
if self.response_header.is_error() {
let error = Error::Rpc(ErrorStatusPb::decode_length_delimited(buf)?.into());
Ok(Async::Ready((call_id, Err(error))))
} else {
// KRPC inserts a length varint before the main message whose value is the
// length of the main message plus sidecars. This is redundant, since it can
// be derived from the header length and the overall length. Unfortunately,
// stripping this varint is not trivial: it is variable-width, so we are
// forced to decode it just to learn how many bytes to skip.
//
// There is probably a way to solve for the width of the varint from the
// remaining length of the buffer, but it is not as simple as calling
// length_delimiter_len, since the buffer contains the varint itself.
let main_message_len = decode_length_delimiter(&mut (&buf).into_buf()).unwrap();
buf.split_to(length_delimiter_len(main_message_len));
let mut sidecars = Vec::new();
let body;
if self.response_header.sidecar_offsets.is_empty() {
body = buf.freeze();
} else {
let mut prev_offset = self.response_header.sidecar_offsets[0] as usize;
body = buf.split_to(prev_offset).freeze();
for &offset in &self.response_header.sidecar_offsets[1..] {
let offset = offset as usize;
sidecars.push(buf.split_to(offset - prev_offset));
prev_offset = offset;
}
sidecars.push(buf);
}
Ok(Async::Ready((call_id, Ok((body, sidecars)))))
}
}();
let is_fatal = match result {
Ok(Async::Ready((_, Err(ref error)))) => error.is_fatal(),
Err(_) => true,
_ => false,
};
if is_fatal {
let _ = self.stream.shutdown(Shutdown::Both);
}
result
}
/// Reads at least `at_least` bytes into the receive buffer.
///
/// Based on tokio-io's [`FramedRead`][1] and [`AsyncRead`][2].
/// [1]: https://github.com/tokio-rs/tokio-io/blob/master/src/framed_read.rs#L259-L294
/// [2]: https://github.com/tokio-rs/tokio-io/blob/0.1.3/src/lib.rs#L138-L157
fn poll_fill(&mut self, mut at_least: usize) -> Result<Async<()>, io::Error> {
self.recv_buf.reserve(at_least);
while at_least > 0 {
let n = unsafe {
let n = try_nb!(self.stream.read(self.recv_buf.bytes_mut()));
self.recv_buf.advance_mut(n);
n
};
at_least = at_least.saturating_sub(n);
if n == 0 {
return Err(io::Error::from(io::ErrorKind::UnexpectedEof));
}
}
Ok(Async::Ready(()))
}
/// Flushes bytes from send buffer to the stream.
///
/// Based on tokio-io's [`FramedWrite`][1].
/// [1]: https://github.com/tokio-rs/tokio-io/blob/0.1.3/src/framed_write.rs#L202-L225
///
/// An error return indicates a fatal error.
fn poll_flush(&mut self) -> Result<Async<()>, io::Error> {
while !self.send_buf.is_empty() {
let n = try_nb!(self.stream.write(&self.send_buf));
if n == 0 {
return Err(io::Error::from(io::ErrorKind::WriteZero));
}
let _ = self.send_buf.split_to(n);
}
Ok(Async::Ready(()))
}
pub fn addr(&self) -> &SocketAddr {
&self.addr
}
pub fn options(&self) -> &Options {
&self.options
}
pub fn send_buf_len(&self) -> usize {
self.send_buf.len()
}
pub fn recv_buf_len(&self) -> usize {
self.recv_buf.len()
}
}
/// Future returned by `Transport::connect` which will resolve to a `Transport` when the TCP stream
/// is connected.
pub(crate) struct TransportNew {
addr: SocketAddr,
options: Options,
connect: ConnectFuture,
}
impl Future for TransportNew {
type Item = Transport;
type Error = io::Error;
fn poll(&mut self) -> Result<Async<Transport>, io::Error> {
let stream = try_ready!(self.connect.poll());
stream.set_nodelay(self.options.nodelay)?;
// Write the connection header to the send buffer.
let mut send_buf = BytesMut::with_capacity(INITIAL_CAPACITY);
send_buf.put_slice(b"hrpc\x09\0\0");
Ok(Async::Ready(Transport {
addr: self.addr,
options: self.options.clone(),
stream,
send_buf,
recv_buf: BytesMut::with_capacity(INITIAL_CAPACITY),
request_header: RequestHeader::default(),
response_header: ResponseHeader::default(),
}))
}
}
/// Converts a duration to milliseconds.
fn duration_to_ms(duration: Duration) -> u32 {
let millis = duration
.as_secs()
.saturating_mul(1000)
.saturating_add(u64::from(duration.subsec_nanos()) / 1_000_000);
if millis > u64::from(u32::MAX) {
u32::MAX
} else {
millis as u32
}
}
|
call_id: i32,
|
random_line_split
|
frontmatterparser.py
|
#!/usr/bin/env python
__author__ = "Dulip Withanage"
__email__ = "[email protected]"
import re
import string
import sys
import operator
import globals as gv
import os
import subprocess
import shutil
#from django.utils.encoding import smart_str
class FrontMatterParser:
def __init__(self, gv):
self.gv = gv
def
|
(self, filestring):
# this works for perception-monospace, equations tables, laddering, neoliberalism, snowball, valuechain, sodium
name = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)\*?\s){2,5}(&|and|et|und)\s(([A-Za-z\-\.]+)\*?\s?){2,5}(</p>|</bold>|</italic>|\n)',filestring)
if len(name) == 0:
# this works for racialprofiling, antiseptics, eeg_comicsans, leadership, systemsthinker
# this would work for science.doc but there are way too many authors and that affects the string
# would work for rating.doc but need to fix linebreak comments from output
name2 = re.findall(r'(<p>|<bold>|<italic>)(([A-Za-z\-\.]+)(,?\s)){1,20}([A-Za-z\-\.]+)?(</p>|</bold>|</italic>)',filestring)
# this loops through strings and prefers those that occur earlier + have more periods/commas
guess2score = {}
guess2number = 0
for g in name2:
guess2 =''.join(str(e) for e in g)
periods = re.findall(r'\.',guess2)
italics = re.findall(r'italic',guess2)
guess2score[guess2] = len(periods)
guess2score[guess2] += len(italics)
guess2score[guess2] -= guess2number
guess2number += 1
#print operator.itemgetter(1)
print(guess2score.items())
print(type(operator.itemgetter(1)))
name = [max(guess2score.items(), key=operator.itemgetter(1))[0]]
# re.findall returns tuples when the pattern has groups; flatten to a string
first_name = name[0] if isinstance(name[0], str) else ''.join(str(e) for e in name[0])
striptags_name = re.sub(r'<.*>','',first_name)
authorString = re.sub(r'[Bb][Yy]\s','',striptags_name)
# this is the author string. could try sending to parscit to get individual author names.
return authorString
# entrepreneurship needs fixing, will be tough, has authors in multiple XML elements
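# Illustrative scoring (assumed example, not from the original corpus): for
# candidates "J. Smith" (one period, found first) and "and then the results"
# (no periods, found second), the scores are 1 - 0 = 1 and 0 - 1 = -1, so
# "J. Smith" is preferred.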
def parse_title(self, filestring):
# need to anticipate which other special characters are allowable in titles
# first, check if a subtitle and title have wound up separated from one another
title = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)(,?\s)){1,20}([A-Za-z\-\.]+)?:(</p>|</bold>|</italic>|\n)(.|\s)*?(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)((:|,)?\s)){1,20}([A-Za-z\-\.]+)?\??(</p>|</bold>|</italic>|\n)',filestring)
if len(title) == 0:
# this works for antiseptics, eeg_comicsans, entrepreneurship, laddering, racialprofiling, snowball, sodium
title2 = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)((:|,)?\s)){1,20}([A-Za-z\-\.]+)?\??(</p>|</bold>|</italic>|\n)',filestring)
title = title2
#title0 = ''.join(title[0])
if not title:
return ""
title_first = ''.join(title[0])
#remove <> tags
titleString = re.sub(r'<(.*)>','',re.sub(r'</(.*)>','',title_first))
return titleString
def get_file_text(self, filename):
f = open(filename)
text= f.read()
f.close()
return text
def update_tmp_file(self):
shutil.copy2(self.gv.NLM_FILE_PATH,self.gv.NLM_TEMP_FILE_PATH)
def write_output(self, text):
out = open(self.gv.NLM_FILE_PATH,'w')
out.write(text)
out.close()
def run(self):
text = self.get_file_text(self.gv.NLM_TEMP_FILE_PATH)
#self.parse_authors(text)
self.parse_title(text)
self.write_output(text)
self.update_tmp_file()
|
parse_authors
|
identifier_name
|
frontmatterparser.py
|
#!/usr/bin/env python
__author__ = "Dulip Withanage"
__email__ = "[email protected]"
import re
import string
import sys
import operator
import globals as gv
import os
import subprocess
import shutil
#from django.utils.encoding import smart_str
class FrontMatterParser:
def __init__(self, gv):
self.gv = gv
def parse_authors(self, filestring):
# this works for perception-monospace, equations tables, laddering, neoliberalism, snowball, valuechain, sodium
name = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)\*?\s){2,5}(&|and|et|und)\s(([A-Za-z\-\.]+)\*?\s?){2,5}(</p>|</bold>|</italic>|\n)',filestring)
if len(name) == 0:
# this works for racialprofiling, antiseptics, eeg_comicsans, leadership, systemsthinker
# this would work for science.doc but there are way too many authors and that affects the string
# would work for rating.doc but need to fix linebreak comments from output
name2 = re.findall(r'(<p>|<bold>|<italic>)(([A-Za-z\-\.]+)(,?\s)){1,20}([A-Za-z\-\.]+)?(</p>|</bold>|</italic>)',filestring)
# this loops through strings and prefers those that occur earlier + have more periods/commas
guess2score = {}
guess2number = 0
for g in name2:
guess2 =''.join(str(e) for e in g)
periods = re.findall(r'\.',guess2)
italics = re.findall(r'italic',guess2)
guess2score[guess2] = len(periods)
guess2score[guess2] += len(italics)
guess2score[guess2] -= guess2number
guess2number += 1
#print operator.itemgetter(1)
print(guess2score.items())
print(type(operator.itemgetter(1)))
name = [max(guess2score.items(), key=operator.itemgetter(1))[0]]
# re.findall returns tuples when the pattern has groups; flatten to a string
first_name = name[0] if isinstance(name[0], str) else ''.join(str(e) for e in name[0])
striptags_name = re.sub(r'<.*>','',first_name)
authorString = re.sub(r'[Bb][Yy]\s','',striptags_name)
# this is the author string. could try sending to parscit to get individual author names.
return authorString
# entrepreneurship needs fixing, will be tough, has authors in multiple XML elements
def parse_title(self, filestring):
# need to anticipate which other special characters are allowable in titles
# first, check if a subtitle and title have wound up separated from one another
title = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)(,?\s)){1,20}([A-Za-z\-\.]+)?:(</p>|</bold>|</italic>|\n)(.|\s)*?(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)((:|,)?\s)){1,20}([A-Za-z\-\.]+)?\??(</p>|</bold>|</italic>|\n)',filestring)
if len(title) == 0:
# this works for antiseptics, eeg_comicsans, entrepreneurship, laddering, racialprofiling, snowball, sodium
|
#title0 = ''.join(title[0])
if not title:
return ""
title_first = ''.join(title[0])
#remove <> tags
titleString = re.sub(r'<(.*)>','',re.sub(r'</(.*)>','',title_first))
return titleString
def get_file_text(self, filename):
f = open(filename)
text= f.read()
f.close()
return text
def update_tmp_file(self):
shutil.copy2(self.gv.NLM_FILE_PATH,self.gv.NLM_TEMP_FILE_PATH)
def write_output(self, text):
out = open(self.gv.NLM_FILE_PATH,'w')
out.write(text)
out.close()
def run(self):
text = self.get_file_text(self.gv.NLM_TEMP_FILE_PATH)
#self.parse_authors(text)
self.parse_title(text)
self.write_output(text)
self.update_tmp_file()
|
title2 = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)((:|,)?\s)){1,20}([A-Za-z\-\.]+)?\??(</p>|</bold>|</italic>|\n)',filestring)
title = title2
|
conditional_block
|
frontmatterparser.py
|
#!/usr/bin/env python
__author__ = "Dulip Withanage"
__email__ = "[email protected]"
import re
import string
import sys
import operator
import globals as gv
import os
import subprocess
import shutil
#from django.utils.encoding import smart_str
class FrontMatterParser:
def __init__(self, gv):
self.gv = gv
def parse_authors(self, filestring):
# this works for perception-monospace, equations tables, laddering, neoliberalism, snowball, valuechain, sodium
name = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)\*?\s){2,5}(&|and|et|und)\s(([A-Za-z\-\.]+)\*?\s?){2,5}(</p>|</bold>|</italic>|\n)',filestring)
if len(name) == 0:
# this works for racialprofiling, antiseptics, eeg_comicsans, leadership, systemsthinker
# this would work for science.doc but there are way too many authors and that affects the string
# would work for rating.doc but need to fix linebreak comments from output
name2 = re.findall(r'(<p>|<bold>|<italic>)(([A-Za-z\-\.]+)(,?\s)){1,20}([A-Za-z\-\.]+)?(</p>|</bold>|</italic>)',filestring)
# this loops through strings and prefers those that occur earlier + have more periods/commas
guess2score = {}
guess2number = 0
for g in name2:
guess2 =''.join(str(e) for e in g)
periods = re.findall(r'\.',guess2)
italics = re.findall(r'italic',guess2)
guess2score[guess2] = len(periods)
guess2score[guess2] += len(italics)
guess2score[guess2] -= guess2number
guess2number += 1
#print operator.itemgetter(1)
print(guess2score.items())
print(type(operator.itemgetter(1)))
name = [max(guess2score.items(), key=operator.itemgetter(1))[0]]
# re.findall returns tuples when the pattern has groups; flatten to a string
first_name = name[0] if isinstance(name[0], str) else ''.join(str(e) for e in name[0])
striptags_name = re.sub(r'<.*>','',first_name)
authorString = re.sub(r'[Bb][Yy]\s','',striptags_name)
# this is the author string. could try sending to parscit to get individual author names.
return authorString
# entrepreneurship needs fixing, will be tough, has authors in multiple XML elements
def parse_title(self, filestring):
# need to anticipate which other special characters are allowable in titles
# first, check if a subtitle and title have wound up separated from one another
title = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)(,?\s)){1,20}([A-Za-z\-\.]+)?:(</p>|</bold>|</italic>|\n)(.|\s)*?(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)((:|,)?\s)){1,20}([A-Za-z\-\.]+)?\??(</p>|</bold>|</italic>|\n)',filestring)
if len(title) == 0:
# this works for antiseptics, eeg_comicsans, entrepreneurship, laddering, racialprofiling, snowball, sodium
title2 = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)((:|,)?\s)){1,20}([A-Za-z\-\.]+)?\??(</p>|</bold>|</italic>|\n)',filestring)
title = title2
#title0 = ''.join(title[0])
if not title:
return ""
title_first = ''.join(title[0])
#remove <> tags
titleString = re.sub(r'<(.*)>','',re.sub(r'</(.*)>','',title_first))
return titleString
def get_file_text(self, filename):
f = open(filename)
text= f.read()
f.close()
return text
def update_tmp_file(self):
shutil.copy2(self.gv.NLM_FILE_PATH,self.gv.NLM_TEMP_FILE_PATH)
|
def write_output(self, text):
out = open(self.gv.NLM_FILE_PATH,'w')
out.write(text)
out.close()
def run(self):
text = self.get_file_text(self.gv.NLM_TEMP_FILE_PATH)
#self.parse_authors(text)
self.parse_title(text)
self.write_output(text)
self.update_tmp_file()
|
random_line_split
|
|
frontmatterparser.py
|
#!/usr/bin/env python
__author__ = "Dulip Withanage"
__email__ = "[email protected]"
import re
import string
import sys
import operator
import globals as gv
import os
import subprocess
import shutil
#from django.utils.encoding import smart_str
class FrontMatterParser:
def __init__(self, gv):
self.gv = gv
def parse_authors(self, filestring):
# this works for perception-monospace, equations tables, laddering, neoliberalism, snowball, valuechain, sodium
name = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)\*?\s){2,5}(&|and|et|und)\s(([A-Za-z\-\.]+)\*?\s?){2,5}(</p>|</bold>|</italic>|\n)',filestring)
if len(name) == 0:
# this works for racialprofiling, antiseptics, eeg_comicsans, leadership, systemsthinker
# this would work for science.doc but there are way too many authors and that affects the string
# would work for rating.doc but need to fix linebreak comments from output
name2 = re.findall(r'(<p>|<bold>|<italic>)(([A-Za-z\-\.]+)(,?\s)){1,20}([A-Za-z\-\.]+)?(</p>|</bold>|</italic>)',filestring)
# this loops through strings and prefers those that occur earlier + have more periods/commas
guess2score = {}
guess2number = 0
for g in name2:
guess2 =''.join(str(e) for e in g)
periods = re.findall(r'\.',guess2)
italics = re.findall(r'italic',guess2)
guess2score[guess2] = len(periods)
guess2score[guess2] += len(italics)
guess2score[guess2] -= guess2number
guess2number += 1
#print operator.itemgetter(1)
print(guess2score.items())
print(type(operator.itemgetter(1)))
name = [max(guess2score.items(), key=operator.itemgetter(1))[0]]
# re.findall returns tuples when the pattern has groups; flatten to a string
first_name = name[0] if isinstance(name[0], str) else ''.join(str(e) for e in name[0])
striptags_name = re.sub(r'<.*>','',first_name)
authorString = re.sub(r'[Bb][Yy]\s','',striptags_name)
# this is the author string. could try sending to parscit to get individual author names.
return authorString
# entrepreneurship needs fixing, will be tough, has authors in multiple XML elements
def parse_title(self, filestring):
# need to anticipate which other special characters are allowable in titles
# first, check if a subtitle and title have wound up separated from one another
|
def get_file_text(self, filename):
f = open(filename)
text= f.read()
f.close()
return text
def update_tmp_file(self):
shutil.copy2(self.gv.NLM_FILE_PATH,self.gv.NLM_TEMP_FILE_PATH)
def write_output(self, text):
out = open(self.gv.NLM_FILE_PATH,'w')
out.write(text)
out.close()
def run(self):
text = self.get_file_text(self.gv.NLM_TEMP_FILE_PATH)
#self.parse_authors(text)
self.parse_title(text)
self.write_output(text)
self.update_tmp_file()
|
title = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)(,?\s)){1,20}([A-Za-z\-\.]+)?:(</p>|</bold>|</italic>|\n)(.|\s)*?(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)((:|,)?\s)){1,20}([A-Za-z\-\.]+)?\??(</p>|</bold>|</italic>|\n)',filestring)
if len(title) == 0:
# this works for antiseptics, eeg_comicsans, entrepreneurship, laddering, racialprofiling, snowball, sodium
title2 = re.findall(r'(\n|<p>|<bold>|<italic>)(([A-Za-z\-\.]+)((:|,)?\s)){1,20}([A-Za-z\-\.]+)?\??(</p>|</bold>|</italic>|\n)',filestring)
title = title2
#title0 = ''.join(title[0])
if not title:
return ""
title_first = ''.join(title[0])
#remove <> tags
titleString = re.sub(r'<(.*)>','',re.sub(r'</(.*)>','',title_first))
return titleString
|
identifier_body
|
calendarfetcher.js
|
/* Magic Mirror
* Node Helper: Calendar - CalendarFetcher
*
* By Michael Teeuw http://michaelteeuw.nl
* MIT Licensed.
*/
var ical = require("./vendor/ical.js");
var moment = require("moment");
var CalendarFetcher = function(url, reloadInterval, excludedEvents, maximumEntries, maximumNumberOfDays, auth) {
var self = this;
var reloadTimer = null;
var events = [];
var fetchFailedCallback = function() {};
var eventsReceivedCallback = function() {};
/* fetchCalendar()
* Initiates calendar fetch.
*/
var fetchCalendar = function() {
clearTimeout(reloadTimer);
reloadTimer = null;
var nodeVersion = Number(process.version.match(/^v(\d+\.\d+)/)[1]);
var opts = {
headers: {
"User-Agent": "Mozilla/5.0 (Node.js "+ nodeVersion + ") MagicMirror/" + global.version + " (https://github.com/MichMich/MagicMirror/)"
},
gzip: true
};
if (auth) {
if(auth.method === "bearer"){
opts.auth = {
bearer: auth.pass
}
}else{
opts.auth = {
user: auth.user,
pass: auth.pass
};
if(auth.method === "digest"){
opts.auth.sendImmediately = false;
}else{
opts.auth.sendImmediately = true;
}
}
}
ical.fromURL(url, opts, function(err, data) {
if (err) {
fetchFailedCallback(self, err);
scheduleTimer();
return;
}
// console.log(data);
var newEvents = [];
var limitFunction = function(date, i) {return i < maximumEntries;};
var eventDate = function(event, time) {
return (event[time].length === 8) ? moment(event[time], "YYYYMMDD") : moment(new Date(event[time]));
};
for (var e in data) {
var event = data[e];
var now = new Date();
var today = moment().startOf("day").toDate();
var future = moment().startOf("day").add(maximumNumberOfDays, "days").subtract(1,"seconds").toDate(); // Subtract 1 second so that events that start in the middle of the night will not repeat.
// FIXME:
// Ugly fix to solve the facebook birthday issue.
// Otherwise, the recurring events only show the birthday for next year.
var isFacebookBirthday = false;
if (typeof event.uid !== "undefined") {
if (event.uid.indexOf("@facebook.com") !== -1) {
isFacebookBirthday = true;
}
}
if (event.type === "VEVENT") {
var startDate = eventDate(event, "start");
var endDate;
if (typeof event.end !== "undefined") {
endDate = eventDate(event, "end");
} else if(typeof event.duration !== "undefined") {
var dur = moment.duration(event.duration);
endDate = startDate.clone().add(dur);
} else {
if (!isFacebookBirthday) {
endDate = startDate;
} else {
endDate = moment(startDate).add(1, "days");
}
}
// Calculate the duration of the event for use with recurring events.
var duration = parseInt(endDate.format("x")) - parseInt(startDate.format("x"));
if (event.start.length === 8) {
startDate = startDate.startOf("day");
}
var title = "Event";
if (event.summary) {
title = (typeof event.summary.val !== "undefined") ? event.summary.val : event.summary;
} else if(event.description) {
title = event.description;
}
var excluded = false,
dateFilter = null;
for (var f in excludedEvents) {
var filter = excludedEvents[f],
testTitle = title.toLowerCase(),
until = null,
useRegex = false,
regexFlags = "g";
if (filter instanceof Object) {
if (typeof filter.until !== "undefined") {
until = filter.until;
}
if (typeof filter.regex !== "undefined") {
useRegex = filter.regex;
}
// If additional advanced filtering is added in, this section
// must remain last as we overwrite the filter object with the
// filterBy string
if (filter.caseSensitive) {
filter = filter.filterBy;
testTitle = title;
} else if (useRegex) {
filter = filter.filterBy;
testTitle = title;
regexFlags += "i";
} else {
filter = filter.filterBy.toLowerCase();
}
} else {
filter = filter.toLowerCase();
}
if (testTitleByFilter(testTitle, filter, useRegex, regexFlags)) {
if (until) {
dateFilter = until;
} else {
excluded = true;
}
break;
}
}
if (excluded) {
continue;
}
var location = event.location || false;
var geo = event.geo || false;
var description = event.description || false;
if (typeof event.rrule != "undefined" && !isFacebookBirthday) {
var rule = event.rrule;
var dates = rule.between(today, future, true, limitFunction);
for (var d in dates) {
startDate = moment(new Date(dates[d]));
endDate = moment(parseInt(startDate.format("x")) + duration, "x");
if (timeFilterApplies(now, endDate, dateFilter)) {
continue;
}
if (endDate.format("x") > now) {
newEvents.push({
title: title,
startDate: startDate.format("x"),
endDate: endDate.format("x"),
fullDayEvent: isFullDayEvent(event),
class: event.class,
firstYear: event.start.getFullYear(),
location: location,
geo: geo,
description: description
});
}
}
} else {
// console.log("Single event ...");
// Single event.
var fullDayEvent = (isFacebookBirthday) ? true : isFullDayEvent(event);
if (!fullDayEvent && endDate < new Date()) {
//console.log("It's not a fullday event, and it is in the past. So skip: " + title);
continue;
}
if (fullDayEvent && endDate <= today) {
//console.log("It's a fullday event, and it is before today. So skip: " + title);
continue;
}
if (startDate > future) {
//console.log("It exceeds the maximumNumberOfDays limit. So skip: " + title);
continue;
}
if (timeFilterApplies(now, endDate, dateFilter)) {
continue;
}
// Everything is good. Add it to the list.
newEvents.push({
title: title,
startDate: startDate.format("x"),
endDate: endDate.format("x"),
fullDayEvent: fullDayEvent,
class: event.class,
location: location,
geo: geo,
description: description
});
}
}
}
newEvents.sort(function(a, b) {
return a.startDate - b.startDate;
|
//console.log(newEvents);
events = newEvents.slice(0, maximumEntries);
self.broadcastEvents();
scheduleTimer();
});
};
/* scheduleTimer()
* Schedule the timer for the next update.
*/
var scheduleTimer = function() {
//console.log('Schedule update timer.');
clearTimeout(reloadTimer);
reloadTimer = setTimeout(function() {
fetchCalendar();
}, reloadInterval);
};
/* isFullDayEvent(event)
* Checks if an event is a full-day event.
*
* argument event object - The event object to check.
*
* return bool - The event is a full-day event.
*/
var isFullDayEvent = function(event) {
if (event.start.length === 8) {
return true;
}
var start = event.start || 0;
var startDate = new Date(start);
var end = event.end || 0;
if (((end - start) % (24 * 60 * 60 * 1000)) === 0 && startDate.getHours() === 0 && startDate.getMinutes() === 0) {
// Is exactly 24 hours and starts at midnight.
return true;
}
return false;
};
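// Illustrative example (assumed): a date-only iCal DTSTART such as "20170101"
// has length 8, so the event is treated as a full-day event without
// inspecting its start or end times.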
/* timeFilterApplies()
* Determines if the user defined time filter should apply
*
* argument now Date - Date object using previously created object for consistency
* argument endDate Moment - Moment object representing the event end date
* argument filter string - The time to subtract from the end date to determine if an event should be shown
*
* return bool - The event should be filtered out
*/
var timeFilterApplies = function(now, endDate, filter) {
if (filter) {
var until = filter.split(" "),
value = parseInt(until[0]),
increment = until[1].slice(-1) === "s" ? until[1] : until[1] + "s", // Massage the data for moment js
filterUntil = moment(endDate.format()).subtract(value, increment);
return now < filterUntil.format("x");
}
return false;
};
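// Illustrative example (assumed): with filter "2 days", an event ending more
// than 2 days from now is filtered out, i.e. it stays hidden until 2 days
// before its end date:
//   timeFilterApplies(new Date(), moment().add(3, "days"), "2 days") === true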
var testTitleByFilter = function (title, filter, useRegex, regexFlags) {
if (useRegex) {
// Assume if leading slash, there is also trailing slash
if (filter[0] === "/") {
// Strip leading and trailing slashes
filter = filter.substr(1).slice(0, -1);
}
filter = new RegExp(filter, regexFlags);
return filter.test(title);
} else {
return title.includes(filter);
}
}
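// Illustrative example (assumed): a filter wrapped in slashes is compiled as
// a regular expression, so "/stand.?up/" matches the title "weekly standup":
//   testTitleByFilter("weekly standup", "/stand.?up/", true, "g") === true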
/* public methods */
/* startFetch()
* Initiate fetchCalendar();
*/
this.startFetch = function() {
fetchCalendar();
};
/* broadcastEvents()
* Broadcast the existing events.
*/
this.broadcastEvents = function() {
//console.log('Broadcasting ' + events.length + ' events.');
eventsReceivedCallback(self);
};
/* onReceive(callback)
* Sets the on success callback
*
* argument callback function - The on success callback.
*/
this.onReceive = function(callback) {
eventsReceivedCallback = callback;
};
/* onError(callback)
* Sets the on error callback
*
* argument callback function - The on error callback.
*/
this.onError = function(callback) {
fetchFailedCallback = callback;
};
/* url()
* Returns the url of this fetcher.
*
* return string - The url of this fetcher.
*/
this.url = function() {
return url;
};
/* events()
* Returns current available events for this fetcher.
*
* return array - The current available events for this fetcher.
*/
this.events = function() {
return events;
};
};
module.exports = CalendarFetcher;
|
});
|
random_line_split
|
calendarfetcher.js
|
/* Magic Mirror
* Node Helper: Calendar - CalendarFetcher
*
* By Michael Teeuw http://michaelteeuw.nl
* MIT Licensed.
*/
var ical = require("./vendor/ical.js");
var moment = require("moment");
var CalendarFetcher = function(url, reloadInterval, excludedEvents, maximumEntries, maximumNumberOfDays, auth) {
var self = this;
var reloadTimer = null;
var events = [];
var fetchFailedCallback = function() {};
var eventsReceivedCallback = function() {};
/* fetchCalendar()
* Initiates calendar fetch.
*/
var fetchCalendar = function() {
clearTimeout(reloadTimer);
reloadTimer = null;
var nodeVersion = Number(process.version.match(/^v(\d+\.\d+)/)[1]);
var opts = {
headers: {
"User-Agent": "Mozilla/5.0 (Node.js "+ nodeVersion + ") MagicMirror/" + global.version + " (https://github.com/MichMich/MagicMirror/)"
},
gzip: true
};
if (auth) {
if(auth.method === "bearer"){
opts.auth = {
bearer: auth.pass
}
}else{
opts.auth = {
user: auth.user,
pass: auth.pass
};
if(auth.method === "digest"){
opts.auth.sendImmediately = false;
}else{
opts.auth.sendImmediately = true;
}
}
}
ical.fromURL(url, opts, function(err, data) {
if (err) {
fetchFailedCallback(self, err);
scheduleTimer();
return;
}
// console.log(data);
var newEvents = [];
var limitFunction = function(date, i) {return i < maximumEntries;};
var eventDate = function(event, time) {
return (event[time].length === 8) ? moment(event[time], "YYYYMMDD") : moment(new Date(event[time]));
};
for (var e in data) {
var event = data[e];
var now = new Date();
var today = moment().startOf("day").toDate();
var future = moment().startOf("day").add(maximumNumberOfDays, "days").subtract(1,"seconds").toDate(); // Subtract 1 second so that events that start in the middle of the night will not repeat.
// FIXME:
// Ugly fix to solve the facebook birthday issue.
// Otherwise, the recurring events only show the birthday for next year.
var isFacebookBirthday = false;
if (typeof event.uid !== "undefined") {
if (event.uid.indexOf("@facebook.com") !== -1) {
isFacebookBirthday = true;
}
}
if (event.type === "VEVENT") {
var startDate = eventDate(event, "start");
var endDate;
if (typeof event.end !== "undefined") {
endDate = eventDate(event, "end");
} else if(typeof event.duration !== "undefined") {
var dur = moment.duration(event.duration);
endDate = startDate.clone().add(dur);
} else {
if (!isFacebookBirthday) {
endDate = startDate;
} else {
endDate = moment(startDate).add(1, "days");
}
}
// Calculate the duration of the event for use with recurring events.
var duration = parseInt(endDate.format("x")) - parseInt(startDate.format("x"));
if (event.start.length === 8) {
startDate = startDate.startOf("day");
}
var title = "Event";
if (event.summary) {
title = (typeof event.summary.val !== "undefined") ? event.summary.val : event.summary;
} else if(event.description) {
title = event.description;
}
var excluded = false,
dateFilter = null;
for (var f in excludedEvents) {
var filter = excludedEvents[f],
testTitle = title.toLowerCase(),
until = null,
useRegex = false,
regexFlags = "g";
if (filter instanceof Object) {
if (typeof filter.until !== "undefined") {
until = filter.until;
}
if (typeof filter.regex !== "undefined")
|
// If additional advanced filtering is added in, this section
// must remain last as we overwrite the filter object with the
// filterBy string
if (filter.caseSensitive) {
filter = filter.filterBy;
testTitle = title;
} else if (useRegex) {
filter = filter.filterBy;
testTitle = title;
regexFlags += "i";
} else {
filter = filter.filterBy.toLowerCase();
}
} else {
filter = filter.toLowerCase();
}
if (testTitleByFilter(testTitle, filter, useRegex, regexFlags)) {
if (until) {
dateFilter = until;
} else {
excluded = true;
}
break;
}
}
if (excluded) {
continue;
}
var location = event.location || false;
var geo = event.geo || false;
var description = event.description || false;
if (typeof event.rrule != "undefined" && !isFacebookBirthday) {
var rule = event.rrule;
var dates = rule.between(today, future, true, limitFunction);
for (var d in dates) {
startDate = moment(new Date(dates[d]));
endDate = moment(parseInt(startDate.format("x")) + duration, "x");
if (timeFilterApplies(now, endDate, dateFilter)) {
continue;
}
if (endDate.format("x") > now) {
newEvents.push({
title: title,
startDate: startDate.format("x"),
endDate: endDate.format("x"),
fullDayEvent: isFullDayEvent(event),
class: event.class,
firstYear: event.start.getFullYear(),
location: location,
geo: geo,
description: description
});
}
}
} else {
// console.log("Single event ...");
// Single event.
var fullDayEvent = (isFacebookBirthday) ? true : isFullDayEvent(event);
if (!fullDayEvent && endDate < new Date()) {
//console.log("It's not a fullday event, and it is in the past. So skip: " + title);
continue;
}
if (fullDayEvent && endDate <= today) {
//console.log("It's a fullday event, and it is before today. So skip: " + title);
continue;
}
if (startDate > future) {
//console.log("It exceeds the maximumNumberOfDays limit. So skip: " + title);
continue;
}
if (timeFilterApplies(now, endDate, dateFilter)) {
continue;
}
// Everything is good. Add it to the list.
newEvents.push({
title: title,
startDate: startDate.format("x"),
endDate: endDate.format("x"),
fullDayEvent: fullDayEvent,
class: event.class,
location: location,
geo: geo,
description: description
});
}
}
}
newEvents.sort(function(a, b) {
return a.startDate - b.startDate;
});
//console.log(newEvents);
events = newEvents.slice(0, maximumEntries);
self.broadcastEvents();
scheduleTimer();
});
};
/* scheduleTimer()
* Schedule the timer for the next update.
*/
var scheduleTimer = function() {
//console.log('Schedule update timer.');
clearTimeout(reloadTimer);
reloadTimer = setTimeout(function() {
fetchCalendar();
}, reloadInterval);
};
/* isFullDayEvent(event)
* Checks if an event is a full-day event.
*
* argument event object - The event object to check.
*
* return bool - The event is a full-day event.
*/
var isFullDayEvent = function(event) {
if (event.start.length === 8) {
return true;
}
var start = event.start || 0;
var startDate = new Date(start);
var end = event.end || 0;
if (((end - start) % (24 * 60 * 60 * 1000)) === 0 && startDate.getHours() === 0 && startDate.getMinutes() === 0) {
// Is exactly 24 hours and starts at midnight.
return true;
}
return false;
};
/* timeFilterApplies()
* Determines if the user defined time filter should apply
*
* argument now Date - Date object using previously created object for consistency
* argument endDate Moment - Moment object representing the event end date
* argument filter string - The time to subtract from the end date to determine if an event should be shown
*
* return bool - The event should be filtered out
*/
var timeFilterApplies = function(now, endDate, filter) {
if (filter) {
var until = filter.split(" "),
value = parseInt(until[0]),
increment = until[1].slice(-1) === "s" ? until[1] : until[1] + "s", // Massage the data for moment js
filterUntil = moment(endDate.format()).subtract(value, increment);
return now < filterUntil.format("x");
}
return false;
};
var testTitleByFilter = function (title, filter, useRegex, regexFlags) {
if (useRegex) {
// Assume if leading slash, there is also trailing slash
if (filter[0] === "/") {
// Strip leading and trailing slashes
filter = filter.substr(1).slice(0, -1);
}
filter = new RegExp(filter, regexFlags);
return filter.test(title);
} else {
return title.includes(filter);
}
}
/* public methods */
/* startFetch()
* Initiate fetchCalendar();
*/
this.startFetch = function() {
fetchCalendar();
};
/* broadcastEvents()
* Broadcast the existing events.
*/
this.broadcastEvents = function() {
//console.log('Broadcasting ' + events.length + ' events.');
eventsReceivedCallback(self);
};
/* onReceive(callback)
* Sets the on success callback
*
* argument callback function - The on success callback.
*/
this.onReceive = function(callback) {
eventsReceivedCallback = callback;
};
/* onError(callback)
* Sets the on error callback
*
* argument callback function - The on error callback.
*/
this.onError = function(callback) {
fetchFailedCallback = callback;
};
/* url()
* Returns the url of this fetcher.
*
* return string - The url of this fetcher.
*/
this.url = function() {
return url;
};
/* events()
* Returns current available events for this fetcher.
*
* return array - The current available events for this fetcher.
*/
this.events = function() {
return events;
};
};
module.exports = CalendarFetcher;
|
{
useRegex = filter.regex;
}
|
conditional_block
|
application.js
|
// This is a manifest file that'll be compiled into application.js, which will include all the files
// listed below.
//
// Any JavaScript/Coffee file within this directory, lib/assets/javascripts, or any plugin's
// vendor/assets/javascripts directory can be referenced here using a relative path.
//
// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the
// compiled file. JavaScript code in this file should be added after the last require_* statement.
//
// Read Sprockets README (https://github.com/rails/sprockets#sprockets-directives) for details
// about supported directives.
//
// require rails-ujs
//= require turbolinks
//= require jquery
//= require jquery_ujs
//= require bootstrap-sprockets
//= require bootstrap.file-input
document.addEventListener("turbolinks:load", function() {
if ($('.btn-file').length === 0) {
$('input[type=file]').bootstrapFileInput();
$('.file-inputs').bootstrapFileInput();
}
});
$(document).ready(function() {
// Javascript for button validation
$(document).on('click', 'input[type=radio]', function() {
var el = $(this);
var col = el.data("col");
el
.parents(".table")
.find("input[data-col=" + col + "]")
.prop("checked", false);
el.prop("checked", true);
})
// Javascript for submit button validation
$(document).on('click', '#quizSubmit', function(event) {
var questions = validate_form();
if (questions.size === 0) {
return;
}
else {
questionString = "";
questions.forEach(function(value) {
questionString = questionString + ", " + value;
});
if (questions.size == 1) {
alert("Please finish question" + questionString.substring(1) + " before submitting!");
} else {
alert("Please finish questions" + questionString.substring(1) + " before submitting!");
}
event.preventDefault();
}
})
// Iterates through all answers and checks that they are ranked
// Returns an empty Set if all are ranked; otherwise the numbers of the unfinished questions
function
|
() {
var numbers = new Set();
var scroll = 0;
for (var i = 1; i < 19; i++) {
$("#q" + i).css("border", "2px solid white");
for (var j = 1; j < 5; j++) {
var name = "q" + i + "a" + j;
if ($("input[name='" + name + "']:checked").length == 0) {
numbers.add(i);
$("#q" + i).css("border", "2px solid red");
if (scroll == 0) {
var top = $('#q' + i).position().top;
$(window).scrollTop( top - 75); //Offset because header blocks some of screen
scroll = 1;
}
}
}
}
return numbers;
}
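// Illustrative (assumed): on a fully ranked quiz (18 questions x 4 answers,
// all checked), validate_form() returns an empty Set, so the submit handler
// above proceeds without calling event.preventDefault().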
// Prevents arrow keys from moving radio button
$(document).on('click', 'input[type=radio]', function() {
document.addEventListener("keydown", function (e) {
if ([37].indexOf(e.keyCode) > -1) { // left
e.preventDefault();
window.scrollBy(-50, 0);
}
if ([38].indexOf(e.keyCode) > -1) { //up
e.preventDefault();
window.scrollBy(0, -50);
}
if ([39].indexOf(e.keyCode) > -1) { //right
e.preventDefault();
window.scrollBy(50, 0);
}
if([40].indexOf(e.keyCode) > -1) { //down
e.preventDefault();
window.scrollBy(0, 50);
}
}, false);
})
});
|
validate_form
|
identifier_name
|
application.js
|
// This is a manifest file that'll be compiled into application.js, which will include all the files
// listed below.
//
// Any JavaScript/Coffee file within this directory, lib/assets/javascripts, or any plugin's
// vendor/assets/javascripts directory can be referenced here using a relative path.
//
// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the
// compiled file. JavaScript code in this file should be added after the last require_* statement.
//
// Read Sprockets README (https://github.com/rails/sprockets#sprockets-directives) for details
// about supported directives.
//
// require rails-ujs
//= require turbolinks
//= require jquery
//= require jquery_ujs
//= require bootstrap-sprockets
//= require bootstrap.file-input
document.addEventListener("turbolinks:load", function() {
if ($('.btn-file').length === 0) {
$('input[type=file]').bootstrapFileInput();
$('.file-inputs').bootstrapFileInput();
}
});
$(document).ready(function() {
// Javascript for button validation
$(document).on('click', 'input[type=radio]', function() {
var el = $(this);
var col = el.data("col");
el
.parents(".table")
.find("input[data-col=" + col + "]")
.prop("checked", false);
el.prop("checked", true);
})
// Javascript for submit button validation
$(document).on('click', '#quizSubmit', function(event) {
var questions = validate_form();
if (questions.size === 0) {
return;
}
else {
questionString = "";
questions.forEach(function(value) {
questionString = questionString + ", " + value;
});
if (questions.size == 1) {
alert("Please finish question" + questionString.substring(1) + " before submitting!");
} else {
alert("Please finish questions" + questionString.substring(1) + " before submitting!");
}
event.preventDefault();
}
})
// Iterates through all answers and checks that they are ranked
// Returns an empty Set if all are ranked; otherwise the numbers of the unfinished questions
function validate_form() {
var numbers = new Set();
var scroll = 0;
for (var i = 1; i < 19; i++) {
$("#q" + i).css("border", "2px solid white");
for (var j = 1; j < 5; j++) {
var name = "q" + i + "a" + j;
if ($("input[name='" + name + "']:checked").length == 0) {
numbers.add(i);
$("#q" + i).css("border", "2px solid red");
if (scroll == 0) {
var top = $('#q' + i).position().top;
$(window).scrollTop( top - 75); //Offset because header blocks some of screen
scroll = 1;
}
}
}
}
return numbers;
}
// Prevents arrow keys from moving radio button
$(document).on('click', 'input[type=radio]', function() {
document.addEventListener("keydown", function (e) {
if ([37].indexOf(e.keyCode) > -1) { // left
e.preventDefault();
window.scrollBy(-50, 0);
}
if ([38].indexOf(e.keyCode) > -1)
|
if ([39].indexOf(e.keyCode) > -1) { //right
e.preventDefault();
window.scrollBy(50, 0);
}
if([40].indexOf(e.keyCode) > -1) { //down
e.preventDefault();
window.scrollBy(0, 50);
}
}, false);
})
});
|
{ //up
e.preventDefault();
window.scrollBy(0, -50);
}
|
conditional_block
|
application.js
|
// This is a manifest file that'll be compiled into application.js, which will include all the files
// listed below.
//
// Any JavaScript/Coffee file within this directory, lib/assets/javascripts, or any plugin's
// vendor/assets/javascripts directory can be referenced here using a relative path.
//
// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the
// compiled file. JavaScript code in this file should be added after the last require_* statement.
//
// Read Sprockets README (https://github.com/rails/sprockets#sprockets-directives) for details
// about supported directives.
//
// require rails-ujs
//= require turbolinks
//= require jquery
//= require jquery_ujs
//= require bootstrap-sprockets
//= require bootstrap.file-input
document.addEventListener("turbolinks:load", function() {
if ($('.btn-file').length === 0) {
$('input[type=file]').bootstrapFileInput();
$('.file-inputs').bootstrapFileInput();
}
});
$(document).ready(function() {
// Javascript for button validation
$(document).on('click', 'input[type=radio]', function() {
var el = $(this);
var col = el.data("col");
el
.parents(".table")
.find("input[data-col=" + col + "]")
.prop("checked", false);
el.prop("checked", true);
})
// Javascript for submit button validation
$(document).on('click', '#quizSubmit', function(event) {
var questions = validate_form();
if (questions.size === 0) {
return;
}
else {
questionString = "";
questions.forEach(function(value) {
questionString = questionString + ", " + value;
});
if (questions.size == 1) {
alert("Please finish question" + questionString.substring(1) + " before submitting!");
} else {
alert("Please finish questions" + questionString.substring(1) + " before submitting!");
}
event.preventDefault();
}
})
// Iterates through all answers and checks that they are ranked
// Returns an empty Set if all are ranked; otherwise the numbers of the unfinished questions
function validate_form()
|
// Prevents arrow keys from moving radio button
$(document).on('click', 'input[type=radio]', function() {
document.addEventListener("keydown", function (e) {
if ([37].indexOf(e.keyCode) > -1) { // left
e.preventDefault();
window.scrollBy(-50, 0);
}
if ([38].indexOf(e.keyCode) > -1) { //up
e.preventDefault();
window.scrollBy(0, -50);
}
if ([39].indexOf(e.keyCode) > -1) { //right
e.preventDefault();
window.scrollBy(50, 0);
}
if([40].indexOf(e.keyCode) > -1) { //down
e.preventDefault();
window.scrollBy(0, 50);
}
}, false);
})
});
|
{
var numbers = new Set();
var scroll = 0;
for (var i = 1; i < 19; i++) {
$("#q" + i).css("border", "2px solid white");
for (var j = 1; j < 5; j++) {
var name = "q" + i + "a" + j;
if ($("input[name='" + name + "']:checked").length == 0) {
numbers.add(i);
$("#q" + i).css("border", "2px solid red");
if (scroll == 0) {
var top = $('#q' + i).position().top;
$(window).scrollTop( top - 75); //Offset because header blocks some of screen
scroll = 1;
}
}
}
}
return numbers;
}
|
identifier_body
|
application.js
|
// This is a manifest file that'll be compiled into application.js, which will include all the files
// listed below.
//
// Any JavaScript/Coffee file within this directory, lib/assets/javascripts, or any plugin's
// vendor/assets/javascripts directory can be referenced here using a relative path.
//
// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the
// compiled file. JavaScript code in this file should be added after the last require_* statement.
//
// Read Sprockets README (https://github.com/rails/sprockets#sprockets-directives) for details
// about supported directives.
//
// require rails-ujs
//= require turbolinks
//= require jquery
//= require jquery_ujs
//= require bootstrap-sprockets
//= require bootstrap.file-input
document.addEventListener("turbolinks:load", function() {
if ($('.btn-file').length === 0) {
$('input[type=file]').bootstrapFileInput();
$('.file-inputs').bootstrapFileInput();
}
});
$(document).ready(function() {
// Javascript for button validation
$(document).on('click', 'input[type=radio]', function() {
var el = $(this);
var col = el.data("col");
el
.parents(".table")
.find("input[data-col=" + col + "]")
.prop("checked", false);
el.prop("checked", true);
})
// Javascript for submit button validation
$(document).on('click', '#quizSubmit', function(event) {
var questions = validate_form();
if (questions.size === 0) {
return;
}
else {
questionString = "";
questions.forEach(function(value) {
questionString = questionString + ", " + value;
});
if (questions.size == 1) {
alert("Please finish question" + questionString.substring(1) + " before submitting!");
} else {
alert("Please finish questions" + questionString.substring(1) + " before submitting!");
}
event.preventDefault();
}
})
// Iterates through all answers and checks that they are ranked.
// Returns an empty set if all are answered; otherwise returns the set of unfinished question numbers.
function validate_form() {
var numbers = new Set();
var scroll = 0;
for (var i = 1; i < 19; i++) {
$("#q" + i).css("border", "2px solid white");
for (var j = 1; j < 5; j++) {
var name = "q" + i + "a" + j;
if ($("input[name='" + name + "']:checked").length == 0) {
numbers.add(i);
$("#q" + i).css("border", "2px solid red");
if (scroll == 0) {
var top = $('#q' + i).position().top;
$(window).scrollTop( top - 75); //Offset because header blocks some of screen
scroll = 1;
}
}
}
}
return numbers;
}
// Prevents arrow keys from moving the focused radio button; scrolls the page instead. Note: this registers a new keydown listener on every radio click.
$(document).on('click', 'input[type=radio]', function() {
document.addEventListener("keydown", function (e) {
if ([37].indexOf(e.keyCode) > -1) { // left
e.preventDefault();
window.scrollBy(-50, 0);
}
if ([38].indexOf(e.keyCode) > -1) { //up
e.preventDefault();
|
e.preventDefault();
window.scrollBy(50, 0);
}
if ([40].indexOf(e.keyCode) > -1) { //down
e.preventDefault();
window.scrollBy(0, 50);
}
}, false);
})
});
|
window.scrollBy(0, -50);
}
if ([39].indexOf(e.keyCode) > -1) { //right
|
random_line_split
|
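For reference, validate_form() returns a Set of unfinished question numbers and the submit handler joins them into the alert text. A minimal Python sketch of that contract (illustrative only; the function name and the answers dict are hypothetical, while the real code reads checked radio buttons from the DOM):

def unanswered(answers, questions=18, per_question=4):
    # Mirrors validate_form(): a question is unfinished unless every one
    # of its rows "q<i>a<j>" has a checked answer.
    missing = set()
    for q in range(1, questions + 1):
        for a in range(1, per_question + 1):
            if not answers.get("q%da%d" % (q, a)):
                missing.add(q)
    return missing

# Everything answered except question 18:
missing = unanswered({"q%da%d" % (q, a): True for q in range(1, 18) for a in range(1, 5)})
print("Please finish question %s before submitting!" % ", ".join(str(q) for q in sorted(missing)))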
authored-state.tsx
|
import * as React from "react";
import { useState, useEffect, useRef } from "react";
interface Props {
id: string;
name: string;
authoredState: string;
}
const prettyAuthoredState = (authoredState: string) => {
try {
const json = JSON.parse(authoredState);
return JSON.stringify(json, null, 2);
} catch (e) {
return "{}";
}
};
const useValidateAuthoredState = (_authoredState: string) => {
const [authoredState, setAuthoredState] = useState(_authoredState);
const [isValidJSON, setIsValidJSON] = useState(true);
useEffect(() => {
try {
JSON.parse(authoredState);
setIsValidJSON(true);
} catch (e) {
setIsValidJSON(false);
}
}, [authoredState]);
return {isValidJSON, setAuthoredState};
};
export const AuthoredState: React.FC<Props> = ({id, name, authoredState}) => {
const [edit, setEdit] = useState(false);
const {isValidJSON, setAuthoredState} = useValidateAuthoredState(authoredState);
const prettyState = prettyAuthoredState(authoredState);
const textareaRef = useRef<HTMLTextAreaElement|null>(null);
const handleEditChange = () => {
if (textareaRef.current)
|
};
const renderEditMode = () => {
const style: React.CSSProperties = {width: "98%", height: "200px", border: "1px solid #aaa", outline: "none"};
return (
<textarea ref={textareaRef} onChange={handleEditChange} id={id} name={name} style={style} defaultValue={prettyState} />
);
};
const renderReadOnlyMode = () => {
return (
<div style={{padding: 10, border: "1px solid #aaa", whiteSpace: "pre"}}>
{prettyState}
</div>
);
};
const handleToggleEdit = () => {
setEdit(!edit);
if (!edit) {
setTimeout(() => textareaRef.current?.focus(), 1);
}
};
const editCheckboxLabel = "Edit authored state";
return (
<div>
{edit ? renderEditMode() : renderReadOnlyMode()}
<div style={{marginTop: 5}}>
{isValidJSON ? "JSON is valid" : <span style={{color: "#f00"}}>JSON is INVALID!</span>}
</div>
<p>
<input type="checkbox" checked={edit} onChange={handleToggleEdit} /> {editCheckboxLabel}
</p>
{edit ? <p>
<strong>Note:</strong> any changes you make on the Authoring tab will be superseded
by any changes made here when you save.<br/>You must leave the "{editCheckboxLabel}" checkbox
enabled for your changes to be saved.
</p> : undefined}
</div>
);
};
|
{
// this is only here to check if the JSON is valid, the text area is not a controlled component
setAuthoredState(textareaRef.current.value);
}
|
conditional_block
|
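prettyAuthoredState above is simply parse-then-reserialize with a fallback for invalid input; a small illustrative Python equivalent of the same logic:

import json

def pretty_authored_state(raw):
    # Re-serialize with 2-space indentation, as in prettyAuthoredState;
    # invalid JSON falls back to "{}".
    try:
        return json.dumps(json.loads(raw), indent=2)
    except ValueError:
        return "{}"

print(pretty_authored_state('{"a": 1, "b": [2, 3]}'))
print(pretty_authored_state("not json"))  # -> {}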
authored-state.tsx
|
import * as React from "react";
import { useState, useEffect, useRef } from "react";
interface Props {
id: string;
name: string;
authoredState: string;
}
const prettyAuthoredState = (authoredState: string) => {
try {
const json = JSON.parse(authoredState);
return JSON.stringify(json, null, 2);
} catch (e) {
return "{}";
}
};
const useValidateAuthoredState = (_authoredState: string) => {
const [authoredState, setAuthoredState] = useState(_authoredState);
const [isValidJSON, setIsValidJSON] = useState(true);
useEffect(() => {
try {
JSON.parse(authoredState);
|
return {isValidJSON, setAuthoredState};
};
export const AuthoredState: React.FC<Props> = ({id, name, authoredState}) => {
const [edit, setEdit] = useState(false);
const {isValidJSON, setAuthoredState} = useValidateAuthoredState(authoredState);
const prettyState = prettyAuthoredState(authoredState);
const textareaRef = useRef<HTMLTextAreaElement|null>(null);
const handleEditChange = () => {
if (textareaRef.current) {
// this is only here to check if the JSON is valid, the text area is not a controlled component
setAuthoredState(textareaRef.current.value);
}
};
const renderEditMode = () => {
const style: React.CSSProperties = {width: "98%", height: "200px", border: "1px solid #aaa", outline: "none"};
return (
<textarea ref={textareaRef} onChange={handleEditChange} id={id} name={name} style={style} defaultValue={prettyState} />
);
};
const renderReadOnlyMode = () => {
return (
<div style={{padding: 10, border: "1px solid #aaa", whiteSpace: "pre"}}>
{prettyState}
</div>
);
};
const handleToggleEdit = () => {
setEdit(!edit);
if (!edit) {
setTimeout(() => textareaRef.current?.focus(), 1);
}
};
const editCheckboxLabel = "Edit authored state";
return (
<div>
{edit ? renderEditMode() : renderReadOnlyMode()}
<div style={{marginTop: 5}}>
{isValidJSON ? "JSON is valid" : <span style={{color: "#f00"}}>JSON is INVALID!</span>}
</div>
<p>
<input type="checkbox" checked={edit} onChange={handleToggleEdit} /> {editCheckboxLabel}
</p>
{edit ? <p>
<strong>Note:</strong> any changes you make on the Authoring tab will be superseded
by any changes made here when you save.<br/>You must leave the "{editCheckboxLabel}" checkbox
enabled for your changes to be saved.
</p> : undefined}
</div>
);
};
|
setIsValidJSON(true);
} catch (e) {
setIsValidJSON(false);
}
}, [authoredState]);
|
random_line_split
|
helper_objects.rs
|
//! Defines some structs, enums, and constants mainly useful for passing information around
//! over the FFI and over threads.
use std::collections::HashMap;
use std::sync::Mutex;
use libc::{c_char, c_void, uint64_t, c_double, c_int};
use futures::sync::mpsc::UnboundedSender;
use futures::Stream;
use futures::stream::BoxStream;
use tickgrinder_util::transport::command_server::CommandServer;
use tickgrinder_util::trading::broker::*;
use tickgrinder_util::trading::tick::*;
pub const NULL: *mut c_void = 0 as *mut c_void;
/// Contains all possible commands that can be received by the broker server.
#[repr(C)]
#[derive(Clone)]
#[allow(dead_code, non_camel_case_types)]
pub enum ServerCommand {
MARKET_OPEN,
MARKET_CLOSE,
LIST_ACCOUNTS,
DISCONNECT,
PING,
INIT_TICK_SUB,
GET_OFFER_ROW,
DELETE_ORDER,
MODIFY_ORDER,
}
/// Contains all possible responses that can be received by the broker server.
#[repr(C)]
#[derive(Clone, Debug)]
#[allow(dead_code, non_camel_case_types)]
pub enum ServerResponse {
POSITION_OPENED,
POSITION_CLOSED,
TRADE_EXECUTED,
TRADE_CLOSED,
SESSION_TERMINATED,
PONG,
ERROR,
TICK_SUB_SUCCESSFUL,
OFFER_ROW,
ORDER_MODIFIED,
}
/// A packet of information asynchronously received from the broker server.
#[repr(C)]
#[derive(Clone)]
pub struct ServerMessage {
pub response: ServerResponse,
pub payload: *mut c_void,
}
/// A packet of information that can be sent to the broker server.
#[repr(C)]
#[derive(Clone)]
pub struct
|
{
pub command: ServerCommand,
pub payload: *mut c_void,
}
pub struct FXCMNative {
pub settings_hash: HashMap<String, String>,
pub server_environment: *mut c_void,
pub raw_rx: Option<BoxStream<(u64, BrokerResult), ()>>,
pub tickstream_obj: Mutex<Tickstream>,
}
// TODO: Move to Util
#[derive(Debug)]
#[repr(C)]
#[allow(dead_code)]
pub struct CTick {
pub timestamp: uint64_t,
pub bid: c_double,
pub ask: c_double,
}
// TODO: Move to Util
#[derive(Debug)]
#[repr(C)]
pub struct CSymbolTick {
pub symbol: *const c_char,
pub timestamp: uint64_t,
pub bid: c_double,
pub ask: c_double,
}
impl CSymbolTick {
/// Converts a CSymbolTick into a Tick given the number of decimal places of precision.
pub fn to_tick(&self, decimals: usize) -> Tick {
let multiplier = 10usize.pow(decimals as u32) as f64;
let bid_pips = self.bid * multiplier;
let ask_pips = self.ask * multiplier;
Tick {
timestamp: self.timestamp,
bid: bid_pips as usize,
ask: ask_pips as usize,
}
}
}
/// Contains data necessary to initialize a tickstream
#[repr(C)]
pub struct TickstreamDef {
pub env_ptr: *mut c_void,
pub cb: Option<extern fn (tx_ptr: *mut c_void, cst: CSymbolTick)>,
}
/// Holds the currently subscribed symbols as well as a channel to send them through
pub struct Tickstream {
pub subbed_pairs: Vec<SubbedPair>,
pub cs: CommandServer,
}
/// A pair that a user has subscribed to, containing the symbol, a sender through which to
/// send ticks, and the decimal precision of the exchange rate's float value.
pub struct SubbedPair {
pub symbol: *const c_char,
pub sender: UnboundedSender<Tick>,
pub decimals: usize,
}
/// Holds the state for the `handle_message` function
pub struct HandlerState {
pub sender: UnboundedSender<(u64, BrokerResult)>,
pub cs: CommandServer,
}
/// A request to open or close a position at market price.
#[repr(C)]
#[allow(dead_code)]
struct MarketRequest {
pub symbol: *const c_char,
pub quantity: c_int,
// when opening a long or closing a short, should be true
// when opening a short or closing a long, should be false
pub is_long: bool,
pub uuid: *const c_char,
}
// something to hold our environment so we can convince Rust to send it between threads
#[derive(Clone)]
pub struct Spaceship(pub *mut c_void);
unsafe impl Send for Spaceship {}
unsafe impl Send for FXCMNative {}
unsafe impl Send for ServerMessage {}
unsafe impl Send for ClientMessage {}
|
ClientMessage
|
identifier_name
|
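Because these structs are #[repr(C)], a foreign runtime can mirror them field for field. A hypothetical Python ctypes sketch of CSymbolTick (the field order and widths must match the Rust definition exactly; mirroring ClientMessage would additionally require the enum's C integer representation, assumed to be a plain int):

import ctypes

class CSymbolTick(ctypes.Structure):
    # Mirrors the #[repr(C)] CSymbolTick: *const c_char -> c_char_p,
    # uint64_t -> c_uint64, c_double -> c_double.
    _fields_ = [
        ("symbol", ctypes.c_char_p),
        ("timestamp", ctypes.c_uint64),
        ("bid", ctypes.c_double),
        ("ask", ctypes.c_double),
    ]

tick = CSymbolTick(b"EUR/USD", 1500000000, 1.18415, 1.18436)
print(tick.symbol, tick.timestamp, tick.bid, tick.ask)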
helper_objects.rs
|
//! Defines some structs, enums, and constants mainly useful for passing information around
//! over the FFI and over threads.
use std::collections::HashMap;
use std::sync::Mutex;
use libc::{c_char, c_void, uint64_t, c_double, c_int};
use futures::sync::mpsc::UnboundedSender;
use futures::Stream;
use futures::stream::BoxStream;
use tickgrinder_util::transport::command_server::CommandServer;
use tickgrinder_util::trading::broker::*;
use tickgrinder_util::trading::tick::*;
pub const NULL: *mut c_void = 0 as *mut c_void;
/// Contains all possible commands that can be received by the broker server.
#[repr(C)]
#[derive(Clone)]
#[allow(dead_code, non_camel_case_types)]
pub enum ServerCommand {
MARKET_OPEN,
MARKET_CLOSE,
LIST_ACCOUNTS,
DISCONNECT,
PING,
INIT_TICK_SUB,
GET_OFFER_ROW,
DELETE_ORDER,
MODIFY_ORDER,
}
/// Contains all possible responses that can be received by the broker server.
#[repr(C)]
#[derive(Clone, Debug)]
#[allow(dead_code, non_camel_case_types)]
pub enum ServerResponse {
POSITION_OPENED,
POSITION_CLOSED,
TRADE_EXECUTED,
TRADE_CLOSED,
SESSION_TERMINATED,
PONG,
ERROR,
TICK_SUB_SUCCESSFUL,
OFFER_ROW,
ORDER_MODIFIED,
}
/// A packet of information asynchronously received from the broker server.
#[repr(C)]
#[derive(Clone)]
pub struct ServerMessage {
pub response: ServerResponse,
pub payload: *mut c_void,
}
/// A packet of information that can be sent to the broker server.
#[repr(C)]
#[derive(Clone)]
pub struct ClientMessage {
pub command: ServerCommand,
pub payload: *mut c_void,
}
pub struct FXCMNative {
pub settings_hash: HashMap<String, String>,
pub server_environment: *mut c_void,
pub raw_rx: Option<BoxStream<(u64, BrokerResult), ()>>,
pub tickstream_obj: Mutex<Tickstream>,
}
// TODO: Move to Util
#[derive(Debug)]
#[repr(C)]
#[allow(dead_code)]
pub struct CTick {
pub timestamp: uint64_t,
pub bid: c_double,
pub ask: c_double,
}
// TODO: Move to Util
#[derive(Debug)]
#[repr(C)]
pub struct CSymbolTick {
pub symbol: *const c_char,
pub timestamp: uint64_t,
|
impl CSymbolTick {
/// Converts a CSymbolTick into a Tick given the number of decimal places of precision.
pub fn to_tick(&self, decimals: usize) -> Tick {
let multiplier = 10usize.pow(decimals as u32) as f64;
let bid_pips = self.bid * multiplier;
let ask_pips = self.ask * multiplier;
Tick {
timestamp: self.timestamp,
bid: bid_pips as usize,
ask: ask_pips as usize,
}
}
}
/// Contains data necessary to initialize a tickstream
#[repr(C)]
pub struct TickstreamDef {
pub env_ptr: *mut c_void,
pub cb: Option<extern fn (tx_ptr: *mut c_void, cst: CSymbolTick)>,
}
/// Holds the currently subscribed symbols as well as a channel to send them through
pub struct Tickstream {
pub subbed_pairs: Vec<SubbedPair>,
pub cs: CommandServer,
}
/// A pair that a user has subscribed to, containing the symbol, a sender through which to
/// send ticks, and the decimal precision of the exchange rate's float value.
pub struct SubbedPair {
pub symbol: *const c_char,
pub sender: UnboundedSender<Tick>,
pub decimals: usize,
}
/// Holds the state for the `handle_message` function
pub struct HandlerState {
pub sender: UnboundedSender<(u64, BrokerResult)>,
pub cs: CommandServer,
}
/// A request to open or close a position at market price.
#[repr(C)]
#[allow(dead_code)]
struct MarketRequest {
pub symbol: *const c_char,
pub quantity: c_int,
// when opening a long or closing a short, should be true
// when opening a short or closing a long, should be false
pub is_long: bool,
pub uuid: *const c_char,
}
// something to hold our environment so we can convince Rust to send it between threads
#[derive(Clone)]
pub struct Spaceship(pub *mut c_void);
unsafe impl Send for Spaceship {}
unsafe impl Send for FXCMNative {}
unsafe impl Send for ServerMessage {}
unsafe impl Send for ClientMessage {}
|
pub bid: c_double,
pub ask: c_double,
}
|
random_line_split
|
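The float-to-pips conversion in CSymbolTick::to_tick is plain scaling. An illustrative Python equivalent; note that, like the `as usize` cast, int() truncates toward zero, so float rounding can cost a pip (explicit rounding would be safer):

def to_tick(timestamp, bid, ask, decimals):
    # Scale the exchange rate by 10^decimals and truncate to integer pips,
    # matching CSymbolTick::to_tick.
    multiplier = 10 ** decimals
    return (timestamp, int(bid * multiplier), int(ask * multiplier))

print(to_tick(1500000000, 1.18415, 1.18436, 5))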
invoke_shellcodemsil.py
|
import re
from lib.common import helpers
class Module:
def
|
(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-ShellcodeMSIL',
'Author': ['@mattifestation'],
'Description': ('Execute shellcode within the context of the running PowerShell '
'process without making any Win32 function calls. Warning: This script has '
'no way to validate that your shellcode is 32 vs. 64-bit! '
'Note: Your shellcode must end in a ret (0xC3) and maintain proper stack '
'alignment or PowerShell will crash!'),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : True,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
'Comments': [
'http://www.exploit-monday.com',
'https://github.com/mattifestation/PowerSploit/blob/master/CodeExecution/Invoke-ShellcodeMSIL.ps1'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'Shellcode' : {
'Description' : 'Shellcode to inject, 0x00,0x0a,... format.',
'Required' : True,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
# read in the common module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/code_execution/Invoke-ShellcodeMSIL.ps1"
if obfuscate:
helpers.obfuscate_module(moduleSource=moduleSource, obfuscationCommand=obfuscationCommand)
moduleSource = moduleSource.replace("module_source", "obfuscated_module_source")
try:
f = open(moduleSource, 'r')
except IOError:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
script = moduleCode
scriptEnd = "Invoke-ShellcodeMSIL"
for option,values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if option.lower() == "shellcode":
# transform the shellcode to the correct format
sc = ",0".join(values['Value'].split("\\"))[1:]
scriptEnd += " -" + str(option) + " @(" + sc + ")"
if obfuscate:
scriptEnd = helpers.obfuscate(psScript=scriptEnd, obfuscationCommand=obfuscationCommand)
script += scriptEnd
return script
|
__init__
|
identifier_name
|
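The Shellcode option is rewritten by generate() from a backslash-escaped string into the 0x41,0x42,... list appended to the PowerShell invocation (note the option's description says "0x00,0x0a,... format", but the code splits on backslashes, so it actually expects "\x00\x0a..."-style input). The transform, run standalone:

sc_in = "\\x41\\x42\\xc3"              # a "\x41\x42\xc3"-style input string
sc = ",0".join(sc_in.split("\\"))[1:]  # split on "\", rejoin as ",0x..", drop the leading comma
print(sc)                              # -> 0x41,0x42,0xc3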
invoke_shellcodemsil.py
|
import re
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-ShellcodeMSIL',
'Author': ['@mattifestation'],
'Description': ('Execute shellcode within the context of the running PowerShell '
'process without making any Win32 function calls. Warning: This script has '
'no way to validate that your shellcode is 32 vs. 64-bit! '
'Note: Your shellcode must end in a ret (0xC3) and maintain proper stack '
'alignment or PowerShell will crash!'),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : True,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
'Comments': [
'http://www.exploit-monday.com',
'https://github.com/mattifestation/PowerSploit/blob/master/CodeExecution/Invoke-ShellcodeMSIL.ps1'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'Shellcode' : {
'Description' : 'Shellcode to inject, 0x00,0x0a,... format.',
'Required' : True,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
|
def generate(self, obfuscate=False, obfuscationCommand=""):
# read in the common module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/code_execution/Invoke-ShellcodeMSIL.ps1"
if obfuscate:
helpers.obfuscate_module(moduleSource=moduleSource, obfuscationCommand=obfuscationCommand)
moduleSource = moduleSource.replace("module_source", "obfuscated_module_source")
try:
f = open(moduleSource, 'r')
except IOError:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
script = moduleCode
scriptEnd = "Invoke-ShellcodeMSIL"
for option,values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if option.lower() == "shellcode":
# transform the shellcode to the correct format
sc = ",0".join(values['Value'].split("\\"))[1:]
scriptEnd += " -" + str(option) + " @(" + sc + ")"
if obfuscate:
scriptEnd = helpers.obfuscate(psScript=scriptEnd, obfuscationCommand=obfuscationCommand)
script += scriptEnd
return script
|
option, value = param
if option in self.options:
self.options[option]['Value'] = value
|
conditional_block
|
invoke_shellcodemsil.py
|
import re
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-ShellcodeMSIL',
'Author': ['@mattifestation'],
'Description': ('Execute shellcode within the context of the running PowerShell '
'process without making any Win32 function calls. Warning: This script has '
'no way to validate that your shellcode is 32 vs. 64-bit! '
'Note: Your shellcode must end in a ret (0xC3) and maintain proper stack '
'alignment or PowerShell will crash!'),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : True,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
'Comments': [
'http://www.exploit-monday.com',
'https://github.com/mattifestation/PowerSploit/blob/master/CodeExecution/Invoke-ShellcodeMSIL.ps1'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'Shellcode' : {
'Description' : 'Shellcode to inject, 0x00,0x0a,... format.',
'Required' : True,
'Value' : ''
}
|
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
# read in the common module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/code_execution/Invoke-ShellcodeMSIL.ps1"
if obfuscate:
helpers.obfuscate_module(moduleSource=moduleSource, obfuscationCommand=obfuscationCommand)
moduleSource = moduleSource.replace("module_source", "obfuscated_module_source")
try:
f = open(moduleSource, 'r')
except IOError:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
script = moduleCode
scriptEnd = "Invoke-ShellcodeMSIL"
for option,values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if option.lower() == "shellcode":
# transform the shellcode to the correct format
sc = ",0".join(values['Value'].split("\\"))[1:]
scriptEnd += " -" + str(option) + " @(" + sc + ")"
if obfuscate:
scriptEnd = helpers.obfuscate(psScript=scriptEnd, obfuscationCommand=obfuscationCommand)
script += scriptEnd
return script
|
random_line_split
|
|
invoke_shellcodemsil.py
|
import re
from lib.common import helpers
class Module:
|
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-ShellcodeMSIL',
'Author': ['@mattifestation'],
'Description': ('Execute shellcode within the context of the running PowerShell '
'process without making any Win32 function calls. Warning: This script has '
'no way to validate that your shellcode is 32 vs. 64-bit! '
'Note: Your shellcode must end in a ret (0xC3) and maintain proper stack '
'alignment or PowerShell will crash!'),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : True,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
'Comments': [
'http://www.exploit-monday.com',
'https://github.com/mattifestation/PowerSploit/blob/master/CodeExecution/Invoke-ShellcodeMSIL.ps1'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'Shellcode' : {
'Description' : 'Shellcode to inject, 0x00,0x0a,... format.',
'Required' : True,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
# read in the common module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/code_execution/Invoke-ShellcodeMSIL.ps1"
if obfuscate:
helpers.obfuscate_module(moduleSource=moduleSource, obfuscationCommand=obfuscationCommand)
moduleSource = moduleSource.replace("module_source", "obfuscated_module_source")
try:
f = open(moduleSource, 'r')
except IOError:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
script = moduleCode
scriptEnd = "Invoke-ShellcodeMSIL"
for option,values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if option.lower() == "shellcode":
# transform the shellcode to the correct format
sc = ",0".join(values['Value'].split("\\"))[1:]
scriptEnd += " -" + str(option) + " @(" + sc + ")"
if obfuscate:
scriptEnd = helpers.obfuscate(psScript=scriptEnd, obfuscationCommand=obfuscationCommand)
script += scriptEnd
return script
|
identifier_body
|
|
day17.py
|
#!/usr/bin/env python3
"""
http://adventofcode.com/day/17
Part 1
------
The elves bought too much eggnog again - 150 liters this time. To
fit it all into your refrigerator, you'll need to move it into
smaller containers. You take an inventory of the capacities of
the available containers.
For example, suppose you have containers of size 20, 15, 10, 5,
and 5 liters. If you need to store 25 liters, there are four ways
to do it:
- 15 and 10
- 20 and 5 (the first 5)
- 20 and 5 (the second 5)
- 15, 5, and 5
Filling all containers entirely, how many different combinations
of containers can exactly fit all 150 liters of eggnog?
Part 2
------
While playing with all the containers in the kitchen, another load
of eggnog arrives! The shipping and receiving department is
requesting as many containers as you can spare.
Find the minimum number of containers that can exactly fit all
150 liters of eggnog. How many different ways can you fill that
number of containers and still hold exactly 150 liters?
In the example above, the minimum number of containers was two.
There were three ways to use that many containers, and so the
answer there would be 3.
"""
from __future__ import print_function, unicode_literals
from itertools import combinations
import os
import re
import sys
INFILE = 'inputs/input17.txt'
def
|
():
containers = list()
with open(INFILE) as f:
for line in f:
containers.append(int(line.strip()))
# Part 1
p1count = 0
for s in range(len(containers) + 1):
for c in combinations(containers, s):
if sum(c) == 150:
p1count += 1
# Part 2
p2sizes = dict()
p2min = len(containers)
for i in range(p2min + 1):
p2sizes[i] = 0
for s in range(len(containers) + 1):
for c in combinations(containers, s):
if sum(c) == 150:
if len(c) < p2min:
p2min = len(c)
p2sizes[s] += 1
msg = '[Python] Puzzle 17-1: {}'
print(msg.format(p1count))
msg = '[Python] Puzzle 17-2: {}'
print(msg.format(p2sizes[p2min]))
if __name__ == '__main__':
main()
|
main
|
identifier_name
|
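As a sanity check of the counting logic in main(), here is the docstring's example run through the same combinations scan (sizes 0 through len(containers) inclusive):

from itertools import combinations

containers = [20, 15, 10, 5, 5]   # the example from the docstring
ways = [c for s in range(len(containers) + 1)
        for c in combinations(containers, s) if sum(c) == 25]
print(len(ways))                   # -> 4, matching part 1 of the example
smallest = min(len(c) for c in ways)
print(sum(1 for c in ways if len(c) == smallest))  # -> 3, matching part 2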
day17.py
|
#!/usr/bin/env python3
"""
http://adventofcode.com/day/17
Part 1
------
The elves bought too much eggnog again - 150 liters this time. To
fit it all into your refrigerator, you'll need to move it into
smaller containers. You take an inventory of the capacities of
the available containers.
For example, suppose you have containers of size 20, 15, 10, 5,
and 5 liters. If you need to store 25 liters, there are four ways
to do it:
- 15 and 10
- 20 and 5 (the first 5)
- 20 and 5 (the second 5)
- 15, 5, and 5
Filling all containers entirely, how many different combinations
of containers can exactly fit all 150 liters of eggnog?
Part 2
------
While playing with all the containers in the kitchen, another load
of eggnog arrives! The shipping and receiving department is
requesting as many containers as you can spare.
Find the minimum number of containers that can exactly fit all
150 liters of eggnog. How many different ways can you fill that
number of containers and still hold exactly 150 liters?
In the example above, the minimum number of containers was two.
There were three ways to use that many containers, and so the
answer there would be 3.
"""
from __future__ import print_function, unicode_literals
from itertools import combinations
import os
import re
import sys
INFILE = 'inputs/input17.txt'
def main():
containers = list()
with open(INFILE) as f:
for line in f:
containers.append(int(line.strip()))
# Part 1
p1count = 0
for s in range(len(containers) + 1):
for c in combinations(containers, s):
if sum(c) == 150:
p1count += 1
# Part 2
p2sizes = dict()
p2min = len(containers)
for i in range(p2min + 1):
p2sizes[i] = 0
for s in range(len(containers) + 1):
for c in combinations(containers, s):
if sum(c) == 150:
if len(c) < p2min:
p2min = len(c)
p2sizes[s] += 1
msg = '[Python] Puzzle 17-1: {}'
print(msg.format(p1count))
msg = '[Python] Puzzle 17-2: {}'
print(msg.format(p2sizes[p2min]))
if __name__ == '__main__':
|
main()
|
conditional_block
|
|
day17.py
|
#!/usr/bin/env python3
"""
http://adventofcode.com/day/17
Part 1
------
The elves bought too much eggnog again - 150 liters this time. To
fit it all into your refrigerator, you'll need to move it into
smaller containers. You take an inventory of the capacities of
the available containers.
For example, suppose you have containers of size 20, 15, 10, 5,
and 5 liters. If you need to store 25 liters, there are four ways
to do it:
- 15 and 10
- 20 and 5 (the first 5)
- 20 and 5 (the second 5)
- 15, 5, and 5
Filling all containers entirely, how many different combinations
of containers can exactly fit all 150 liters of eggnog?
Part 2
------
While playing with all the containers in the kitchen, another load
of eggnog arrives! The shipping and receiving department is
requesting as many containers as you can spare.
Find the minimum number of containers that can exactly fit all
150 liters of eggnog. How many different ways can you fill that
number of containers and still hold exactly 150 liters?
In the example above, the minimum number of containers was two.
There were three ways to use that many containers, and so the
answer there would be 3.
"""
from __future__ import print_function, unicode_literals
from itertools import combinations
import os
import re
import sys
INFILE = 'inputs/input17.txt'
def main():
|
if __name__ == '__main__':
main()
|
containers = list()
with open(INFILE) as f:
for line in f:
containers.append(int(line.strip()))
# Part 1
p1count = 0
for s in range(len(containers) + 1):
for c in combinations(containers, s):
if sum(c) == 150:
p1count += 1
# Part 2
p2sizes = dict()
p2min = len(containers)
for i in range(p2min + 1):
p2sizes[i] = 0
for s in range(len(containers) + 1):
for c in combinations(containers, s):
if sum(c) == 150:
if len(c) < p2min:
p2min = len(c)
p2sizes[s] += 1
msg = '[Python] Puzzle 17-1: {}'
print(msg.format(p1count))
msg = '[Python] Puzzle 17-2: {}'
print(msg.format(p2sizes[p2min]))
|
identifier_body
|
day17.py
|
#!/usr/bin/env python3
"""
http://adventofcode.com/day/17
Part 1
------
The elves bought too much eggnog again - 150 liters this time. To
fit it all into your refrigerator, you'll need to move it into
smaller containers. You take an inventory of the capacities of
the available containers.
For example, suppose you have containers of size 20, 15, 10, 5,
and 5 liters. If you need to store 25 liters, there are four ways
to do it:
- 15 and 10
- 20 and 5 (the first 5)
- 20 and 5 (the second 5)
- 15, 5, and 5
Filling all containers entirely, how many different combinations
of containers can exactly fit all 150 liters of eggnog?
Part 2
------
While playing with all the containers in the kitchen, another load
of eggnog arrives! The shipping and receiving department is
requesting as many containers as you can spare.
Find the minimum number of containers that can exactly fit all
150 liters of eggnog. How many different ways can you fill that
number of containers and still hold exactly 150 liters?
In the example above, the minimum number of containers was two.
There were three ways to use that many containers, and so the
answer there would be 3.
"""
from __future__ import print_function, unicode_literals
from itertools import combinations
import os
import re
import sys
INFILE = 'inputs/input17.txt'
def main():
containers = list()
with open(INFILE) as f:
for line in f:
containers.append(int(line.strip()))
# Part 1
p1count = 0
for s in range(len(containers) + 1):
for c in combinations(containers, s):
if sum(c) == 150:
p1count += 1
# Part 2
p2sizes = dict()
p2min = len(containers)
for i in range(p2min + 1):
p2sizes[i] = 0
for s in range(len(containers) + 1):
for c in combinations(containers, s):
if sum(c) == 150:
if len(c) < p2min:
p2min = len(c)
p2sizes[s] += 1
msg = '[Python] Puzzle 17-1: {}'
print(msg.format(p1count))
msg = '[Python] Puzzle 17-2: {}'
|
if __name__ == '__main__':
main()
|
print(msg.format(p2sizes[p2min]))
|
random_line_split
|
agent.py
|
# -*- encoding: utf-8 -*-
# Copyright (c) 2015 b<>com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""In charge of collecting data from drivers and push it to the publisher."""
import os
import msgpack
import nanomsg
from oslo_log import log
from watcher_metering.agent.manager import MetricManager
LOG = log.getLogger(__name__)
class Agent(MetricManager):
def __init__(self, conf, driver_names, use_nanoconfig_service,
publisher_endpoint, nanoconfig_service_endpoint,
nanoconfig_update_endpoint, nanoconfig_profile):
"""
:param conf: Configuration obtained from a configuration file
:type conf: oslo_config.cfg.ConfigOpts instance
:param driver_names: The list of driver names to register
:type driver_names: list of str
:param use_nanoconfig_service: Indicates whether or not it should use a
nanoconfig service
:type use_nanoconfig_service: bool
:param publisher_endpoint: Publisher server URI
:type publisher_endpoint: str
:param nanoconfig_service_endpoint: Nanoconfig service URI
:type nanoconfig_service_endpoint: str
:param nanoconfig_update_endpoint: Nanoconfig update service URI
:type nanoconfig_update_endpoint: str
:param nanoconfig_profile: Nanoconfig profile URI
:type nanoconfig_profile: str
"""
super(Agent, self).__init__(conf, driver_names)
self.socket = nanomsg.Socket(nanomsg.PUSH)
self.use_nanoconfig_service = use_nanoconfig_service
self.publisher_endpoint = publisher_endpoint
self.nanoconfig_service_endpoint = nanoconfig_service_endpoint
self.nanoconfig_update_endpoint = nanoconfig_update_endpoint
self.nanoconfig_profile = nanoconfig_profile
@property
def namespace(self):
return "watcher_metering.drivers"
def start(self):
LOG.info("[Agent] Starting main thread...")
super(Agent, self).start()
def setup_socket(self):
if self.use_nanoconfig_service:
self.set_nanoconfig_endpoints()
self.socket.configure(self.nanoconfig_profile)
LOG.info("[Agent] Agent nanomsg's profile `%s`",
self.nanoconfig_profile)
else:
LOG.debug("[Agent] Agent connected to: `%s`",
self.publisher_endpoint)
|
`NN_CONFIG_UPDATES` environment variables, as nanoconfig uses them to
access the nanoconfig service.
"""
# NN_CONFIG_SERVICE:
nn_config_service = os.environ.get("NN_CONFIG_SERVICE")
if not self.nanoconfig_service_endpoint and not nn_config_service:
raise ValueError(
"Invalid configuration! No NN_CONFIG_SERVICE set. You need to "
"configure your `nanoconfig_service_endpoint`.")
if self.nanoconfig_service_endpoint:
os.environ["NN_CONFIG_SERVICE"] = self.nanoconfig_service_endpoint
else:
self.nanoconfig_service_endpoint = nn_config_service
# NN_CONFIG_UPDATES
nn_config_updates = os.environ.get("NN_CONFIG_UPDATES")
if not self.nanoconfig_update_endpoint and not nn_config_updates:
raise ValueError(
"Invalid configuration! No NN_CONFIG_UPDATES set. You need to "
"configure your `nanoconfig_update_endpoint`.")
if self.nanoconfig_update_endpoint:
os.environ["NN_CONFIG_UPDATES"] = self.nanoconfig_update_endpoint
else:
self.nanoconfig_update_endpoint = nn_config_updates
def run(self):
self.setup_socket()
super(Agent, self).run()
def stop(self):
self.socket.close()
super(Agent, self).stop()
LOG.debug("[Agent] Stopped")
def update(self, notifier, data):
LOG.debug("[Agent] Updated by: %s", notifier)
LOG.debug("[Agent] Preparing to send message %s", msgpack.loads(data))
try:
LOG.debug("[Agent] Sending message...")
# The agent will wait for the publisher server to be listening on
# the related publisher_endpoint before continuing
# In which case, you should start the publisher to make it work!
self.socket.send(data)
LOG.debug("[Agent] Message sent successfully!")
except nanomsg.NanoMsgError as exc:
LOG.error("Exception during sending the message to controller %s",
exc.args[0])
|
self.socket.connect(self.publisher_endpoint)
LOG.info("[Agent] Ready for pushing to Publisher node")
def set_nanoconfig_endpoints(self):
"""This methods sets both the `NN_CONFIG_SERVICE` and
|
random_line_split
|
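set_nanoconfig_endpoints applies the same precedence rule to both variables: an explicitly configured endpoint wins and is exported for nanoconfig, otherwise the pre-existing environment value is adopted, and it is an error if neither exists. A condensed sketch of that rule (hypothetical helper, not part of the class):

import os

def resolve_endpoint(configured, env_var):
    # Explicit configuration wins and is exported for nanoconfig;
    # otherwise fall back to an already-set environment variable.
    env_value = os.environ.get(env_var)
    if not configured and not env_value:
        raise ValueError("%s is not set and no endpoint was configured" % env_var)
    if configured:
        os.environ[env_var] = configured
        return configured
    return env_value

endpoint = resolve_endpoint("tcp://127.0.0.1:5555", "NN_CONFIG_SERVICE")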
agent.py
|
# -*- encoding: utf-8 -*-
# Copyright (c) 2015 b<>com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""In charge of collecting data from drivers and push it to the publisher."""
import os
import msgpack
import nanomsg
from oslo_log import log
from watcher_metering.agent.manager import MetricManager
LOG = log.getLogger(__name__)
class Agent(MetricManager):
def __init__(self, conf, driver_names, use_nanoconfig_service,
publisher_endpoint, nanoconfig_service_endpoint,
nanoconfig_update_endpoint, nanoconfig_profile):
"""
:param conf: Configuration obtained from a configuration file
:type conf: oslo_config.cfg.ConfigOpts instance
:param driver_names: The list of driver names to register
:type driver_names: list of str
:param use_nanoconfig_service: Indicates whether or not it should use a
nanoconfig service
:type use_nanoconfig_service: bool
:param publisher_endpoint: Publisher server URI
:type publisher_endpoint: str
:param nanoconfig_service_endpoint: Nanoconfig service URI
:type nanoconfig_service_endpoint: str
:param nanoconfig_update_endpoint: Nanoconfig update service URI
:type nanoconfig_update_endpoint: str
:param nanoconfig_profile: Nanoconfig profile URI
:type nanoconfig_profile: str
"""
super(Agent, self).__init__(conf, driver_names)
self.socket = nanomsg.Socket(nanomsg.PUSH)
self.use_nanoconfig_service = use_nanoconfig_service
self.publisher_endpoint = publisher_endpoint
self.nanoconfig_service_endpoint = nanoconfig_service_endpoint
self.nanoconfig_update_endpoint = nanoconfig_update_endpoint
self.nanoconfig_profile = nanoconfig_profile
@property
def namespace(self):
return "watcher_metering.drivers"
def start(self):
LOG.info("[Agent] Starting main thread...")
super(Agent, self).start()
def setup_socket(self):
if self.use_nanoconfig_service:
self.set_nanoconfig_endpoints()
self.socket.configure(self.nanoconfig_profile)
LOG.info("[Agent] Agent nanomsg's profile `%s`",
self.nanoconfig_profile)
else:
LOG.debug("[Agent] Agent connected to: `%s`",
self.publisher_endpoint)
self.socket.connect(self.publisher_endpoint)
LOG.info("[Agent] Ready for pushing to Publisher node")
def set_nanoconfig_endpoints(self):
"""This methods sets both the `NN_CONFIG_SERVICE` and
`NN_CONFIG_UPDATES` environment variable as nanoconfig uses it to
access the nanoconfig service
"""
# NN_CONFIG_SERVICE:
nn_config_service = os.environ.get("NN_CONFIG_SERVICE")
if not self.nanoconfig_service_endpoint and not nn_config_service:
raise ValueError(
"Invalid configuration! No NN_CONFIG_SERVICE set. You need to "
"configure your `nanoconfig_service_endpoint`.")
if self.nanoconfig_service_endpoint:
os.environ["NN_CONFIG_SERVICE"] = self.nanoconfig_service_endpoint
else:
self.nanoconfig_service_endpoint = nn_config_service
# NN_CONFIG_UPDATES
nn_config_updates = os.environ.get("NN_CONFIG_UPDATES")
if not self.nanoconfig_update_endpoint and not nn_config_updates:
raise ValueError(
"Invalid configuration! No NN_CONFIG_UPDATES set. You need to "
"configure your `nanoconfig_update_endpoint`.")
if self.nanoconfig_update_endpoint:
os.environ["NN_CONFIG_UPDATES"] = self.nanoconfig_update_endpoint
else:
self.nanoconfig_update_endpoint = nn_config_updates
def run(self):
|
def stop(self):
self.socket.close()
super(Agent, self).stop()
LOG.debug("[Agent] Stopped")
def update(self, notifier, data):
LOG.debug("[Agent] Updated by: %s", notifier)
LOG.debug("[Agent] Preparing to send message %s", msgpack.loads(data))
try:
LOG.debug("[Agent] Sending message...")
# The agent will wait for the publisher server to be listening on
# the related publisher_endpoint before continuing
# In which case, you should start the publisher to make it work!
self.socket.send(data)
LOG.debug("[Agent] Message sent successfully!")
except nanomsg.NanoMsgError as exc:
LOG.error("Exception during sending the message to controller %s",
exc.args[0])
|
self.setup_socket()
super(Agent, self).run()
|
identifier_body
|
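update() pushes the raw msgpack bytes over the nanomsg PUSH socket and only decodes them for the debug log. A minimal round-trip sketch (illustrative; str/bytes handling of keys varies across msgpack-python versions):

import msgpack

payload = msgpack.dumps({"name": "cpu_user", "value": 0.42})  # what a driver might produce
print(msgpack.loads(payload))  # decoded copy, as in the debug log above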
agent.py
|
# -*- encoding: utf-8 -*-
# Copyright (c) 2015 b<>com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""In charge of collecting data from drivers and push it to the publisher."""
import os
import msgpack
import nanomsg
from oslo_log import log
from watcher_metering.agent.manager import MetricManager
LOG = log.getLogger(__name__)
class Agent(MetricManager):
def __init__(self, conf, driver_names, use_nanoconfig_service,
publisher_endpoint, nanoconfig_service_endpoint,
nanoconfig_update_endpoint, nanoconfig_profile):
"""
:param conf: Configuration obtained from a configuration file
:type conf: oslo_config.cfg.ConfigOpts instance
:param driver_names: The list of driver names to register
:type driver_names: list of str
:param use_nanoconfig_service: Indicates whether or not it should use a
nanoconfig service
:type use_nanoconfig_service: bool
:param publisher_endpoint: Publisher server URI
:type publisher_endpoint: str
:param nanoconfig_service_endpoint: Nanoconfig service URI
:type nanoconfig_service_endpoint: str
:param nanoconfig_update_endpoint: Nanoconfig update service URI
:type nanoconfig_update_endpoint: str
:param nanoconfig_profile: Nanoconfig profile URI
:type nanoconfig_profile: str
"""
super(Agent, self).__init__(conf, driver_names)
self.socket = nanomsg.Socket(nanomsg.PUSH)
self.use_nanoconfig_service = use_nanoconfig_service
self.publisher_endpoint = publisher_endpoint
self.nanoconfig_service_endpoint = nanoconfig_service_endpoint
self.nanoconfig_update_endpoint = nanoconfig_update_endpoint
self.nanoconfig_profile = nanoconfig_profile
@property
def namespace(self):
return "watcher_metering.drivers"
def start(self):
LOG.info("[Agent] Starting main thread...")
super(Agent, self).start()
def setup_socket(self):
if self.use_nanoconfig_service:
self.set_nanoconfig_endpoints()
self.socket.configure(self.nanoconfig_profile)
LOG.info("[Agent] Agent nanomsg's profile `%s`",
self.nanoconfig_profile)
else:
LOG.debug("[Agent] Agent connected to: `%s`",
self.publisher_endpoint)
self.socket.connect(self.publisher_endpoint)
LOG.info("[Agent] Ready for pushing to Publisher node")
def set_nanoconfig_endpoints(self):
"""This methods sets both the `NN_CONFIG_SERVICE` and
`NN_CONFIG_UPDATES` environment variable as nanoconfig uses it to
access the nanoconfig service
"""
# NN_CONFIG_SERVICE:
nn_config_service = os.environ.get("NN_CONFIG_SERVICE")
if not self.nanoconfig_service_endpoint and not nn_config_service:
|
if self.nanoconfig_service_endpoint:
os.environ["NN_CONFIG_SERVICE"] = self.nanoconfig_service_endpoint
else:
self.nanoconfig_service_endpoint = nn_config_service
# NN_CONFIG_UPDATES
nn_config_updates = os.environ.get("NN_CONFIG_UPDATES")
if not self.nanoconfig_update_endpoint and not nn_config_updates:
raise ValueError(
"Invalid configuration! No NN_CONFIG_UPDATES set. You need to "
"configure your `nanoconfig_update_endpoint`.")
if self.nanoconfig_update_endpoint:
os.environ["NN_CONFIG_UPDATES"] = self.nanoconfig_update_endpoint
else:
self.nanoconfig_update_endpoint = nn_config_updates
def run(self):
self.setup_socket()
super(Agent, self).run()
def stop(self):
self.socket.close()
super(Agent, self).stop()
LOG.debug("[Agent] Stopped")
def update(self, notifier, data):
LOG.debug("[Agent] Updated by: %s", notifier)
LOG.debug("[Agent] Preparing to send message %s", msgpack.loads(data))
try:
LOG.debug("[Agent] Sending message...")
# The agent will wait for the publisher server to be listening on
# the related publisher_endpoint before continuing
# In which case, you should start the publisher to make it work!
self.socket.send(data)
LOG.debug("[Agent] Message sent successfully!")
except nanomsg.NanoMsgError as exc:
LOG.error("Exception during sending the message to controller %s",
exc.args[0])
|
raise ValueError(
"Invalid configuration! No NN_CONFIG_SERVICE set. You need to "
"configure your `nanoconfig_service_endpoint`.")
|
conditional_block
|
agent.py
|
# -*- encoding: utf-8 -*-
# Copyright (c) 2015 b<>com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""In charge of collecting data from drivers and push it to the publisher."""
import os
import msgpack
import nanomsg
from oslo_log import log
from watcher_metering.agent.manager import MetricManager
LOG = log.getLogger(__name__)
class Agent(MetricManager):
def __init__(self, conf, driver_names, use_nanoconfig_service,
publisher_endpoint, nanoconfig_service_endpoint,
nanoconfig_update_endpoint, nanoconfig_profile):
"""
:param conf: Configuration obtained from a configuration file
:type conf: oslo_config.cfg.ConfigOpts instance
:param driver_names: The list of driver names to register
:type driver_names: list of str
:param use_nanoconfig_service: Indicates whether or not it should use a
nanoconfig service
:type use_nanoconfig_service: bool
:param publisher_endpoint: Publisher server URI
:type publisher_endpoint: str
:param nanoconfig_service_endpoint: Nanoconfig service URI
:type nanoconfig_service_endpoint: str
:param nanoconfig_update_endpoint: Nanoconfig update service URI
:type nanoconfig_update_endpoint: str
:param nanoconfig_profile: Nanoconfig profile URI
:type nanoconfig_profile: str
"""
super(Agent, self).__init__(conf, driver_names)
self.socket = nanomsg.Socket(nanomsg.PUSH)
self.use_nanoconfig_service = use_nanoconfig_service
self.publisher_endpoint = publisher_endpoint
self.nanoconfig_service_endpoint = nanoconfig_service_endpoint
self.nanoconfig_update_endpoint = nanoconfig_update_endpoint
self.nanoconfig_profile = nanoconfig_profile
@property
def namespace(self):
return "watcher_metering.drivers"
def start(self):
LOG.info("[Agent] Starting main thread...")
super(Agent, self).start()
def setup_socket(self):
if self.use_nanoconfig_service:
self.set_nanoconfig_endpoints()
self.socket.configure(self.nanoconfig_profile)
LOG.info("[Agent] Agent nanomsg's profile `%s`",
self.nanoconfig_profile)
else:
LOG.debug("[Agent] Agent connected to: `%s`",
self.publisher_endpoint)
self.socket.connect(self.publisher_endpoint)
LOG.info("[Agent] Ready for pushing to Publisher node")
def set_nanoconfig_endpoints(self):
"""This methods sets both the `NN_CONFIG_SERVICE` and
`NN_CONFIG_UPDATES` environment variable as nanoconfig uses it to
access the nanoconfig service
"""
# NN_CONFIG_SERVICE:
nn_config_service = os.environ.get("NN_CONFIG_SERVICE")
if not self.nanoconfig_service_endpoint and not nn_config_service:
raise ValueError(
"Invalid configuration! No NN_CONFIG_SERVICE set. You need to "
"configure your `nanoconfig_service_endpoint`.")
if self.nanoconfig_service_endpoint:
os.environ["NN_CONFIG_SERVICE"] = self.nanoconfig_service_endpoint
else:
self.nanoconfig_service_endpoint = nn_config_service
# NN_CONFIG_UPDATES
nn_config_updates = os.environ.get("NN_CONFIG_UPDATES")
if not self.nanoconfig_update_endpoint and not nn_config_updates:
raise ValueError(
"Invalid configuration! No NN_CONFIG_UPDATES set. You need to "
"configure your `nanoconfig_update_endpoint`.")
if self.nanoconfig_update_endpoint:
os.environ["NN_CONFIG_UPDATES"] = self.nanoconfig_update_endpoint
else:
self.nanoconfig_update_endpoint = nn_config_updates
def run(self):
self.setup_socket()
super(Agent, self).run()
def
|
(self):
self.socket.close()
super(Agent, self).stop()
LOG.debug("[Agent] Stopped")
def update(self, notifier, data):
LOG.debug("[Agent] Updated by: %s", notifier)
LOG.debug("[Agent] Preparing to send message %s", msgpack.loads(data))
try:
LOG.debug("[Agent] Sending message...")
# The agent will wait for the publisher server to be listening on
# the related publisher_endpoint before continuing
# In which case, you should start the publisher to make it work!
self.socket.send(data)
LOG.debug("[Agent] Message sent successfully!")
except nanomsg.NanoMsgError as exc:
LOG.error("Exception during sending the message to controller %s",
exc.args[0])
|
stop
|
identifier_name
|
test_hierarchy_control.py
|
#!/usr/bin/env python
# Copyright (c) 2015, Robot Control and Pattern Recognition Group,
# Institute of Control and Computation Engineering
# Warsaw University of Technology
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Warsaw University of Technology nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Dawid Seredynski
#
import roslib
roslib.load_manifest('velma_scripts')
import rospy
import tf
from std_msgs.msg import *
from sensor_msgs.msg import *
from geometry_msgs.msg import *
from visualization_msgs.msg import *
from tf import *
from tf.transformations import *
import tf_conversions.posemath as pm
from tf2_msgs.msg import *
import PyKDL
import math
import numpy as np
import copy
import matplotlib.pyplot as plt
import thread
import random
import openravepy
from openravepy import *
from optparse import OptionParser
from openravepy.misc import OpenRAVEGlobalArguments
import itertools
import rospkg
import multiprocessing
import velmautils
from velma import Velma
import openraveinstance
import conversions as conv
#import rrt_star_planner_ee
import rrt_star_connect_planner
import tree
import rosparam
import tasks
def identityMatrix(size):
I = np.matrix(np.zeros((size, size)))
for idx in range(size):
I[idx,idx] = 1.0
return I
class TestHierarchyControl:
|
# rospy.sleep(0.01)
if __name__ == '__main__':
rospy.init_node('test_hierarchy_control')
task = TestHierarchyControl()
rospy.sleep(0.5)
task.spin()
|
"""
"""
def __init__(self):
self.pub_marker = velmautils.MarkerPublisher()
def spin(self):
simulation = True
rospack = rospkg.RosPack()
env_file=rospack.get_path('velma_scripts') + '/data/jar/cabinet_test.env.xml'
srdf_path=rospack.get_path('velma_description') + '/robots/'
print "creating interface for Velma..."
# create the interface for Velma robot
velma = Velma()
print "done."
#
# Initialise Openrave
#
openrave = openraveinstance.OpenraveInstance()
openrave.startOpenraveURDF(env_file=env_file, viewer=True)
openrave.readRobot(srdf_path=srdf_path)
openrave.setCamera(PyKDL.Vector(2.0, 0.0, 2.0), PyKDL.Vector(0.60, 0.0, 1.10))
velma.waitForInit()
openrave.updateRobotConfigurationRos(velma.js_pos)
non_adj_links_ids = openrave.robot_rave.GetNonAdjacentLinks()
velma.switchToJoint()
lim_bo_soft, lim_up_soft = velma.getJointSoftLimitsVectors()
lim_bo, lim_up = velma.getJointLimitsVectors()
velma.fk_ik_solver.createJacobianFkSolvers('torso_base', 'right_HandPalmLink', velma.getJointStatesVectorNames())
velma.fk_ik_solver.createJacobianFkSolvers('torso_base', 'left_HandPalmLink', velma.getJointStatesVectorNames())
velma.fk_ik_solver.createSegmentToJointMap(velma.getJointStatesVectorNames(), velma.getInactiveJointStatesVector())
print velma.getJointStatesVectorNames()
r_HAND_targets = [
# PyKDL.Frame(PyKDL.Vector(0.5,0,1.8)),
# PyKDL.Frame(PyKDL.Rotation.RotY(170.0/180.0*math.pi), PyKDL.Vector(0.5,0,1.6)),
PyKDL.Frame(PyKDL.Rotation.RotY(90.0/180.0*math.pi), PyKDL.Vector(0.2,0.0,1.0)),
PyKDL.Frame(PyKDL.Rotation.RotY(90.0/180.0*math.pi), PyKDL.Vector(0.2,-0.5,1.0)),
]
l_HAND_targets = [
# PyKDL.Frame(PyKDL.Vector(0.5,0,1.8)),
# PyKDL.Frame(PyKDL.Rotation.RotY(170.0/180.0*math.pi), PyKDL.Vector(0.5,0,1.6)),
PyKDL.Frame(PyKDL.Rotation.RotY(90.0/180.0*math.pi) * PyKDL.Rotation.RotZ(180.0/180.0*math.pi), PyKDL.Vector(0.2,0.0,1.0)),
PyKDL.Frame(PyKDL.Rotation.RotY(90.0/180.0*math.pi) * PyKDL.Rotation.RotZ(180.0/180.0*math.pi), PyKDL.Vector(0.2,0.5,1.0)),
]
target_idx = 0
r_HAND_target = r_HAND_targets[target_idx]
l_HAND_target = l_HAND_targets[target_idx]
target_idx += 1
last_time = rospy.Time.now()
q = velma.getJointStatesVector()
q_names = velma.getJointStatesVectorNames()
iq = velma.getInactiveJointStatesVector()
counter = 0
while not rospy.is_shutdown():
if counter > 300:
r_HAND_target = r_HAND_targets[target_idx]
l_HAND_target = l_HAND_targets[target_idx]
target_idx = (target_idx + 1)%len(r_HAND_targets)
counter = 0
counter += 1
time_elapsed = rospy.Time.now() - last_time
J_JLC = np.matrix(np.zeros((len(q), len(q))))
delta_V_JLC = np.empty(len(q))
for q_idx in range(len(q)):
if q[q_idx] < lim_bo_soft[q_idx]:
delta_V_JLC[q_idx] = q[q_idx] - lim_bo_soft[q_idx]
J_JLC[q_idx,q_idx] = min(1.0, 10*abs(q[q_idx] - lim_bo_soft[q_idx]) / abs(lim_bo[q_idx] - lim_bo_soft[q_idx]))
elif q[q_idx] > lim_up_soft[q_idx]:
delta_V_JLC[q_idx] = q[q_idx] - lim_up_soft[q_idx]
J_JLC[q_idx,q_idx] = min(1.0, 10*abs(q[q_idx] - lim_up_soft[q_idx]) / abs(lim_up[q_idx] - lim_up_soft[q_idx]))
else:
delta_V_JLC[q_idx] = 0.0
J_JLC[q_idx,q_idx] = 0.0
J_JLC_inv = np.linalg.pinv(J_JLC)
N_JLC = identityMatrix(len(q)) - (J_JLC_inv * J_JLC)
N_JLC_inv = np.linalg.pinv(N_JLC)
v_max_JLC = 20.0/180.0*math.pi
kp_JLC = 1.0
dx_JLC_des = kp_JLC * delta_V_JLC
# min(1.0, v_max_JLC/np.linalg.norm(dx_JLC_des))
if v_max_JLC > np.linalg.norm(dx_JLC_des):
vv_JLC = 1.0
else:
vv_JLC = v_max_JLC/np.linalg.norm(dx_JLC_des)
dx_JLC_ref = - vv_JLC * dx_JLC_des
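# Editorial note (not in the original code): N_JLC = I - pinv(J_JLC) * J_JLC
# is a nullspace projector, so J_JLC * N_JLC == 0 (since J * pinv(J) * J == J).
# Lower-priority hand tasks routed through N_JLC therefore cannot disturb the
# joint-limit avoidance task, e.g.:
#   v = np.matrix(np.random.rand(len(q))).T
#   assert np.allclose(J_JLC * (N_JLC * v), 0.0, atol=1e-8)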
# right hand
J_r_HAND = velma.fk_ik_solver.getJacobian('torso_base', 'right_HandPalmLink', q)
J_r_HAND_inv = np.linalg.pinv(J_r_HAND)
N_r_HAND = identityMatrix(len(q)) - (J_r_HAND_inv * J_r_HAND)
T_B_E = velma.fk_ik_solver.calculateFk2('torso_base', 'right_HandPalmLink', q)
r_HAND_current = T_B_E
r_HAND_diff = PyKDL.diff(r_HAND_target, r_HAND_current)
delta_V_HAND = np.empty(6)
delta_V_HAND[0] = r_HAND_diff.vel[0]
delta_V_HAND[1] = r_HAND_diff.vel[1]
delta_V_HAND[2] = r_HAND_diff.vel[2]
delta_V_HAND[3] = r_HAND_diff.rot[0]
delta_V_HAND[4] = r_HAND_diff.rot[1]
delta_V_HAND[5] = r_HAND_diff.rot[2]
v_max_HAND = 2.0
kp_HAND = 2.0
dx_HAND_des = kp_HAND * delta_V_HAND
if v_max_HAND > np.linalg.norm(dx_HAND_des):
vv_HAND = 1.0
else:
vv_HAND = v_max_HAND/np.linalg.norm(dx_HAND_des)
dx_r_HAND_ref = - vv_HAND * dx_HAND_des
# left hand
J_l_HAND = velma.fk_ik_solver.getJacobian('torso_base', 'left_HandPalmLink', q)
J_l_HAND_inv = np.linalg.pinv(J_l_HAND)
N_l_HAND = identityMatrix(len(q)) - (J_l_HAND_inv * J_l_HAND)
T_B_E = velma.fk_ik_solver.calculateFk2('torso_base', 'left_HandPalmLink', q)
l_HAND_current = T_B_E
l_HAND_diff = PyKDL.diff(l_HAND_target, l_HAND_current)
delta_V_HAND = np.empty(6)
delta_V_HAND[0] = l_HAND_diff.vel[0]
delta_V_HAND[1] = l_HAND_diff.vel[1]
delta_V_HAND[2] = l_HAND_diff.vel[2]
delta_V_HAND[3] = l_HAND_diff.rot[0]
delta_V_HAND[4] = l_HAND_diff.rot[1]
delta_V_HAND[5] = l_HAND_diff.rot[2]
v_max_HAND = 2.0
kp_HAND = 2.0
dx_HAND_des = kp_HAND * delta_V_HAND
if v_max_HAND > np.linalg.norm(dx_HAND_des):
vv_HAND = 1.0
else:
vv_HAND = v_max_HAND/np.linalg.norm(dx_HAND_des)
dx_l_HAND_ref = - vv_HAND * dx_HAND_des
link_collision_map = {}
openrave.updateRobotConfiguration(q, velma.getJointStatesVectorNames())
if True:
openrave.switchCollisionModel("velmasimplified1")
col_chk = openrave.env.GetCollisionChecker()
col_opt = col_chk.GetCollisionOptions()
col_chk.SetCollisionOptions(0x04) # CO_Contacts(0x04), CO_AllLinkCollisions(0x20)
total_contacts = 0
for link1_idx, link2_idx in non_adj_links_ids:
link1 = openrave.robot_rave.GetLinks()[link1_idx]
link2 = openrave.robot_rave.GetLinks()[link2_idx]
report = CollisionReport()
if col_chk.CheckCollision(link1=link1, link2=link2, report=report):
T_L1_W = conv.OpenraveToKDL(link1.GetTransform()).Inverse()
TR_L1_W = PyKDL.Frame(T_L1_W.M)
T_L2_W = conv.OpenraveToKDL(link2.GetTransform()).Inverse()
TR_L2_W = PyKDL.Frame(T_L2_W.M)
swapped = False
if link1_idx > link2_idx:
link1_idx, link2_idx = link2_idx, link1_idx
swapped = True
if not (link1_idx, link2_idx) in link_collision_map:
link_collision_map[(link1_idx, link2_idx)] = []
for contact in report.contacts:
pos_W = PyKDL.Vector(contact.pos[0], contact.pos[1], contact.pos[2])
norm_W = PyKDL.Vector(contact.norm[0], contact.norm[1], contact.norm[2])
if swapped:
link_collision_map[(link1_idx, link2_idx)].append( (pos_W, -norm_W, T_L2_W * pos_W, T_L1_W * pos_W, TR_L2_W * (-norm_W), TR_L1_W * (-norm_W), contact.depth) )
else:
link_collision_map[(link1_idx, link2_idx)].append( (pos_W, norm_W, T_L1_W * pos_W, T_L2_W * pos_W, TR_L1_W * norm_W, TR_L2_W * norm_W, contact.depth) )
total_contacts += len(report.contacts)
col_chk.SetCollisionOptions(col_opt)
print "links in contact:", len(link_collision_map), "total contacts:", total_contacts
omega_col = np.matrix(np.zeros( (len(q),1) ))
Ncol = identityMatrix(len(q))
for link1_idx, link2_idx in link_collision_map:
link1_name = openrave.robot_rave.GetLinks()[link1_idx].GetName()
link2_name = openrave.robot_rave.GetLinks()[link2_idx].GetName()
# l1_parent = velma.fk_ik_solver.isParent(link1_name, link2_name)
# l2_parent = velma.fk_ik_solver.isParent(link2_name, link1_name)
affected_dof = velma.fk_ik_solver.getAffectedDof(link1_name, link2_name)
# print "affected dof:"
# for dof_idx in affected_dof:
# print q_names[dof_idx]
contacts = link_collision_map[ (link1_idx, link2_idx) ]
for c in contacts:
pos_W, norm_W, pos_L1, pos_L2, norm_L1, norm_L2, depth = c
m_id = self.pub_marker.publishVectorMarker(pos_W, pos_W + norm_W*0.05, 1, 1, 0, 0, frame='world', namespace='default', scale=0.005)
if depth < 0:
print "ERROR: depth < 0:", depth
exit(0)
# print link1_name, link2_name
jac1 = PyKDL.Jacobian(len(q))
velma.fk_ik_solver.getJacobianForX(jac1, link1_name, pos_L1, q, iq)
jac2 = PyKDL.Jacobian(len(q))
velma.fk_ik_solver.getJacobianForX(jac2, link2_name, pos_L2, q, iq)
# repulsive velocity
V_max = 0.1
depth_max = 0.002
if depth > depth_max:
depth = depth_max
Vrep = V_max * depth * depth / (depth_max * depth_max)
# the mapping between motions along contact normal and the Cartesian coordinates
e1 = norm_L1
e2 = norm_L2
Jd1 = np.matrix([e1[0], e1[1], e1[2]])
Jd2 = np.matrix([e2[0], e2[1], e2[2]])
# print "Jd1.shape", Jd1.shape
# rewrite the linear part of the jacobian
jac1_mx = np.matrix(np.zeros( (3, len(q)) ))
jac2_mx = np.matrix(np.zeros( (3, len(q)) ))
for q_idx in range(len(q)):
col1 = jac1.getColumn(q_idx)
col2 = jac2.getColumn(q_idx)
for row_idx in range(3):
jac1_mx[row_idx, q_idx] = col1[row_idx]
jac2_mx[row_idx, q_idx] = col2[row_idx]
# print "jac1_mx, jac2_mx"
# print jac1_mx
# print jac2_mx
# print "jac1_mx.shape", jac1_mx.shape
Jcol1 = Jd1 * jac1_mx
Jcol2 = Jd2 * jac2_mx
# print "Jcol2.shape", Jcol2.shape
Jcol = np.matrix(np.zeros( (2, len(q)) ))
for q_idx in range(len(q)):
if Jcol1[0, q_idx] < 0.000000001 or not q_idx in affected_dof:#l1_parent:
Jcol1[0, q_idx] = 0.0
if Jcol2[0, q_idx] < 0.000000001 or not q_idx in affected_dof:#l2_parent:
Jcol2[0, q_idx] = 0.0
Jcol[0, q_idx] = Jcol1[0, q_idx]
Jcol[1, q_idx] = Jcol2[0, q_idx]
# print Jcol.shape
# print "Jcol"
# print Jcol
Jcol_pinv = np.linalg.pinv(Jcol)
# Jcol_pinv = Jcol.transpose()
# print "Jcol_pinv"
# print Jcol_pinv
# Ncol1 = identityMatrix(len(q)) - np.linalg.pinv(Jcol1) * Jcol1
# Ncol2 = identityMatrix(len(q)) - np.linalg.pinv(Jcol2) * Jcol2
# Ncol = Ncol * Ncol1
# Ncol = Ncol * Ncol2
# omega_col += np.linalg.pinv(Jcol1) * (-Vrep)
# omega_col += np.linalg.pinv(Jcol2) * (Vrep)
# continue
# activation = min(1.0, depth/0.001)
# a_des = np.matrix(np.zeros( (len(q),len(q)) ))
# a_des[0,0] = a_des[1,1] = 1.0#activation
# U, S, V = numpy.linalg.svd(Jcol, full_matrices=True, compute_uv=True)
# print "V"
# print V
# print "S"
# print S
Ncol12 = identityMatrix(len(q)) - Jcol_pinv * Jcol
# Ncol12 = identityMatrix(len(q)) - Jcol.transpose() * (Jcol_pinv).transpose()
# Ncol12 = identityMatrix(len(q)) - (V * a_des * V.transpose())
# Ncol12 = identityMatrix(len(q)) - (Jcol.transpose() * a_des * Jcol)
Ncol = Ncol * Ncol12
Vrep1 = -Vrep
Vrep2 = Vrep
# if l1_parent:
# Vrep1 = 0.0
# if l2_parent:
# Vrep2 = 0.0
d_omega = Jcol_pinv * np.matrix([-Vrep, Vrep]).transpose()
# print "d_omega", d_omega
# print "Vrep", Vrep
# print q_names
# print "Jcol", Jcol
# print "Jcol_pinv", Jcol_pinv
# print "Jcol_pinv * Jcol", Jcol_pinv * Jcol
# print "Jcol * Jcol_pinv", Jcol * Jcol_pinv
# print "a_des", a_des
omega_col += d_omega
# print "depth", depth
# raw_input(".")
# print "omega_col", omega_col
# print dx_HAND_ref
omega_r_HAND = (J_r_HAND_inv * np.matrix(dx_r_HAND_ref).transpose())
omega_l_HAND = (J_l_HAND_inv * np.matrix(dx_l_HAND_ref).transpose())
Ncol_inv = np.linalg.pinv(Ncol)
N_r_HAND_inv = np.linalg.pinv(N_r_HAND)
# omega = J_JLC_inv * np.matrix(dx_JLC_ref).transpose() + N_JLC_inv * (omega_col + Ncol_inv * (omega_r_HAND))# + N_r_HAND_inv * omega_l_HAND))
omega = J_JLC_inv * np.matrix(dx_JLC_ref).transpose() + N_JLC.transpose() * (omega_col + Ncol.transpose() * (omega_r_HAND))# + N_r_HAND.transpose() * omega_l_HAND))
# print "omega", omega
# print "dx_JLC_ref"
# print dx_JLC_ref
# print "dx_HAND_ref"
# print dx_HAND_ref
omega_vector = np.empty(len(q))
for q_idx in range(len(q)):
omega_vector[q_idx] = omega[q_idx][0]
q += omega_vector * 0.002
if time_elapsed.to_sec() > 0.2:
last_time = rospy.Time.now()
velma.moveJoint(q, velma.getJointStatesVectorNames(), 0.05, start_time=0.14)
|
identifier_body
|
test_hierarchy_control.py
|
#!/usr/bin/env python
# Copyright (c) 2015, Robot Control and Pattern Recognition Group,
# Institute of Control and Computation Engineering
# Warsaw University of Technology
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Warsaw University of Technology nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Dawid Seredynski
#
import roslib
roslib.load_manifest('velma_scripts')
import rospy
import tf
from std_msgs.msg import *
from sensor_msgs.msg import *
from geometry_msgs.msg import *
from visualization_msgs.msg import *
from tf import *
from tf.transformations import *
import tf_conversions.posemath as pm
from tf2_msgs.msg import *
import PyKDL
import math
import numpy as np
import copy
import matplotlib.pyplot as plt
import thread
import random
import openravepy
from openravepy import *
from optparse import OptionParser
from openravepy.misc import OpenRAVEGlobalArguments
import itertools
import rospkg
import multiprocessing
import velmautils
from velma import Velma
import openraveinstance
import conversions as conv
#import rrt_star_planner_ee
import rrt_star_connect_planner
import tree
import rosparam
import tasks
def identityMatrix(size):
    I = np.matrix(np.zeros( (size, size) ))
for idx in range(size):
I[idx,idx] = 1.0
return I
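# Equivalent one-liner: np.matrix(np.identity(size)); the explicit loop is kept
# for readability.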
class TestHierarchyControl:
"""
"""
def __init__(self):
self.pub_marker = velmautils.MarkerPublisher()
def spin(self):
simulation = True
rospack = rospkg.RosPack()
env_file=rospack.get_path('velma_scripts') + '/data/jar/cabinet_test.env.xml'
srdf_path=rospack.get_path('velma_description') + '/robots/'
print "creating interface for Velma..."
# create the interface for Velma robot
velma = Velma()
print "done."
#
# Initialise Openrave
#
openrave = openraveinstance.OpenraveInstance()
openrave.startOpenraveURDF(env_file=env_file, viewer=True)
openrave.readRobot(srdf_path=srdf_path)
openrave.setCamera(PyKDL.Vector(2.0, 0.0, 2.0), PyKDL.Vector(0.60, 0.0, 1.10))
velma.waitForInit()
openrave.updateRobotConfigurationRos(velma.js_pos)
non_adj_links_ids = openrave.robot_rave.GetNonAdjacentLinks()
velma.switchToJoint()
lim_bo_soft, lim_up_soft = velma.getJointSoftLimitsVectors()
lim_bo, lim_up = velma.getJointLimitsVectors()
velma.fk_ik_solver.createJacobianFkSolvers('torso_base', 'right_HandPalmLink', velma.getJointStatesVectorNames())
velma.fk_ik_solver.createJacobianFkSolvers('torso_base', 'left_HandPalmLink', velma.getJointStatesVectorNames())
velma.fk_ik_solver.createSegmentToJointMap(velma.getJointStatesVectorNames(), velma.getInactiveJointStatesVector())
print velma.getJointStatesVectorNames()
r_HAND_targets = [
# PyKDL.Frame(PyKDL.Vector(0.5,0,1.8)),
# PyKDL.Frame(PyKDL.Rotation.RotY(170.0/180.0*math.pi), PyKDL.Vector(0.5,0,1.6)),
PyKDL.Frame(PyKDL.Rotation.RotY(90.0/180.0*math.pi), PyKDL.Vector(0.2,0.0,1.0)),
PyKDL.Frame(PyKDL.Rotation.RotY(90.0/180.0*math.pi), PyKDL.Vector(0.2,-0.5,1.0)),
]
l_HAND_targets = [
# PyKDL.Frame(PyKDL.Vector(0.5,0,1.8)),
# PyKDL.Frame(PyKDL.Rotation.RotY(170.0/180.0*math.pi), PyKDL.Vector(0.5,0,1.6)),
PyKDL.Frame(PyKDL.Rotation.RotY(90.0/180.0*math.pi) * PyKDL.Rotation.RotZ(180.0/180.0*math.pi), PyKDL.Vector(0.2,0.0,1.0)),
PyKDL.Frame(PyKDL.Rotation.RotY(90.0/180.0*math.pi) * PyKDL.Rotation.RotZ(180.0/180.0*math.pi), PyKDL.Vector(0.2,0.5,1.0)),
]
target_idx = 0
r_HAND_target = r_HAND_targets[target_idx]
l_HAND_target = l_HAND_targets[target_idx]
target_idx += 1
last_time = rospy.Time.now()
q = velma.getJointStatesVector()
q_names = velma.getJointStatesVectorNames()
iq = velma.getInactiveJointStatesVector()
counter = 0
while not rospy.is_shutdown():
if counter > 300:
r_HAND_target = r_HAND_targets[target_idx]
l_HAND_target = l_HAND_targets[target_idx]
target_idx = (target_idx + 1)%len(r_HAND_targets)
counter = 0
counter += 1
time_elapsed = rospy.Time.now() - last_time
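            # Joint-limit task (highest priority): J_JLC is a diagonal activation
            # Jacobian that ramps from 0 to 1 as a joint moves from its soft limit
            # towards its hard limit; delta_V_JLC holds the soft-limit violation.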
            J_JLC = np.matrix(np.zeros( (len(q), len(q)) ))
delta_V_JLC = np.empty(len(q))
for q_idx in range(len(q)):
if q[q_idx] < lim_bo_soft[q_idx]:
delta_V_JLC[q_idx] = q[q_idx] - lim_bo_soft[q_idx]
J_JLC[q_idx,q_idx] = min(1.0, 10*abs(q[q_idx] - lim_bo_soft[q_idx]) / abs(lim_bo[q_idx] - lim_bo_soft[q_idx]))
elif q[q_idx] > lim_up_soft[q_idx]:
delta_V_JLC[q_idx] = q[q_idx] - lim_up_soft[q_idx]
J_JLC[q_idx,q_idx] = min(1.0, 10*abs(q[q_idx] - lim_up_soft[q_idx]) / abs(lim_up[q_idx] - lim_up_soft[q_idx]))
else:
delta_V_JLC[q_idx] = 0.0
J_JLC[q_idx,q_idx] = 0.0
J_JLC_inv = np.linalg.pinv(J_JLC)
N_JLC = identityMatrix(len(q)) - (J_JLC_inv * J_JLC)
N_JLC_inv = np.linalg.pinv(N_JLC)
v_max_JLC = 20.0/180.0*math.pi
kp_JLC = 1.0
dx_JLC_des = kp_JLC * delta_V_JLC
# min(1.0, v_max_JLC/np.linalg.norm(dx_JLC_des))
if v_max_JLC > np.linalg.norm(dx_JLC_des):
vv_JLC = 1.0
else:
vv_JLC = v_max_JLC/np.linalg.norm(dx_JLC_des)
dx_JLC_ref = - vv_JLC * dx_JLC_des
# right hand
J_r_HAND = velma.fk_ik_solver.getJacobian('torso_base', 'right_HandPalmLink', q)
J_r_HAND_inv = np.linalg.pinv(J_r_HAND)
N_r_HAND = identityMatrix(len(q)) - (J_r_HAND_inv * J_r_HAND)
T_B_E = velma.fk_ik_solver.calculateFk2('torso_base', 'right_HandPalmLink', q)
r_HAND_current = T_B_E
r_HAND_diff = PyKDL.diff(r_HAND_target, r_HAND_current)
delta_V_HAND = np.empty(6)
delta_V_HAND[0] = r_HAND_diff.vel[0]
delta_V_HAND[1] = r_HAND_diff.vel[1]
delta_V_HAND[2] = r_HAND_diff.vel[2]
delta_V_HAND[3] = r_HAND_diff.rot[0]
delta_V_HAND[4] = r_HAND_diff.rot[1]
delta_V_HAND[5] = r_HAND_diff.rot[2]
v_max_HAND = 2.0
kp_HAND = 2.0
dx_HAND_des = kp_HAND * delta_V_HAND
if v_max_HAND > np.linalg.norm(dx_HAND_des):
vv_HAND = 1.0
else:
vv_HAND = v_max_HAND/np.linalg.norm(dx_HAND_des)
dx_r_HAND_ref = - vv_HAND * dx_HAND_des
# left hand
J_l_HAND = velma.fk_ik_solver.getJacobian('torso_base', 'left_HandPalmLink', q)
J_l_HAND_inv = np.linalg.pinv(J_l_HAND)
N_l_HAND = identityMatrix(len(q)) - (J_l_HAND_inv * J_l_HAND)
T_B_E = velma.fk_ik_solver.calculateFk2('torso_base', 'left_HandPalmLink', q)
l_HAND_current = T_B_E
l_HAND_diff = PyKDL.diff(l_HAND_target, l_HAND_current)
delta_V_HAND = np.empty(6)
delta_V_HAND[0] = l_HAND_diff.vel[0]
delta_V_HAND[1] = l_HAND_diff.vel[1]
delta_V_HAND[2] = l_HAND_diff.vel[2]
delta_V_HAND[3] = l_HAND_diff.rot[0]
delta_V_HAND[4] = l_HAND_diff.rot[1]
delta_V_HAND[5] = l_HAND_diff.rot[2]
v_max_HAND = 2.0
kp_HAND = 2.0
dx_HAND_des = kp_HAND * delta_V_HAND
if v_max_HAND > np.linalg.norm(dx_HAND_des):
vv_HAND = 1.0
else:
vv_HAND = v_max_HAND/np.linalg.norm(dx_HAND_des)
dx_l_HAND_ref = - vv_HAND * dx_HAND_des
link_collision_map = {}
openrave.updateRobotConfiguration(q, velma.getJointStatesVectorNames())
if True:
openrave.switchCollisionModel("velmasimplified1")
col_chk = openrave.env.GetCollisionChecker()
col_opt = col_chk.GetCollisionOptions()
col_chk.SetCollisionOptions(0x04) # CO_Contacts(0x04), CO_AllLinkCollisions(0x20)
total_contacts = 0
for link1_idx, link2_idx in non_adj_links_ids:
link1 = openrave.robot_rave.GetLinks()[link1_idx]
link2 = openrave.robot_rave.GetLinks()[link2_idx]
report = CollisionReport()
if col_chk.CheckCollision(link1=link1, link2=link2, report=report):
T_L1_W = conv.OpenraveToKDL(link1.GetTransform()).Inverse()
TR_L1_W = PyKDL.Frame(T_L1_W.M)
T_L2_W = conv.OpenraveToKDL(link2.GetTransform()).Inverse()
TR_L2_W = PyKDL.Frame(T_L2_W.M)
swapped = False
if link1_idx > link2_idx:
link1_idx, link2_idx = link2_idx, link1_idx
swapped = True
if not (link1_idx, link2_idx) in link_collision_map:
link_collision_map[(link1_idx, link2_idx)] = []
for contact in report.contacts:
pos_W = PyKDL.Vector(contact.pos[0], contact.pos[1], contact.pos[2])
norm_W = PyKDL.Vector(contact.norm[0], contact.norm[1], contact.norm[2])
if swapped:
link_collision_map[(link1_idx, link2_idx)].append( (pos_W, -norm_W, T_L2_W * pos_W, T_L1_W * pos_W, TR_L2_W * (-norm_W), TR_L1_W * (-norm_W), contact.depth) )
else:
link_collision_map[(link1_idx, link2_idx)].append( (pos_W, norm_W, T_L1_W * pos_W, T_L2_W * pos_W, TR_L1_W * norm_W, TR_L2_W * norm_W, contact.depth) )
total_contacts += len(report.contacts)
col_chk.SetCollisionOptions(col_opt)
print "links in contact:", len(link_collision_map), "total contacts:", total_contacts
omega_col = np.matrix(np.zeros( (len(q),1) ))
Ncol = identityMatrix(len(q))
for link1_idx, link2_idx in link_collision_map:
link1_name = openrave.robot_rave.GetLinks()[link1_idx].GetName()
link2_name = openrave.robot_rave.GetLinks()[link2_idx].GetName()
# l1_parent = velma.fk_ik_solver.isParent(link1_name, link2_name)
# l2_parent = velma.fk_ik_solver.isParent(link2_name, link1_name)
affected_dof = velma.fk_ik_solver.getAffectedDof(link1_name, link2_name)
# print "affected dof:"
# for dof_idx in affected_dof:
# print q_names[dof_idx]
contacts = link_collision_map[ (link1_idx, link2_idx) ]
for c in contacts:
pos_W, norm_W, pos_L1, pos_L2, norm_L1, norm_L2, depth = c
m_id = self.pub_marker.publishVectorMarker(pos_W, pos_W + norm_W*0.05, 1, 1, 0, 0, frame='world', namespace='default', scale=0.005)
if depth < 0:
print "ERROR: depth < 0:", depth
exit(0)
# print link1_name, link2_name
jac1 = PyKDL.Jacobian(len(q))
velma.fk_ik_solver.getJacobianForX(jac1, link1_name, pos_L1, q, iq)
jac2 = PyKDL.Jacobian(len(q))
velma.fk_ik_solver.getJacobianForX(jac2, link2_name, pos_L2, q, iq)
# repulsive velocity
V_max = 0.1
depth_max = 0.002
if depth > depth_max:
depth = depth_max
Vrep = V_max * depth * depth / (depth_max * depth_max)
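                    # Quadratic ramp: the repulsive speed grows with the square of the
                    # penetration depth and saturates at V_max when depth == depth_max.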
# the mapping between motions along contact normal and the Cartesian coordinates
e1 = norm_L1
e2 = norm_L2
Jd1 = np.matrix([e1[0], e1[1], e1[2]])
Jd2 = np.matrix([e2[0], e2[1], e2[2]])
# print "Jd1.shape", Jd1.shape
# rewrite the linear part of the jacobian
jac1_mx = np.matrix(np.zeros( (3, len(q)) ))
jac2_mx = np.matrix(np.zeros( (3, len(q)) ))
for q_idx in range(len(q)):
col1 = jac1.getColumn(q_idx)
col2 = jac2.getColumn(q_idx)
for row_idx in range(3):
jac1_mx[row_idx, q_idx] = col1[row_idx]
jac2_mx[row_idx, q_idx] = col2[row_idx]
# print "jac1_mx, jac2_mx"
# print jac1_mx
# print jac2_mx
# print "jac1_mx.shape", jac1_mx.shape
Jcol1 = Jd1 * jac1_mx
Jcol2 = Jd2 * jac2_mx
# print "Jcol2.shape", Jcol2.shape
Jcol = np.matrix(np.zeros( (2, len(q)) ))
for q_idx in range(len(q)):
                        if Jcol1[0, q_idx] < 0.000000001 or not q_idx in affected_dof:#l1_parent:
                            Jcol1[0, q_idx] = 0.0
                        if Jcol2[0, q_idx] < 0.000000001 or not q_idx in affected_dof:#l2_parent:
                            Jcol2[0, q_idx] = 0.0
                        Jcol[0, q_idx] = Jcol1[0, q_idx]
                        Jcol[1, q_idx] = Jcol2[0, q_idx]
# print Jcol.shape
# print "Jcol"
# print Jcol
Jcol_pinv = np.linalg.pinv(Jcol)
# Jcol_pinv = Jcol.transpose()
# print "Jcol_pinv"
# print Jcol_pinv
# Ncol1 = identityMatrix(len(q)) - np.linalg.pinv(Jcol1) * Jcol1
# Ncol2 = identityMatrix(len(q)) - np.linalg.pinv(Jcol2) * Jcol2
# Ncol = Ncol * Ncol1
# Ncol = Ncol * Ncol2
# omega_col += np.linalg.pinv(Jcol1) * (-Vrep)
# omega_col += np.linalg.pinv(Jcol2) * (Vrep)
# continue
# activation = min(1.0, depth/0.001)
# a_des = np.matrix(np.zeros( (len(q),len(q)) ))
# a_des[0,0] = a_des[1,1] = 1.0#activation
# U, S, V = numpy.linalg.svd(Jcol, full_matrices=True, compute_uv=True)
# print "V"
# print V
# print "S"
# print S
Ncol12 = identityMatrix(len(q)) - Jcol_pinv * Jcol
# Ncol12 = identityMatrix(len(q)) - Jcol.transpose() * (Jcol_pinv).transpose()
# Ncol12 = identityMatrix(len(q)) - (V * a_des * V.transpose())
# Ncol12 = identityMatrix(len(q)) - (Jcol.transpose() * a_des * Jcol)
Ncol = Ncol * Ncol12
Vrep1 = -Vrep
Vrep2 = Vrep
# if l1_parent:
# Vrep1 = 0.0
# if l2_parent:
# Vrep2 = 0.0
d_omega = Jcol_pinv * np.matrix([-Vrep, Vrep]).transpose()
# print "d_omega", d_omega
# print "Vrep", Vrep
# print q_names
# print "Jcol", Jcol
# print "Jcol_pinv", Jcol_pinv
# print "Jcol_pinv * Jcol", Jcol_pinv * Jcol
# print "Jcol * Jcol_pinv", Jcol * Jcol_pinv
# print "a_des", a_des
omega_col += d_omega
# print "depth", depth
# raw_input(".")
# print "omega_col", omega_col
# print dx_HAND_ref
omega_r_HAND = (J_r_HAND_inv * np.matrix(dx_r_HAND_ref).transpose())
omega_l_HAND = (J_l_HAND_inv * np.matrix(dx_l_HAND_ref).transpose())
Ncol_inv = np.linalg.pinv(Ncol)
N_r_HAND_inv = np.linalg.pinv(N_r_HAND)
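            # Task-priority composition: the joint-limit correction acts first;
            # collision avoidance and the right-hand task are pushed through the
            # (transposed) null-space projectors of the tasks above them, so
            # lower-priority motions do not fight higher-priority ones.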
# omega = J_JLC_inv * np.matrix(dx_JLC_ref).transpose() + N_JLC_inv * (omega_col + Ncol_inv * (omega_r_HAND))# + N_r_HAND_inv * omega_l_HAND))
omega = J_JLC_inv * np.matrix(dx_JLC_ref).transpose() + N_JLC.transpose() * (omega_col + Ncol.transpose() * (omega_r_HAND))# + N_r_HAND.transpose() * omega_l_HAND))
# print "omega", omega
# print "dx_JLC_ref"
# print dx_JLC_ref
# print "dx_HAND_ref"
# print dx_HAND_ref
omega_vector = np.empty(len(q))
for q_idx in range(len(q)):
omega_vector[q_idx] = omega[q_idx][0]
q += omega_vector * 0.002
if time_elapsed.to_sec() > 0.2:
last_time = rospy.Time.now()
velma.moveJoint(q, velma.getJointStatesVectorNames(), 0.05, start_time=0.14)
# rospy.sleep(0.01)
if __name__ == '__main__':
rospy.init_node('test_hierarchy_control')
task = TestHierarchyControl()
rospy.sleep(0.5)
task.spin()
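# ---------------------------------------------------------------------------
# Minimal illustration (a sketch, not part of the script above): the control
# loop composes tasks with classic null-space prioritization,
#     omega = pinv(J1)*dx1 + N1*pinv(J2)*dx2,  with N1 = I - pinv(J1)*J1,
# so the secondary task can only use motions invisible to the primary task.
# The Jacobians and task velocities below are made-up placeholders.
import numpy as np

def prioritize(J1, dx1, J2, dx2):
    """Return joint velocities realizing dx1 exactly and dx2 best-effort."""
    J1_pinv = np.linalg.pinv(J1)
    N1 = np.eye(J1.shape[1]) - J1_pinv.dot(J1)  # null-space projector of task 1
    return J1_pinv.dot(dx1) + N1.dot(np.linalg.pinv(J2).dot(dx2))

# Example: a 2-DOF primary task and a 1-DOF secondary task in a 4-DOF system.
# J1 = np.random.rand(2, 4); J2 = np.random.rand(1, 4)
# dq = prioritize(J1, np.array([0.1, 0.0]), J2, np.array([0.05]))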
predicates.py
|
# -*- coding: utf-8 -*-
#
# Copyright 2014-2021 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Predicates structure for the BigML local AnomalyTree
This module defines an auxiliary Predicates structure that is used in the
AnomalyTree to save the node's predicates info.
"""
from bigml.predicate import Predicate
class Predicates():
"""A list of predicates to be evaluated in an anomaly tree's node.
"""
def __init__(self, predicates_list):
self.predicates = []
for predicate in predicates_list:
if predicate is True:
self.predicates.append(True)
else:
self.predicates.append(
Predicate(predicate.get('op'),
predicate.get('field'),
predicate.get('value'),
predicate.get('term')))
def to_rule(self, fields, label='name'):
""" Builds rule string from a predicates list
"""
return " and ".join([predicate.to_rule(fields, label=label) for
predicate in self.predicates
if not isinstance(predicate, bool)])
def apply(self, input_data, fields):
""" Applies the operators defined in each of the predicates to
the provided input data
"""
return all([predicate.apply(input_data, fields) for
predicate in self.predicates
if isinstance(predicate, Predicate)])
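# Illustrative usage (a sketch; the field ids, names and values below are
# hypothetical, not taken from the BigML API):
#
#   preds = Predicates([
#       True,
#       {'op': '>', 'field': '000001', 'value': 3.5, 'term': None},
#   ])
#   rule = preds.to_rule({'000001': {'name': 'petal length'}})
#   # -> "petal length > 3.5" (exact rendering depends on Predicate.to_rule)
#   ok = preds.apply(input_data, fields)  # True iff every predicate holds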
mod.rs
|
//! Resource data implementations.
//!
//! This module will eventually contain implementations for the record data
//! for all defined resource record types.
//!
//! The types are named identically to the [`Rtype`] variant they implement.
//! They are grouped into submodules for the RFCs they are defined in. All
//! types are also re-exported at the top level here. I.e., for the AAAA
//! record type, you can simply `use domain::rdata::Aaaa` instead of
//! `use domain::rdata::rfc3596::Aaaa` which nobody could possibly remember.
//! There are, however, some helper data types defined here and there which
//! are not re-exported to keep things somewhat tidy.
//!
//! See the [`Rtype`] enum for the complete set of record types and,
//! consequently, those types that are still missing.
//!
//! [`Rtype`]: ../iana/enum.Rtype.html
pub mod rfc1035;
pub mod rfc2782;
pub mod rfc3596;
#[macro_use] mod macros;
mod generic;
use ::bits::{CharStrBuf, DNameBuf};
// The master_types! macro (defined in self::macros) creates the
// MasterRecordData enum produced when parsing master files (aka zone files).
//
// Include all record types that can occur in master files. Place the name of
// the variant (identical to the type name) on the left side of the double
// arrow and the name of the type on the right. If the type is generic, use
// the owned version.
//
// The macro creates the re-export of the record data type.
master_types!{
rfc1035::{
A => A,
Cname => Cname<DNameBuf>,
Hinfo => Hinfo<CharStrBuf>,
Mb => Mb<DNameBuf>,
Md => Md<DNameBuf>,
Mf => Mf<DNameBuf>,
Mg => Mg<DNameBuf>,
Minfo => Minfo<DNameBuf>,
Mr => Mr<DNameBuf>,
Mx => Mx<DNameBuf>,
Ns => Ns<DNameBuf>,
Ptr => Ptr<DNameBuf>,
Soa => Soa<DNameBuf>,
Txt => Txt<Vec<u8>>,
Wks => Wks<rfc1035::WksBitmapBuf>,
}
rfc2782::{
Srv => Srv<DNameBuf>,
}
rfc3596::{
Aaaa => Aaaa,
}
}
// The pseudo_types! macro (defined in self::macros) creates the re-exports
// for all the types not part of master_types! above.
pseudo_types!{
rfc1035::{Null};
//rfc6891::{Opt};
}
/// Formats record data from a message parser in master file format.
///
/// This helper function formats the record data at the start of `parser`
/// using the formatter `f`. It assumes that the record data is for a
/// record of record type `rtype`.
///
/// If the record type is known, the function tries to use the type’s
/// proper master data format. Otherwise the generic format is used.
pub fn fmt_rdata(rtype: ::iana::Rtype, parser: &mut ::bits::Parser,
f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match try!(fmt_master_data(rtype, parser, f)) {
Some(res) => Ok(res),
None => {
|
}
}
/// Parsed versions of all record data types.
///
/// This module defines or re-exports type aliases for all record data
/// types that use parsed domain names and references to bytes slices where
/// applicable. For convenience, it also includes re-exports for those types
/// that are not in fact generic.
///
/// Use the types from this module when working with wire format DNS messages.
pub mod parsed {
pub use super::rfc1035::parsed::*;
pub use super::rfc3596::Aaaa;
pub type Srv<'a> = super::rfc2782::Srv<::bits::ParsedDName<'a>>;
}
/// Owned versions of all record data types.
///
/// This module defines or re-exports type aliases for all record data
/// types using owned data only. For convenience, it also includes re-exports
/// for those types that are not generic.
///
/// Use the types from this module if you are working with master file data
/// or if you are constructing your own values.
pub mod owned {
pub use super::rfc1035::owned::*;
pub use super::rfc3596::Aaaa;
pub type Srv = super::rfc2782::Srv<::bits::DNameBuf>;
}
|
let mut parser = parser.clone();
let len = parser.remaining();
let data = parser.parse_bytes(len).unwrap();
generic::fmt(data, f)
}
|
conditional_block
|
mod.rs
|
//! Resource data implementations.
//!
//! This module will eventually contain implementations for the record data
//! for all defined resource record types.
//!
//! The types are named identically to the [`Rtype`] variant they implement.
//! They are grouped into submodules for the RFCs they are defined in. All
//! types are also re-exported at the top level here. I.e., for the AAAA
//! record type, you can simply `use domain::rdata::Aaaa` instead of
//! `use domain::rdata::rfc3596::Aaaa` which nobody could possibly remember.
//! There are, however, some helper data types defined here and there which
//! are not re-exported to keep things somewhat tidy.
//!
//! See the [`Rtype`] enum for the complete set of record types and,
//! consequently, those types that are still missing.
//!
//! [`Rtype`]: ../iana/enum.Rtype.html
pub mod rfc1035;
pub mod rfc2782;
pub mod rfc3596;
#[macro_use] mod macros;
mod generic;
use ::bits::{CharStrBuf, DNameBuf};
// The master_types! macro (defined in self::macros) creates the
// MasterRecordData enum produced when parsing master files (aka zone files).
//
// Include all record types that can occur in master files. Place the name of
// the variant (identical to the type name) on the left side of the double
// arrow and the name of the type on the right. If the type is generic, use
// the owned version.
//
// The macro creates the re-export of the record data type.
master_types!{
rfc1035::{
A => A,
Cname => Cname<DNameBuf>,
Hinfo => Hinfo<CharStrBuf>,
Mb => Mb<DNameBuf>,
Md => Md<DNameBuf>,
Mf => Mf<DNameBuf>,
Mg => Mg<DNameBuf>,
Minfo => Minfo<DNameBuf>,
Mr => Mr<DNameBuf>,
Mx => Mx<DNameBuf>,
Ns => Ns<DNameBuf>,
Ptr => Ptr<DNameBuf>,
Soa => Soa<DNameBuf>,
Txt => Txt<Vec<u8>>,
Wks => Wks<rfc1035::WksBitmapBuf>,
}
rfc2782::{
Srv => Srv<DNameBuf>,
}
rfc3596::{
Aaaa => Aaaa,
}
}
// The pseudo_types! macro (defined in self::macros) creates the re-exports
// for all the types not part of master_types! above.
pseudo_types!{
rfc1035::{Null};
//rfc6891::{Opt};
}
/// Formats record data from a message parser in master file format.
///
/// This helper function formats the record data at the start of `parser`
/// using the formatter `f`. It assumes that the record data is for a
/// record of record type `rtype`.
///
/// If the record type is known, the function tries to use the type’s
/// proper master data format. Otherwise the generic format is used.
pub fn fmt_rdata(rtype: ::iana::Rtype, parser: &mut ::bits::Parser,
f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
|
/// Parsed versions of all record data types.
///
/// This module defines or re-exports type aliases for all record data
/// types that use parsed domain names and references to bytes slices where
/// applicable. For convenience, it also includes re-exports for those types
/// that are not in fact generic.
///
/// Use the types from this module when working with wire format DNS messages.
pub mod parsed {
pub use super::rfc1035::parsed::*;
pub use super::rfc3596::Aaaa;
pub type Srv<'a> = super::rfc2782::Srv<::bits::ParsedDName<'a>>;
}
/// Owned versions of all record data types.
///
/// This module defines or re-exports type aliases for all record data
/// types using owned data only. For convenience, it also includes re-exports
/// for those types that are not generic.
///
/// Use the types from this module if you are working with master file data
/// or if you are constructing your own values.
pub mod owned {
pub use super::rfc1035::owned::*;
pub use super::rfc3596::Aaaa;
pub type Srv = super::rfc2782::Srv<::bits::DNameBuf>;
}
|
match try!(fmt_master_data(rtype, parser, f)) {
Some(res) => Ok(res),
None => {
let mut parser = parser.clone();
let len = parser.remaining();
let data = parser.parse_bytes(len).unwrap();
generic::fmt(data, f)
}
}
}
|
identifier_body
|
mod.rs
|
//! Resource data implementations.
//!
//! This module will eventually contain implementations for the record data
//! for all defined resource record types.
//!
//! The types are named identically to the [`Rtype`] variant they implement.
//! They are grouped into submodules for the RFCs they are defined in. All
//! types are also re-exported at the top level here. I.e., for the AAAA
//! record type, you can simply `use domain::rdata::Aaaa` instead of
//! `use domain::rdata::rfc3596::Aaaa` which nobody could possibly remember.
//! There are, however, some helper data types defined here and there which
//! are not re-exported to keep things somewhat tidy.
//!
//! See the [`Rtype`] enum for the complete set of record types and,
//! consequently, those types that are still missing.
//!
//! [`Rtype`]: ../iana/enum.Rtype.html
pub mod rfc1035;
pub mod rfc2782;
pub mod rfc3596;
#[macro_use] mod macros;
mod generic;
use ::bits::{CharStrBuf, DNameBuf};
// The master_types! macro (defined in self::macros) creates the
// MasterRecordData enum produced when parsing master files (aka zone files).
//
// Include all record types that can occur in master files. Place the name of
// the variant (identical to the type name) on the left side of the double
// arrow and the name of the type on the right. If the type is generic, use
// the owned version.
//
// The macro creates the re-export of the record data type.
master_types!{
rfc1035::{
A => A,
Cname => Cname<DNameBuf>,
Hinfo => Hinfo<CharStrBuf>,
Mb => Mb<DNameBuf>,
Md => Md<DNameBuf>,
Mf => Mf<DNameBuf>,
Mg => Mg<DNameBuf>,
Minfo => Minfo<DNameBuf>,
Mr => Mr<DNameBuf>,
Mx => Mx<DNameBuf>,
Ns => Ns<DNameBuf>,
Ptr => Ptr<DNameBuf>,
Soa => Soa<DNameBuf>,
Txt => Txt<Vec<u8>>,
Wks => Wks<rfc1035::WksBitmapBuf>,
}
rfc2782::{
Srv => Srv<DNameBuf>,
}
rfc3596::{
Aaaa => Aaaa,
}
}
// The pseudo_types! macro (defined in self::macros) creates the re-exports
// for all the types not part of master_types! above.
pseudo_types!{
rfc1035::{Null};
//rfc6891::{Opt};
}
/// Formats record data from a message parser in master file format.
///
/// This helper function formats the record data at the start of `parser`
/// using the formatter `f`. It assumes that the record data is for a
/// record of record type `rtype`.
///
/// If the record type is known, the function tries to use the type’s
/// proper master data format. Otherwise the generic format is used.
pub fn fm
|
type: ::iana::Rtype, parser: &mut ::bits::Parser,
f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match try!(fmt_master_data(rtype, parser, f)) {
Some(res) => Ok(res),
None => {
let mut parser = parser.clone();
let len = parser.remaining();
let data = parser.parse_bytes(len).unwrap();
generic::fmt(data, f)
}
}
}
/// Parsed versions of all record data types.
///
/// This module defines or re-exports type aliases for all record data
/// types that use parsed domain names and references to bytes slices where
/// applicable. For convenience, it also includes re-exports for those types
/// that are not in fact generic.
///
/// Use the types from this module when working with wire format DNS messages.
pub mod parsed {
pub use super::rfc1035::parsed::*;
pub use super::rfc3596::Aaaa;
pub type Srv<'a> = super::rfc2782::Srv<::bits::ParsedDName<'a>>;
}
/// Owned versions of all record data types.
///
/// This module defines or re-exports type aliases for all record data
/// types using owned data only. For convenience, it also includes re-exports
/// for those types that are not generic.
///
/// Use the types from this module if you are working with master file data
/// or if you are constructing your own values.
pub mod owned {
pub use super::rfc1035::owned::*;
pub use super::rfc3596::Aaaa;
pub type Srv = super::rfc2782::Srv<::bits::DNameBuf>;
}
|
t_rdata(r
|
identifier_name
|
mod.rs
|
//! Resource data implementations.
//!
//! This module will eventually contain implementations for the record data
//! for all defined resource record types.
//!
//! The types are named identically to the [`Rtype`] variant they implement.
//! They are grouped into submodules for the RFCs they are defined in. All
//! types are also re-exported at the top level here. I.e., for the AAAA
//! record type, you can simply `use domain::rdata::Aaaa` instead of
//! `use domain::rdata::rfc3596::Aaaa` which nobody could possibly remember.
//! There are, however, some helper data types defined here and there which
//! are not re-exported to keep things somewhat tidy.
//!
//! See the [`Rtype`] enum for the complete set of record types and,
//! consequently, those types that are still missing.
//!
//! [`Rtype`]: ../iana/enum.Rtype.html
pub mod rfc1035;
pub mod rfc2782;
pub mod rfc3596;
#[macro_use] mod macros;
mod generic;
use ::bits::{CharStrBuf, DNameBuf};
// The master_types! macro (defined in self::macros) creates the
// MasterRecordData enum produced when parsing master files (aka zone files).
|
// arrow and the name of the type on the right. If the type is generic, use
// the owned version.
//
// The macro creates the re-export of the record data type.
master_types!{
rfc1035::{
A => A,
Cname => Cname<DNameBuf>,
Hinfo => Hinfo<CharStrBuf>,
Mb => Mb<DNameBuf>,
Md => Md<DNameBuf>,
Mf => Mf<DNameBuf>,
Mg => Mg<DNameBuf>,
Minfo => Minfo<DNameBuf>,
Mr => Mr<DNameBuf>,
Mx => Mx<DNameBuf>,
Ns => Ns<DNameBuf>,
Ptr => Ptr<DNameBuf>,
Soa => Soa<DNameBuf>,
Txt => Txt<Vec<u8>>,
Wks => Wks<rfc1035::WksBitmapBuf>,
}
rfc2782::{
Srv => Srv<DNameBuf>,
}
rfc3596::{
Aaaa => Aaaa,
}
}
// The pseudo_types! macro (defined in self::macros) creates the re-exports
// for all the types not part of master_types! above.
pseudo_types!{
rfc1035::{Null};
//rfc6891::{Opt};
}
/// Formats record data from a message parser in master file format.
///
/// This helper function formats the record data at the start of `parser`
/// using the formatter `f`. It assumes that the record data is for a
/// record of record type `rtype`.
///
/// If the record type is known, the function tries to use the type’s
/// proper master data format. Otherwise the generic format is used.
pub fn fmt_rdata(rtype: ::iana::Rtype, parser: &mut ::bits::Parser,
f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match try!(fmt_master_data(rtype, parser, f)) {
Some(res) => Ok(res),
None => {
let mut parser = parser.clone();
let len = parser.remaining();
let data = parser.parse_bytes(len).unwrap();
generic::fmt(data, f)
}
}
}
/// Parsed versions of all record data types.
///
/// This module defines or re-exports type aliases for all record data
/// types that use parsed domain names and references to bytes slices where
/// applicable. For convenience, it also includes re-exports for those types
/// that are not in fact generic.
///
/// Use the types from this module when working with wire format DNS messages.
pub mod parsed {
pub use super::rfc1035::parsed::*;
pub use super::rfc3596::Aaaa;
pub type Srv<'a> = super::rfc2782::Srv<::bits::ParsedDName<'a>>;
}
/// Owned versions of all record data types.
///
/// This module defines or re-exports type aliases for all record data
/// types using owned data only. For convenience, it also includes re-exports
/// for those types that are not generic.
///
/// Use the types from this module if you are working with master file data
/// or if you are constructing your own values.
pub mod owned {
pub use super::rfc1035::owned::*;
pub use super::rfc3596::Aaaa;
pub type Srv = super::rfc2782::Srv<::bits::DNameBuf>;
}
|
//
// Include all record types that can occur in master files. Place the name of
// the variant (identical to the type name) on the left side of the double
|
random_line_split
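The fallback branch of fmt_rdata above dumps unknown record data through generic::fmt. As a rough illustration only: RFC 3597 defines a generic text form for unknown rdata, `\# <length> <hex>`, and a Python sketch of that representation looks like this (whether generic::fmt emits exactly this syntax is an assumption):

def fmt_generic_rdata(rdata):
    """Render raw rdata in RFC 3597's generic unknown-record syntax."""
    return "\\# %d %s" % (len(rdata), rdata.hex())

print(fmt_generic_rdata(bytes([192, 0, 2, 1])))  # -> \# 4 c0000201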
|
lib.rs
|
#![feature(const_fn)]
#![feature(drop_types_in_const)]
#![feature(proc_macro)]
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate bootstrap_rs as bootstrap;
extern crate fiber;
use fiber::FiberId;
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt::{self, Display, Formatter};
use std::mem;
use std::sync::Mutex;
use std::time::Duration;
#[cfg(target_os="windows")]
#[path="windows.rs"]
pub mod platform;
pub mod stats;
thread_local! {
static CONTEXT: RefCell<Context> = RefCell::new(Context::new());
}
lazy_static! {
static ref CONTEXT_MAP: Mutex<HashMap<FiberId, Context>> = Mutex::new(HashMap::with_capacity(1024));
static ref EVENTS: Mutex<Vec<Event>> = Mutex::new(Vec::new());
}
/// Swaps the currently tracked execution context with the specified context.
pub fn switch_context(old: FiberId, new: FiberId) {
with_context(|stack| {
let timestamp = platform::timestamp();
// Push an end event for each of the time slices.
for stopwatch in stack.iter().rev() {
push_event(Event {
name: stopwatch.name,
cat: String::new(),
ph: "E",
ts: timestamp,
tid: platform::thread_id(),
pid: 0,
});
}
});
let mut context_map = CONTEXT_MAP.lock().expect("Unable to acquire lock on context map");
let new_context = context_map.remove(&new).unwrap_or(Context::new());
let old_context = with_context(move |context| {
let mut new_context = new_context;
mem::swap(context, &mut new_context);
new_context
});
context_map.insert(old, old_context);
with_context(|stack| {
let timestamp = platform::timestamp();
// Push a begin event for each of the time slices.
for stopwatch in stack.iter() {
push_event(Event {
name: stopwatch.name,
cat: String::new(),
ph: "B",
ts: timestamp,
tid: platform::thread_id(),
pid: 0,
});
}
});
}
/// Writes the events history to a string.
pub fn write_events_to_string() -> String {
let events = EVENTS.lock().expect("Events mutex got poisoned");
serde_json::to_string(&*events).unwrap()
}
pub struct Stopwatch {
name: &'static str,
}
impl Stopwatch {
pub fn new(name: &'static str) -> Stopwatch {
push_event(Event {
name: name,
cat: String::new(),
ph: "B",
ts: platform::timestamp(),
tid: platform::thread_id(),
pid: 0, // TODO: Do we care about tracking process ID?
});
with_context(|stack| {
stack.push(StopwatchData { name: name });
});
Stopwatch {
name: name,
}
}
pub fn with_budget(name: &'static str, _budget: Duration) -> Stopwatch {
// TODO: We should actually do something with the budget, right?
Stopwatch::new(name)
}
}
impl Drop for Stopwatch {
fn drop(&mut self) {
with_context(|stack| {
let stopwatch = stack.pop().expect("No stopwatch popped, stack is corrupted");
assert_eq!(self.name, stopwatch.name, "Stack got corrupted I guess");
});
push_event(Event {
name: self.name,
cat: String::new(),
ph: "E",
ts: platform::timestamp(),
tid: platform::thread_id(),
pid: 0, // TODO: Do we care about tracking process ID?
});
}
}
#[derive(Debug, Serialize)]
struct
|
{
/// Human-readable name for the event.
name: &'static str,
/// Event category.
cat: String,
/// Event phase (i.e. the event type).
ph: &'static str,
/// Timestamp in microseconds.
ts: i64,
/// Process ID for the event.
pid: usize,
/// Thread ID for the event.
tid: usize,
}
fn push_event(event: Event) {
let mut events = EVENTS.lock().expect("Events mutex got poisoned");
events.push(event);
}
#[derive(Debug, Clone, Copy)]
struct StopwatchData {
name: &'static str,
}
type Context = Vec<StopwatchData>;
fn with_context<F, T>(func: F) -> T
where F: FnOnce(&mut Context) -> T
{
CONTEXT.with(move |context_cell| {
let mut context = context_cell.borrow_mut();
func(&mut *context)
})
}
pub struct PrettyDuration(pub Duration);
impl Display for PrettyDuration {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
let mins = self.0.as_secs() / 60;
let secs = self.0.as_secs() % 60;
let millis = self.0.subsec_nanos() as u64 / 1_000_000;
let micros = (self.0.subsec_nanos() / 1_000) % 1_000;
if mins > 0 {
write!(formatter, "{}m {}s {}.{}ms", mins, secs, millis, micros)
} else if secs > 0 {
write!(formatter, "{}s {}.{}ms", secs, millis, micros)
} else {
write!(formatter, "{}.{}ms", millis, micros)
}
}
}
|
Event
|
identifier_name
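The Event struct in this profiler (name/cat/ph/ts/pid/tid, with paired "B"/"E" phases and microsecond timestamps) uses the same field names as Chrome's trace-event JSON format, so the serialized output should load in a trace viewer; treating it as exactly that format is an assumption. A Python sketch producing the same shape:

import json
import time

def event(name, phase, pid=0, tid=0):
    """One begin ("B") or end ("E") event with a microsecond timestamp."""
    return {"name": name, "cat": "", "ph": phase,
            "ts": int(time.monotonic() * 1_000_000), "pid": pid, "tid": tid}

events = [event("frame", "B")]
# ... timed work would happen here ...
events.append(event("frame", "E"))
print(json.dumps(events))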
|
lib.rs
|
#![feature(const_fn)]
#![feature(drop_types_in_const)]
#![feature(proc_macro)]
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate bootstrap_rs as bootstrap;
extern crate fiber;
use fiber::FiberId;
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt::{self, Display, Formatter};
use std::mem;
use std::sync::Mutex;
use std::time::Duration;
#[cfg(target_os="windows")]
#[path="windows.rs"]
pub mod platform;
pub mod stats;
thread_local! {
static CONTEXT: RefCell<Context> = RefCell::new(Context::new());
}
lazy_static! {
static ref CONTEXT_MAP: Mutex<HashMap<FiberId, Context>> = Mutex::new(HashMap::with_capacity(1024));
static ref EVENTS: Mutex<Vec<Event>> = Mutex::new(Vec::new());
}
/// Swaps the currently tracked execution context with the specified context.
pub fn switch_context(old: FiberId, new: FiberId) {
with_context(|stack| {
let timestamp = platform::timestamp();
// Push an end event for each of the time slices.
for stopwatch in stack.iter().rev() {
push_event(Event {
name: stopwatch.name,
cat: String::new(),
ph: "E",
ts: timestamp,
tid: platform::thread_id(),
pid: 0,
});
}
});
let mut context_map = CONTEXT_MAP.lock().expect("Unable to acquire lock on context map");
let new_context = context_map.remove(&new).unwrap_or(Context::new());
let old_context = with_context(move |context| {
let mut new_context = new_context;
mem::swap(context, &mut new_context);
new_context
});
context_map.insert(old, old_context);
with_context(|stack| {
let timestamp = platform::timestamp();
// Push a begin event for each of the time slices.
for stopwatch in stack.iter() {
push_event(Event {
name: stopwatch.name,
cat: String::new(),
ph: "B",
ts: timestamp,
tid: platform::thread_id(),
pid: 0,
});
}
});
}
/// Writes the events history to a string.
pub fn write_events_to_string() -> String {
let events = EVENTS.lock().expect("Events mutex got poisoned");
serde_json::to_string(&*events).unwrap()
}
pub struct Stopwatch {
name: &'static str,
}
impl Stopwatch {
pub fn new(name: &'static str) -> Stopwatch
|
pub fn with_budget(name: &'static str, _budget: Duration) -> Stopwatch {
// TODO: We should actually do something with the budget, right?
Stopwatch::new(name)
}
}
impl Drop for Stopwatch {
fn drop(&mut self) {
with_context(|stack| {
let stopwatch = stack.pop().expect("No stopwatch popped, stack is corrupted");
assert_eq!(self.name, stopwatch.name, "Stack got corrupted I guess");
});
push_event(Event {
name: self.name,
cat: String::new(),
ph: "E",
ts: platform::timestamp(),
tid: platform::thread_id(),
pid: 0, // TODO: Do we care about tracking process ID?
});
}
}
#[derive(Debug, Serialize)]
struct Event {
/// Human-readable name for the event.
name: &'static str,
/// Event category.
cat: String,
/// Event phase (i.e. the event type).
ph: &'static str,
/// Timestamp in microseconds.
ts: i64,
/// Process ID for the event.
pid: usize,
/// Thread ID for the event.
tid: usize,
}
fn push_event(event: Event) {
let mut events = EVENTS.lock().expect("Events mutex got poisoned");
events.push(event);
}
#[derive(Debug, Clone, Copy)]
struct StopwatchData {
name: &'static str,
}
type Context = Vec<StopwatchData>;
fn with_context<F, T>(func: F) -> T
where F: FnOnce(&mut Context) -> T
{
CONTEXT.with(move |context_cell| {
let mut context = context_cell.borrow_mut();
func(&mut *context)
})
}
pub struct PrettyDuration(pub Duration);
impl Display for PrettyDuration {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
let mins = self.0.as_secs() / 60;
let secs = self.0.as_secs() % 60;
let millis = self.0.subsec_nanos() as u64 / 1_000_000;
let micros = (self.0.subsec_nanos() / 1_000) % 1_000;
if mins > 0 {
write!(formatter, "{}m {}s {}.{}ms", mins, secs, millis, micros)
} else if secs > 0 {
write!(formatter, "{}s {}.{}ms", secs, millis, micros)
} else {
write!(formatter, "{}.{}ms", millis, micros)
}
}
}
|
{
push_event(Event {
name: name,
cat: String::new(),
ph: "B",
ts: platform::timestamp(),
tid: platform::thread_id(),
pid: 0, // TODO: Do we care about tracking process ID?
});
with_context(|stack| {
stack.push(StopwatchData { name: name });
});
Stopwatch {
name: name,
}
}
|
identifier_body
|
lib.rs
|
#![feature(const_fn)]
#![feature(drop_types_in_const)]
#![feature(proc_macro)]
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate bootstrap_rs as bootstrap;
extern crate fiber;
use fiber::FiberId;
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt::{self, Display, Formatter};
use std::mem;
use std::sync::Mutex;
use std::time::Duration;
#[cfg(target_os="windows")]
#[path="windows.rs"]
pub mod platform;
pub mod stats;
thread_local! {
static CONTEXT: RefCell<Context> = RefCell::new(Context::new());
}
lazy_static! {
static ref CONTEXT_MAP: Mutex<HashMap<FiberId, Context>> = Mutex::new(HashMap::with_capacity(1024));
static ref EVENTS: Mutex<Vec<Event>> = Mutex::new(Vec::new());
}
/// Swaps the currently tracked execution context with the specified context.
pub fn switch_context(old: FiberId, new: FiberId) {
with_context(|stack| {
let timestamp = platform::timestamp();
// Push an end event for each of the time slices.
for stopwatch in stack.iter().rev() {
push_event(Event {
name: stopwatch.name,
cat: String::new(),
ph: "E",
ts: timestamp,
tid: platform::thread_id(),
pid: 0,
});
}
});
let mut context_map = CONTEXT_MAP.lock().expect("Unable to acquire lock on context map");
let new_context = context_map.remove(&new).unwrap_or(Context::new());
let old_context = with_context(move |context| {
let mut new_context = new_context;
mem::swap(context, &mut new_context);
new_context
});
context_map.insert(old, old_context);
with_context(|stack| {
let timestamp = platform::timestamp();
// Push a begin event for each of the time slices.
for stopwatch in stack.iter() {
push_event(Event {
name: stopwatch.name,
cat: String::new(),
ph: "B",
ts: timestamp,
tid: platform::thread_id(),
pid: 0,
});
}
});
}
/// Writes the events history to a string.
pub fn write_events_to_string() -> String {
let events = EVENTS.lock().expect("Events mutex got poisoned");
serde_json::to_string(&*events).unwrap()
}
pub struct Stopwatch {
name: &'static str,
}
impl Stopwatch {
pub fn new(name: &'static str) -> Stopwatch {
push_event(Event {
name: name,
cat: String::new(),
ph: "B",
ts: platform::timestamp(),
tid: platform::thread_id(),
pid: 0, // TODO: Do we care about tracking process ID?
});
with_context(|stack| {
stack.push(StopwatchData { name: name });
});
Stopwatch {
name: name,
}
}
pub fn with_budget(name: &'static str, _budget: Duration) -> Stopwatch {
// TODO: We should actually do something with the budget, right?
Stopwatch::new(name)
}
}
impl Drop for Stopwatch {
fn drop(&mut self) {
with_context(|stack| {
let stopwatch = stack.pop().expect("No stopwatch popped, stack is corrupted");
assert_eq!(self.name, stopwatch.name, "Stack got corrupted I guess");
});
push_event(Event {
name: self.name,
cat: String::new(),
ph: "E",
ts: platform::timestamp(),
tid: platform::thread_id(),
pid: 0, // TODO: Do we care about tracking process ID?
});
}
}
#[derive(Debug, Serialize)]
struct Event {
/// Human-readable name for the event.
name: &'static str,
/// Event category.
cat: String,
/// Event phase (i.e. the event type).
ph: &'static str,
/// Timestamp in microseconds.
ts: i64,
/// Process ID for the event.
pid: usize,
/// Thread ID for the event.
tid: usize,
}
fn push_event(event: Event) {
let mut events = EVENTS.lock().expect("Events mutex got poisoned");
events.push(event);
}
#[derive(Debug, Clone, Copy)]
struct StopwatchData {
name: &'static str,
}
type Context = Vec<StopwatchData>;
fn with_context<F, T>(func: F) -> T
where F: FnOnce(&mut Context) -> T
{
CONTEXT.with(move |context_cell| {
let mut context = context_cell.borrow_mut();
func(&mut *context)
})
}
pub struct PrettyDuration(pub Duration);
impl Display for PrettyDuration {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
let mins = self.0.as_secs() / 60;
let secs = self.0.as_secs() % 60;
let millis = self.0.subsec_nanos() as u64 / 1_000_000;
let micros = (self.0.subsec_nanos() / 1_000) % 1_000;
|
write!(formatter, "{}m {}s {}.{}ms", mins, secs, millis, micros)
} else if secs > 0 {
write!(formatter, "{}s {}.{}ms", secs, millis, micros)
} else {
write!(formatter, "{}.{}ms", millis, micros)
}
}
}
|
if mins > 0 {
|
random_line_split
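The PrettyDuration arithmetic above is easy to sanity-check. A direct Python transcription (assuming a duration given as whole seconds plus subsecond nanoseconds, like Rust's Duration):

def pretty(secs_total, subsec_nanos):
    """Mirror PrettyDuration::fmt: minutes, seconds, then millis.micros."""
    mins, secs = divmod(secs_total, 60)
    millis = subsec_nanos // 1_000_000
    micros = (subsec_nanos // 1_000) % 1_000
    if mins > 0:
        return "%dm %ds %d.%dms" % (mins, secs, millis, micros)
    if secs > 0:
        return "%ds %d.%dms" % (secs, millis, micros)
    return "%d.%dms" % (millis, micros)

# Note: like the Rust code, the micros part is not zero-padded.
print(pretty(75, 123_456_789))  # -> 1m 15s 123.456ms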
|
lib.rs
|
#![feature(const_fn)]
#![feature(drop_types_in_const)]
#![feature(proc_macro)]
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate bootstrap_rs as bootstrap;
extern crate fiber;
use fiber::FiberId;
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt::{self, Display, Formatter};
use std::mem;
use std::sync::Mutex;
use std::time::Duration;
#[cfg(target_os="windows")]
#[path="windows.rs"]
pub mod platform;
pub mod stats;
thread_local! {
static CONTEXT: RefCell<Context> = RefCell::new(Context::new());
}
lazy_static! {
static ref CONTEXT_MAP: Mutex<HashMap<FiberId, Context>> = Mutex::new(HashMap::with_capacity(1024));
static ref EVENTS: Mutex<Vec<Event>> = Mutex::new(Vec::new());
}
/// Swaps the currently tracked execution context with the specified context.
pub fn switch_context(old: FiberId, new: FiberId) {
with_context(|stack| {
let timestamp = platform::timestamp();
// Push an end event for each of the time slices.
for stopwatch in stack.iter().rev() {
push_event(Event {
name: stopwatch.name,
cat: String::new(),
ph: "E",
ts: timestamp,
tid: platform::thread_id(),
pid: 0,
});
}
});
let mut context_map = CONTEXT_MAP.lock().expect("Unable to acquire lock on context map");
let new_context = context_map.remove(&new).unwrap_or(Context::new());
let old_context = with_context(move |context| {
let mut new_context = new_context;
mem::swap(context, &mut new_context);
new_context
});
context_map.insert(old, old_context);
with_context(|stack| {
let timestamp = platform::timestamp();
// Push a begin event for each of the time slices.
for stopwatch in stack.iter() {
push_event(Event {
name: stopwatch.name,
cat: String::new(),
ph: "B",
ts: timestamp,
tid: platform::thread_id(),
pid: 0,
});
}
});
}
/// Writes the events history to a string.
pub fn write_events_to_string() -> String {
let events = EVENTS.lock().expect("Events mutex got poisoned");
serde_json::to_string(&*events).unwrap()
}
pub struct Stopwatch {
name: &'static str,
}
impl Stopwatch {
pub fn new(name: &'static str) -> Stopwatch {
push_event(Event {
name: name,
cat: String::new(),
ph: "B",
ts: platform::timestamp(),
tid: platform::thread_id(),
pid: 0, // TODO: Do we care about tracking process ID?
});
with_context(|stack| {
stack.push(StopwatchData { name: name });
});
Stopwatch {
name: name,
}
}
pub fn with_budget(name: &'static str, _budget: Duration) -> Stopwatch {
// TODO: We should actually do something with the budget, right?
Stopwatch::new(name)
}
}
impl Drop for Stopwatch {
fn drop(&mut self) {
with_context(|stack| {
let stopwatch = stack.pop().expect("No stopwatch popped, stack is corrupted");
assert_eq!(self.name, stopwatch.name, "Stack got corrupted I guess");
});
push_event(Event {
name: self.name,
cat: String::new(),
ph: "E",
ts: platform::timestamp(),
tid: platform::thread_id(),
pid: 0, // TODO: Do we care about tracking process ID?
});
}
}
#[derive(Debug, Serialize)]
struct Event {
/// Human-readable name for the event.
name: &'static str,
/// Event category.
cat: String,
/// Event phase (i.e. the event type).
ph: &'static str,
/// Timestamp in microseconds.
ts: i64,
/// Process ID for the event.
pid: usize,
/// Thread ID for the event.
tid: usize,
}
fn push_event(event: Event) {
let mut events = EVENTS.lock().expect("Events mutex got poisoned");
events.push(event);
}
#[derive(Debug, Clone, Copy)]
struct StopwatchData {
name: &'static str,
}
type Context = Vec<StopwatchData>;
fn with_context<F, T>(func: F) -> T
where F: FnOnce(&mut Context) -> T
{
CONTEXT.with(move |context_cell| {
let mut context = context_cell.borrow_mut();
func(&mut *context)
})
}
pub struct PrettyDuration(pub Duration);
impl Display for PrettyDuration {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
let mins = self.0.as_secs() / 60;
let secs = self.0.as_secs() % 60;
let millis = self.0.subsec_nanos() as u64 / 1_000_000;
let micros = (self.0.subsec_nanos() / 1_000) % 1_000;
if mins > 0 {
write!(formatter, "{}m {}s {}.{}ms", mins, secs, millis, micros)
} else if secs > 0
|
else {
write!(formatter, "{}.{}ms", millis, micros)
}
}
}
|
{
write!(formatter, "{}s {}.{}ms", secs, millis, micros)
}
|
conditional_block
|
ghosthunter-nodependency.js
|
/**
* ghostHunter - 0.4.0
* Copyright (C) 2014 Jamal Neufeld ([email protected])
* MIT Licensed
* @license
*/
(function( $ ) {
/* Include the Lunr library */
var lunr=require('./lunr.min.js');
//This is the main plugin definition
$.fn.ghostHunter = function( options ) {
//Here we use jQuery's extend to set default values if they weren't set by the user
var opts = $.extend( {}, $.fn.ghostHunter.defaults, options );
if( opts.results )
{
pluginMethods.init( this , opts );
return pluginMethods;
}
};
$.fn.ghostHunter.defaults = {
resultsData : false,
onPageLoad : true,
onKeyUp : false,
result_template : "<a href='{{link}}'><p><h2>{{title}}</h2><h4>{{prettyPubDate}}</h4></p></a>",
info_template : "<p>Number of posts found: {{amount}}</p>",
displaySearchInfo : true,
zeroResultsInfo : true,
before : false,
onComplete : false,
includepages : false,
filterfields : false
};
var prettyDate = function(date) {
var d = new Date(date);
var monthNames = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"];
return d.getDate() + ' ' + monthNames[d.getMonth()] + ' ' + d.getFullYear();
};
var pluginMethods = {
isInit : false,
init : function( target , opts ){
var that = this;
this.target = target;
this.results = opts.results;
this.blogData = {};
this.result_template = opts.result_template;
this.info_template = opts.info_template;
this.zeroResultsInfo = opts.zeroResultsInfo;
this.displaySearchInfo = opts.displaySearchInfo;
this.before = opts.before;
this.onComplete = opts.onComplete;
this.includepages = opts.includepages;
this.filterfields = opts.filterfields;
//This is where we'll build the index for later searching. It's not a big deal to build it on every load as it takes almost no space without data
this.index = lunr(function () {
this.field('title', {boost: 10})
this.field('description')
this.field('link')
this.field('plaintext', {boost: 5})
this.field('pubDate')
this.field('tag')
this.ref('id')
});
if ( opts.onPageLoad ) {
function
|
() {
that.loadAPI();
}
window.setTimeout(miam, 1);
} else {
target.focus(function(){
that.loadAPI();
});
}
target.closest("form").submit(function(e){
e.preventDefault();
that.find(target.val());
});
if( opts.onKeyUp ) {
target.keyup(function() {
that.find(target.val());
});
}
},
loadAPI : function(){
if(this.isInit) return false;
/* Here we load all of the blog posts into the index.
This function is not called on page load, to avoid unnecessarily heavy
operations if a visitor never ends up searching anything. */
var index = this.index,
blogData = this.blogData,
obj = {limit: "all", include: "tags", formats:["plaintext"]}; // keep obj inside the var list so it does not leak into the global scope
if ( this.includepages ){
obj.filter="(page:true,page:false)";
}
$.get(ghost.url.api('posts',obj)).done(function(data){
var searchData = data.posts;
searchData.forEach(function(arrayItem){
var tag_arr = arrayItem.tags.map(function(v) {
return v.name; // each `tag` object has a `name` property holding the tag's value; check the API if you also want other info
})
if(arrayItem.meta_description == null) { arrayItem.meta_description = '' };
var category = tag_arr.join(", ");
if (category.length < 1){
category = "undefined";
}
var parsedData = {
id : String(arrayItem.id),
title : String(arrayItem.title),
description : String(arrayItem.meta_description),
plaintext : String(arrayItem.plaintext),
pubDate : String(arrayItem.created_at),
tag : category,
featureImage : String(arrayItem.feature_image),
link : String(arrayItem.url)
}
parsedData.prettyPubDate = prettyDate(parsedData.pubDate);
var tempdate = prettyDate(parsedData.pubDate);
index.add(parsedData)
blogData[arrayItem.id] = {
title: arrayItem.title,
description: arrayItem.meta_description,
pubDate: tempdate,
featureImage: arrayItem.feature_image,
link: arrayItem.url
};
});
});
this.isInit = true;
},
find : function(value){
var searchResult = this.index.search(value);
var results = $(this.results);
var resultsData = [];
results.empty();
if(this.before) {
this.before();
};
if(this.zeroResultsInfo || searchResult.length > 0)
{
if(this.displaySearchInfo) results.append(this.format(this.info_template,{"amount":searchResult.length}));
}
for (var i = 0; i < searchResult.length; i++)
{
var lunrref = searchResult[i].ref;
var postData = this.blogData[lunrref];
results.append(this.format(this.result_template,postData));
resultsData.push(postData);
}
if(this.onComplete) {
this.onComplete(resultsData);
};
},
clear : function(){
$(this.results).empty();
this.target.val("");
},
format : function (t, d) {
return t.replace(/{{([^{}]*)}}/g, function (a, b) {
var r = d[b];
return typeof r === 'string' || typeof r === 'number' ? r : a;
});
}
}
})( jQuery );
|
miam
|
identifier_name
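The format() helper at the end of the plugin is a tiny mustache-style substitution: replace each {{field}} token with the matching string or number from the data object, and leave unknown tokens untouched. A Python rendition of the same idea (a sketch, not part of the ghostHunter API):

import re

def render(template, data):
    """Substitute {{field}} tokens; unknown or non-scalar fields stay as-is."""
    def sub(match):
        value = data.get(match.group(1))
        return str(value) if isinstance(value, (str, int, float)) else match.group(0)
    return re.sub(r"\{\{([^{}]*)\}\}", sub, template)

print(render("<a href='{{link}}'><h2>{{title}}</h2></a>",
             {"link": "/post/1", "title": "Hello"}))
# -> <a href='/post/1'><h2>Hello</h2></a>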
|
ghosthunter-nodependency.js
|
/**
* ghostHunter - 0.4.0
* Copyright (C) 2014 Jamal Neufeld ([email protected])
* MIT Licensed
* @license
*/
(function( $ ) {
/* Include the Lunr library */
var lunr=require('./lunr.min.js');
//This is the main plugin definition
$.fn.ghostHunter = function( options ) {
//Here we use jQuery's extend to set default values if they weren't set by the user
var opts = $.extend( {}, $.fn.ghostHunter.defaults, options );
if( opts.results )
{
pluginMethods.init( this , opts );
return pluginMethods;
}
};
$.fn.ghostHunter.defaults = {
resultsData : false,
onPageLoad : true,
onKeyUp : false,
result_template : "<a href='{{link}}'><p><h2>{{title}}</h2><h4>{{prettyPubDate}}</h4></p></a>",
info_template : "<p>Number of posts found: {{amount}}</p>",
displaySearchInfo : true,
zeroResultsInfo : true,
before : false,
onComplete : false,
includepages : false,
filterfields : false
};
var prettyDate = function(date) {
var d = new Date(date);
var monthNames = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"];
return d.getDate() + ' ' + monthNames[d.getMonth()] + ' ' + d.getFullYear();
};
var pluginMethods = {
isInit : false,
init : function( target , opts ){
var that = this;
this.target = target;
this.results = opts.results;
this.blogData = {};
this.result_template = opts.result_template;
this.info_template = opts.info_template;
this.zeroResultsInfo = opts.zeroResultsInfo;
this.displaySearchInfo = opts.displaySearchInfo;
this.before = opts.before;
this.onComplete = opts.onComplete;
this.includepages = opts.includepages;
this.filterfields = opts.filterfields;
//This is where we'll build the index for later searching. It's not a big deal to build it on every load as it takes almost no space without data
this.index = lunr(function () {
this.field('title', {boost: 10})
this.field('description')
this.field('link')
this.field('plaintext', {boost: 5})
this.field('pubDate')
this.field('tag')
this.ref('id')
});
if ( opts.onPageLoad ) {
function miam () {
that.loadAPI();
}
window.setTimeout(miam, 1);
} else {
target.focus(function(){
that.loadAPI();
});
}
target.closest("form").submit(function(e){
e.preventDefault();
that.find(target.val());
});
if( opts.onKeyUp ) {
target.keyup(function() {
that.find(target.val());
});
}
},
loadAPI : function(){
if(this.isInit) return false;
/* Here we load all of the blog posts into the index.
This function is not called on page load, to avoid unnecessarily heavy
operations if a visitor never ends up searching anything. */
var index = this.index,
blogData = this.blogData,
obj = {limit: "all", include: "tags", formats:["plaintext"]}; // keep obj inside the var list so it does not leak into the global scope
if ( this.includepages ){
obj.filter="(page:true,page:false)";
}
$.get(ghost.url.api('posts',obj)).done(function(data){
var searchData = data.posts;
searchData.forEach(function(arrayItem){
var tag_arr = arrayItem.tags.map(function(v) {
return v.name; // each `tag` object has a `name` property holding the tag's value; check the API if you also want other info
})
if(arrayItem.meta_description == null) { arrayItem.meta_description = '' };
var category = tag_arr.join(", ");
if (category.length < 1){
category = "undefined";
}
var parsedData = {
id : String(arrayItem.id),
title : String(arrayItem.title),
description : String(arrayItem.meta_description),
plaintext : String(arrayItem.plaintext),
pubDate : String(arrayItem.created_at),
tag : category,
featureImage : String(arrayItem.feature_image),
link : String(arrayItem.url)
}
parsedData.prettyPubDate = prettyDate(parsedData.pubDate);
var tempdate = prettyDate(parsedData.pubDate);
index.add(parsedData)
blogData[arrayItem.id] = {
title: arrayItem.title,
description: arrayItem.meta_description,
pubDate: tempdate,
featureImage: arrayItem.feature_image,
link: arrayItem.url
};
});
});
this.isInit = true;
},
find : function(value){
var searchResult = this.index.search(value);
var results = $(this.results);
var resultsData = [];
results.empty();
if(this.before) {
this.before();
};
if(this.zeroResultsInfo || searchResult.length > 0)
{
if(this.displaySearchInfo) results.append(this.format(this.info_template,{"amount":searchResult.length}));
}
for (var i = 0; i < searchResult.length; i++)
{
var lunrref = searchResult[i].ref;
|
}
if(this.onComplete) {
this.onComplete(resultsData);
};
},
clear : function(){
$(this.results).empty();
this.target.val("");
},
format : function (t, d) {
return t.replace(/{{([^{}]*)}}/g, function (a, b) {
var r = d[b];
return typeof r === 'string' || typeof r === 'number' ? r : a;
});
}
}
})( jQuery );
|
var postData = this.blogData[lunrref];
results.append(this.format(this.result_template,postData));
resultsData.push(postData);
|
random_line_split
|
ghosthunter-nodependency.js
|
/**
* ghostHunter - 0.4.0
* Copyright (C) 2014 Jamal Neufeld ([email protected])
* MIT Licensed
* @license
*/
(function( $ ) {
/* Include the Lunr library */
var lunr=require('./lunr.min.js');
//This is the main plugin definition
$.fn.ghostHunter = function( options ) {
//Here we use jQuery's extend to set default values if they weren't set by the user
var opts = $.extend( {}, $.fn.ghostHunter.defaults, options );
if( opts.results )
{
pluginMethods.init( this , opts );
return pluginMethods;
}
};
$.fn.ghostHunter.defaults = {
resultsData : false,
onPageLoad : true,
onKeyUp : false,
result_template : "<a href='{{link}}'><p><h2>{{title}}</h2><h4>{{prettyPubDate}}</h4></p></a>",
info_template : "<p>Number of posts found: {{amount}}</p>",
displaySearchInfo : true,
zeroResultsInfo : true,
before : false,
onComplete : false,
includepages : false,
filterfields : false
};
var prettyDate = function(date) {
var d = new Date(date);
var monthNames = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"];
return d.getDate() + ' ' + monthNames[d.getMonth()] + ' ' + d.getFullYear();
};
var pluginMethods = {
isInit : false,
init : function( target , opts ){
var that = this;
this.target = target;
this.results = opts.results;
this.blogData = {};
this.result_template = opts.result_template;
this.info_template = opts.info_template;
this.zeroResultsInfo = opts.zeroResultsInfo;
this.displaySearchInfo = opts.displaySearchInfo;
this.before = opts.before;
this.onComplete = opts.onComplete;
this.includepages = opts.includepages;
this.filterfields = opts.filterfields;
//This is where we'll build the index for later searching. It's not a big deal to build it on every load as it takes almost no space without data
this.index = lunr(function () {
this.field('title', {boost: 10})
this.field('description')
this.field('link')
this.field('plaintext', {boost: 5})
this.field('pubDate')
this.field('tag')
this.ref('id')
});
if ( opts.onPageLoad ) {
function miam () {
that.loadAPI();
}
window.setTimeout(miam, 1);
} else {
target.focus(function(){
that.loadAPI();
});
}
target.closest("form").submit(function(e){
e.preventDefault();
that.find(target.val());
});
if( opts.onKeyUp ) {
target.keyup(function() {
that.find(target.val());
});
}
},
loadAPI : function(){
if(this.isInit) return false;
/* Here we load all of the blog posts into the index.
This function is not called on page load, to avoid unnecessarily heavy
operations if a visitor never ends up searching anything. */
var index = this.index,
blogData = this.blogData,
obj = {limit: "all", include: "tags", formats:["plaintext"]}; // keep obj inside the var list so it does not leak into the global scope
if ( this.includepages ){
obj.filter="(page:true,page:false)";
}
$.get(ghost.url.api('posts',obj)).done(function(data){
var searchData = data.posts;
searchData.forEach(function(arrayItem){
var tag_arr = arrayItem.tags.map(function(v) {
return v.name; // each `tag` object has a `name` property holding the tag's value; check the API if you also want other info
})
if(arrayItem.meta_description == null) { arrayItem.meta_description = '' };
var category = tag_arr.join(", ");
if (category.length < 1){
category = "undefined";
}
var parsedData = {
id : String(arrayItem.id),
title : String(arrayItem.title),
description : String(arrayItem.meta_description),
plaintext : String(arrayItem.plaintext),
pubDate : String(arrayItem.created_at),
tag : category,
featureImage : String(arrayItem.feature_image),
link : String(arrayItem.url)
}
parsedData.prettyPubDate = prettyDate(parsedData.pubDate);
var tempdate = prettyDate(parsedData.pubDate);
index.add(parsedData)
blogData[arrayItem.id] = {
title: arrayItem.title,
description: arrayItem.meta_description,
pubDate: tempdate,
featureImage: arrayItem.feature_image,
link: arrayItem.url
};
});
});
this.isInit = true;
},
find : function(value){
var searchResult = this.index.search(value);
var results = $(this.results);
var resultsData = [];
results.empty();
if(this.before)
|
;
if(this.zeroResultsInfo || searchResult.length > 0)
{
if(this.displaySearchInfo) results.append(this.format(this.info_template,{"amount":searchResult.length}));
}
for (var i = 0; i < searchResult.length; i++)
{
var lunrref = searchResult[i].ref;
var postData = this.blogData[lunrref];
results.append(this.format(this.result_template,postData));
resultsData.push(postData);
}
if(this.onComplete) {
this.onComplete(resultsData);
};
},
clear : function(){
$(this.results).empty();
this.target.val("");
},
format : function (t, d) {
return t.replace(/{{([^{}]*)}}/g, function (a, b) {
var r = d[b];
return typeof r === 'string' || typeof r === 'number' ? r : a;
});
}
}
})( jQuery );
|
{
this.before();
}
|
conditional_block
|
ghosthunter-nodependency.js
|
/**
* ghostHunter - 0.4.0
* Copyright (C) 2014 Jamal Neufeld ([email protected])
* MIT Licensed
* @license
*/
(function( $ ) {
/* Include the Lunr library */
var lunr=require('./lunr.min.js');
//This is the main plugin definition
$.fn.ghostHunter = function( options ) {
//Here we use jQuery's extend to set default values if they weren't set by the user
var opts = $.extend( {}, $.fn.ghostHunter.defaults, options );
if( opts.results )
{
pluginMethods.init( this , opts );
return pluginMethods;
}
};
$.fn.ghostHunter.defaults = {
resultsData : false,
onPageLoad : true,
onKeyUp : false,
result_template : "<a href='{{link}}'><p><h2>{{title}}</h2><h4>{{prettyPubDate}}</h4></p></a>",
info_template : "<p>Number of posts found: {{amount}}</p>",
displaySearchInfo : true,
zeroResultsInfo : true,
before : false,
onComplete : false,
includepages : false,
filterfields : false
};
var prettyDate = function(date) {
var d = new Date(date);
var monthNames = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"];
return d.getDate() + ' ' + monthNames[d.getMonth()] + ' ' + d.getFullYear();
};
var pluginMethods = {
isInit : false,
init : function( target , opts ){
var that = this;
this.target = target;
this.results = opts.results;
this.blogData = {};
this.result_template = opts.result_template;
this.info_template = opts.info_template;
this.zeroResultsInfo = opts.zeroResultsInfo;
this.displaySearchInfo = opts.displaySearchInfo;
this.before = opts.before;
this.onComplete = opts.onComplete;
this.includepages = opts.includepages;
this.filterfields = opts.filterfields;
//This is where we'll build the index for later searching. It's not a big deal to build it on every load as it takes almost no space without data
this.index = lunr(function () {
this.field('title', {boost: 10})
this.field('description')
this.field('link')
this.field('plaintext', {boost: 5})
this.field('pubDate')
this.field('tag')
this.ref('id')
});
if ( opts.onPageLoad ) {
function miam ()
|
window.setTimeout(miam, 1);
} else {
target.focus(function(){
that.loadAPI();
});
}
target.closest("form").submit(function(e){
e.preventDefault();
that.find(target.val());
});
if( opts.onKeyUp ) {
target.keyup(function() {
that.find(target.val());
});
}
},
loadAPI : function(){
if(this.isInit) return false;
/* Here we load all of the blog posts into the index.
This function is not called on page load, to avoid unnecessarily heavy
operations if a visitor never ends up searching anything. */
var index = this.index,
blogData = this.blogData,
obj = {limit: "all", include: "tags", formats:["plaintext"]}; // keep obj inside the var list so it does not leak into the global scope
if ( this.includepages ){
obj.filter="(page:true,page:false)";
}
$.get(ghost.url.api('posts',obj)).done(function(data){
var searchData = data.posts;
searchData.forEach(function(arrayItem){
var tag_arr = arrayItem.tags.map(function(v) {
return v.name; // each `tag` object has a `name` property holding the tag's value; check the API if you also want other info
})
if(arrayItem.meta_description == null) { arrayItem.meta_description = '' };
var category = tag_arr.join(", ");
if (category.length < 1){
category = "undefined";
}
var parsedData = {
id : String(arrayItem.id),
title : String(arrayItem.title),
description : String(arrayItem.meta_description),
plaintext : String(arrayItem.plaintext),
pubDate : String(arrayItem.created_at),
tag : category,
featureImage : String(arrayItem.feature_image),
link : String(arrayItem.url)
}
parsedData.prettyPubDate = prettyDate(parsedData.pubDate);
var tempdate = prettyDate(parsedData.pubDate);
index.add(parsedData)
blogData[arrayItem.id] = {
title: arrayItem.title,
description: arrayItem.meta_description,
pubDate: tempdate,
featureImage: arrayItem.feature_image,
link: arrayItem.url
};
});
});
this.isInit = true;
},
find : function(value){
var searchResult = this.index.search(value);
var results = $(this.results);
var resultsData = [];
results.empty();
if(this.before) {
this.before();
};
if(this.zeroResultsInfo || searchResult.length > 0)
{
if(this.displaySearchInfo) results.append(this.format(this.info_template,{"amount":searchResult.length}));
}
for (var i = 0; i < searchResult.length; i++)
{
var lunrref = searchResult[i].ref;
var postData = this.blogData[lunrref];
results.append(this.format(this.result_template,postData));
resultsData.push(postData);
}
if(this.onComplete) {
this.onComplete(resultsData);
};
},
clear : function(){
$(this.results).empty();
this.target.val("");
},
format : function (t, d) {
return t.replace(/{{([^{}]*)}}/g, function (a, b) {
var r = d[b];
return typeof r === 'string' || typeof r === 'number' ? r : a;
});
}
}
})( jQuery );
|
{
that.loadAPI();
}
|
identifier_body
|
main.rs
|
// Copyright 2015 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate gfx;
extern crate gfx_window_glutin;
extern crate glutin;
use gfx::traits::FactoryExt;
use gfx::Device;
|
vertex Vertex {
pos: [f32; 2] = "a_Pos",
color: [f32; 3] = "a_Color",
}
pipeline pipe {
vbuf: gfx::VertexBuffer<Vertex> = (),
out: gfx::RenderTarget<ColorFormat> = "Target0",
}
}
const TRIANGLE: [Vertex; 3] = [
Vertex { pos: [ -0.5, -0.5 ], color: [1.0, 0.0, 0.0] },
Vertex { pos: [ 0.5, -0.5 ], color: [0.0, 1.0, 0.0] },
Vertex { pos: [ 0.0, 0.5 ], color: [0.0, 0.0, 1.0] }
];
const CLEAR_COLOR: [f32; 4] = [0.1, 0.2, 0.3, 1.0];
pub fn main() {
let builder = glutin::WindowBuilder::new()
.with_title("Triangle example".to_string())
.with_dimensions(1024, 768)
.with_vsync();
let (window, mut device, mut factory, main_color, _main_depth) =
gfx_window_glutin::init::<ColorFormat, DepthFormat>(builder);
let mut encoder: gfx::Encoder<_, _> = factory.create_command_buffer().into();
let pso = factory.create_pipeline_simple(
include_bytes!("shader/triangle_150.glslv"),
include_bytes!("shader/triangle_150.glslf"),
pipe::new()
).unwrap();
let (vertex_buffer, slice) = factory.create_vertex_buffer_with_slice(&TRIANGLE, ());
let data = pipe::Data {
vbuf: vertex_buffer,
out: main_color
};
'main: loop {
// loop over events
for event in window.poll_events() {
match event {
glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) |
glutin::Event::Closed => break 'main,
_ => {},
}
}
// draw a frame
encoder.clear(&data.out, CLEAR_COLOR);
encoder.draw(&slice, &pso, &data);
encoder.flush(&mut device);
window.swap_buffers().unwrap();
device.cleanup();
}
}
|
pub type ColorFormat = gfx::format::Rgba8;
pub type DepthFormat = gfx::format::DepthStencil;
gfx_defines!{
|
random_line_split
|
main.rs
|
// Copyright 2015 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate gfx;
extern crate gfx_window_glutin;
extern crate glutin;
use gfx::traits::FactoryExt;
use gfx::Device;
pub type ColorFormat = gfx::format::Rgba8;
pub type DepthFormat = gfx::format::DepthStencil;
gfx_defines!{
vertex Vertex {
pos: [f32; 2] = "a_Pos",
color: [f32; 3] = "a_Color",
}
pipeline pipe {
vbuf: gfx::VertexBuffer<Vertex> = (),
out: gfx::RenderTarget<ColorFormat> = "Target0",
}
}
const TRIANGLE: [Vertex; 3] = [
Vertex { pos: [ -0.5, -0.5 ], color: [1.0, 0.0, 0.0] },
Vertex { pos: [ 0.5, -0.5 ], color: [0.0, 1.0, 0.0] },
Vertex { pos: [ 0.0, 0.5 ], color: [0.0, 0.0, 1.0] }
];
const CLEAR_COLOR: [f32; 4] = [0.1, 0.2, 0.3, 1.0];
pub fn
|
() {
let builder = glutin::WindowBuilder::new()
.with_title("Triangle example".to_string())
.with_dimensions(1024, 768)
.with_vsync();
let (window, mut device, mut factory, main_color, _main_depth) =
gfx_window_glutin::init::<ColorFormat, DepthFormat>(builder);
let mut encoder: gfx::Encoder<_, _> = factory.create_command_buffer().into();
let pso = factory.create_pipeline_simple(
include_bytes!("shader/triangle_150.glslv"),
include_bytes!("shader/triangle_150.glslf"),
pipe::new()
).unwrap();
let (vertex_buffer, slice) = factory.create_vertex_buffer_with_slice(&TRIANGLE, ());
let data = pipe::Data {
vbuf: vertex_buffer,
out: main_color
};
'main: loop {
// loop over events
for event in window.poll_events() {
match event {
glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) |
glutin::Event::Closed => break 'main,
_ => {},
}
}
// draw a frame
encoder.clear(&data.out, CLEAR_COLOR);
encoder.draw(&slice, &pso, &data);
encoder.flush(&mut device);
window.swap_buffers().unwrap();
device.cleanup();
}
}
|
main
|
identifier_name
|
main.rs
|
// Copyright 2015 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate gfx;
extern crate gfx_window_glutin;
extern crate glutin;
use gfx::traits::FactoryExt;
use gfx::Device;
pub type ColorFormat = gfx::format::Rgba8;
pub type DepthFormat = gfx::format::DepthStencil;
gfx_defines!{
vertex Vertex {
pos: [f32; 2] = "a_Pos",
color: [f32; 3] = "a_Color",
}
pipeline pipe {
vbuf: gfx::VertexBuffer<Vertex> = (),
out: gfx::RenderTarget<ColorFormat> = "Target0",
}
}
const TRIANGLE: [Vertex; 3] = [
Vertex { pos: [ -0.5, -0.5 ], color: [1.0, 0.0, 0.0] },
Vertex { pos: [ 0.5, -0.5 ], color: [0.0, 1.0, 0.0] },
Vertex { pos: [ 0.0, 0.5 ], color: [0.0, 0.0, 1.0] }
];
const CLEAR_COLOR: [f32; 4] = [0.1, 0.2, 0.3, 1.0];
pub fn main()
|
{
let builder = glutin::WindowBuilder::new()
.with_title("Triangle example".to_string())
.with_dimensions(1024, 768)
.with_vsync();
let (window, mut device, mut factory, main_color, _main_depth) =
gfx_window_glutin::init::<ColorFormat, DepthFormat>(builder);
let mut encoder: gfx::Encoder<_, _> = factory.create_command_buffer().into();
let pso = factory.create_pipeline_simple(
include_bytes!("shader/triangle_150.glslv"),
include_bytes!("shader/triangle_150.glslf"),
pipe::new()
).unwrap();
let (vertex_buffer, slice) = factory.create_vertex_buffer_with_slice(&TRIANGLE, ());
let data = pipe::Data {
vbuf: vertex_buffer,
out: main_color
};
'main: loop {
// loop over events
for event in window.poll_events() {
match event {
glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) |
glutin::Event::Closed => break 'main,
_ => {},
}
}
// draw a frame
encoder.clear(&data.out, CLEAR_COLOR);
encoder.draw(&slice, &pso, &data);
encoder.flush(&mut device);
window.swap_buffers().unwrap();
device.cleanup();
}
}
|
identifier_body
|
|
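The main.rs sample is the canonical gfx-rs triangle: open a vsynced 1024x768 window, build a pipeline from the two GLSL shaders, then loop polling events and issuing clear/draw/flush/swap every frame until Escape or close. For readers who don't know Rust, here is a rough pygame analogue of that frame loop — the polygon call stands in for the pipeline draw, and the clear color mirrors CLEAR_COLOR:

import pygame

# Rough Python/pygame analogue of the render loop in main.rs above.
pygame.init()
screen = pygame.display.set_mode((1024, 768))
pygame.display.set_caption("Triangle example")
CLEAR_COLOR = (26, 51, 77)  # [0.1, 0.2, 0.3] scaled to 0-255

running = True
while running:
    for event in pygame.event.get():               # window.poll_events()
        if event.type == pygame.QUIT:
            running = False
        elif event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
            running = False
    screen.fill(CLEAR_COLOR)                       # encoder.clear(...)
    pygame.draw.polygon(screen, (255, 0, 0),       # encoder.draw(...)
                        [(512, 200), (312, 500), (712, 500)])
    pygame.display.flip()                          # window.swap_buffers()
pygame.quit()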
gamma.rs
|
use err::ErrMsg;
use RGSLRng;
#[derive(Debug, Clone)]
pub struct Gamma {
a: f64,
b: f64,
}
impl Gamma {
pub fn new(a: f64, b: f64) -> Result<Gamma, ()> {
if a <= 0.0 || b <= 0.0 {
return Err(());
}
Ok(Gamma { a, b })
}
#[inline]
pub fn pdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_pdf;
gamma_pdf(x, self.a, self.b)
}
}
use super::{Sample, CDF};
impl Sample for Gamma {
#[inline]
fn sample(&self, rng: &mut RGSLRng) -> f64 {
use rgsl::randist::gamma::gamma;
gamma(rng.get_gen(), self.a, self.b)
}
}
impl CDF for Gamma {
#[inline]
fn cdf(&self, x: f64) -> f64
|
#[inline]
fn inverse_cdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_Pinv;
if x.is_sign_negative() {
panic!(ErrMsg::PositiveReal.panic_msg_with_arg(&self));
}
gamma_Pinv(x, self.a, self.b)
}
}
|
{
use rgsl::randist::gamma::gamma_P;
if x.is_sign_negative() {
panic!(ErrMsg::PositiveReal.panic_msg_with_arg(&self));
}
gamma_P(x, self.a, self.b)
}
|
identifier_body
|
gamma.rs
|
use err::ErrMsg;
use RGSLRng;
#[derive(Debug, Clone)]
pub struct
|
{
a: f64,
b: f64,
}
impl Gamma {
pub fn new(a: f64, b: f64) -> Result<Gamma, ()> {
if a <= 0.0 || b <= 0.0 {
return Err(());
}
Ok(Gamma { a, b })
}
#[inline]
pub fn pdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_pdf;
gamma_pdf(x, self.a, self.b)
}
}
use super::{Sample, CDF};
impl Sample for Gamma {
#[inline]
fn sample(&self, rng: &mut RGSLRng) -> f64 {
use rgsl::randist::gamma::gamma;
gamma(rng.get_gen(), self.a, self.b)
}
}
impl CDF for Gamma {
#[inline]
fn cdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_P;
if x.is_sign_negative() {
panic!(ErrMsg::PositiveReal.panic_msg_with_arg(&self));
}
gamma_P(x, self.a, self.b)
}
#[inline]
fn inverse_cdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_Pinv;
if x.is_sign_negative() {
panic!(ErrMsg::PositiveReal.panic_msg_with_arg(&self));
}
gamma_Pinv(x, self.a, self.b)
}
}
|
Gamma
|
identifier_name
|
gamma.rs
|
use err::ErrMsg;
use RGSLRng;
#[derive(Debug, Clone)]
pub struct Gamma {
a: f64,
b: f64,
}
impl Gamma {
pub fn new(a: f64, b: f64) -> Result<Gamma, ()> {
if a <= 0.0 || b <= 0.0 {
return Err(());
}
Ok(Gamma { a, b })
}
#[inline]
pub fn pdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_pdf;
gamma_pdf(x, self.a, self.b)
|
use super::{Sample, CDF};
impl Sample for Gamma {
#[inline]
fn sample(&self, rng: &mut RGSLRng) -> f64 {
use rgsl::randist::gamma::gamma;
gamma(rng.get_gen(), self.a, self.b)
}
}
impl CDF for Gamma {
#[inline]
fn cdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_P;
if x.is_sign_negative() {
panic!(ErrMsg::PositiveReal.panic_msg_with_arg(&self));
}
gamma_P(x, self.a, self.b)
}
#[inline]
fn inverse_cdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_Pinv;
if x.is_sign_negative() {
panic!(ErrMsg::PositiveReal.panic_msg_with_arg(&self));
}
gamma_Pinv(x, self.a, self.b)
}
}
|
}
}
|
random_line_split
|
gamma.rs
|
use err::ErrMsg;
use RGSLRng;
#[derive(Debug, Clone)]
pub struct Gamma {
a: f64,
b: f64,
}
impl Gamma {
pub fn new(a: f64, b: f64) -> Result<Gamma, ()> {
if a <= 0.0 || b <= 0.0 {
return Err(());
}
Ok(Gamma { a, b })
}
#[inline]
pub fn pdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_pdf;
gamma_pdf(x, self.a, self.b)
}
}
use super::{Sample, CDF};
impl Sample for Gamma {
#[inline]
fn sample(&self, rng: &mut RGSLRng) -> f64 {
use rgsl::randist::gamma::gamma;
gamma(rng.get_gen(), self.a, self.b)
}
}
impl CDF for Gamma {
#[inline]
fn cdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_P;
if x.is_sign_negative() {
panic!(ErrMsg::PositiveReal.panic_msg_with_arg(&self));
}
gamma_P(x, self.a, self.b)
}
#[inline]
fn inverse_cdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_Pinv;
if x.is_sign_negative()
|
gamma_Pinv(x, self.a, self.b)
}
}
|
{
panic!(ErrMsg::PositiveReal.panic_msg_with_arg(&self));
}
|
conditional_block
|
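The Gamma wrapper above validates its parameters at construction (both must be positive), rejects negative arguments to cdf and inverse_cdf with a panic, and defers the math to GSL. A scipy sketch of the same surface, assuming GSL's (shape a, scale b) parameterization of gamma_pdf / gamma_P / gamma_Pinv; scipy.stats.gamma takes the shape positionally and the scale as a keyword:

from scipy import stats

class Gamma:
    """Mirror of the Rust Gamma wrapper, backed by scipy instead of GSL."""
    def __init__(self, a, b):
        if a <= 0.0 or b <= 0.0:
            raise ValueError("shape and scale must be positive")  # Err(()) in Rust
        self._dist = stats.gamma(a, scale=b)

    def pdf(self, x):
        return self._dist.pdf(x)

    def cdf(self, x):
        if x < 0.0:
            raise ValueError("x must be a positive real")  # the panic! branch
        return self._dist.cdf(x)

    def inverse_cdf(self, p):
        if p < 0.0:
            raise ValueError("p must be a positive real")
        return self._dist.ppf(p)  # ppf is the gamma_Pinv equivalent

g = Gamma(2.0, 1.5)
assert abs(g.inverse_cdf(g.cdf(3.0)) - 3.0) < 1e-9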
remove.ts
|
import * as vscode from 'vscode';
import * as fs from "fs";
import * as path from "path";
import Helpers from "../shared/helpers";
var projectObj: Object;
var jsonFilePath: string;
// Shows installed packages and gives option to remove them
export function removePackage() {
// Find the path to project.json
jsonFilePath = path.join(vscode.workspace.rootPath, "/project.json");
// Check if the file exists first
fs.exists(jsonFilePath, exists => {
if (exists) {
readFile();
} else {
Helpers.throwError("This project does not contain project.json file.");
}
});
}
function readFile() {
fs.readFile(jsonFilePath, { encoding: "utf8" }, (err, data) => {
if (err) {
console.error(err);
Helpers.throwError("Could not read project.json, please try again.");
} else {
// Store content of project.json file
let projectJsonContent: string = data;
try {
// Parse the json
projectObj = JSON.parse(projectJsonContent);
showDependencies();
} catch (error) {
console.error(error);
Helpers.throwError("Could not load dependencies, make sure the project.json file is valid.");
}
}
})
}
// Shows all the dependencies
function showDependencies() {
var dependenciesList = [];
// Build array list of dependencies
for (var name in projectObj["dependencies"]) {
dependenciesList.push(name);
}
// Make sure dependencies are not empty
if (dependenciesList.length < 1)
|
else {
vscode.window.showQuickPick(dependenciesList)
.then(item => {
if(item){
// Delete the selected item
deleteItem(item);
}
});
}
}
// Delete the dependency object
function deleteItem(item: string) {
delete (projectObj["dependencies"][item]);
writeFile(item);
}
// Create a new JSON string and write to file
function writeFile(deletedItem: string) {
try {
let outputFileString = JSON.stringify(projectObj, null, 2);
fs.writeFile(jsonFilePath, outputFileString, err => {
if (err) {
console.error(err);
Helpers.throwError("Could not write project.json file, make sure it is writeable.");
} else {
Helpers.showMessage("Deleted package " + deletedItem + ", make sure to run 'dotnet restore' now.");
}
})
} catch (error) {
console.error(error);
Helpers.throwError("Could not parse the new project.json structure.");
}
}
|
{
Helpers.throwError("You do not have any dependencies on this project.");
}
|
conditional_block
|
remove.ts
|
import * as vscode from 'vscode';
import * as fs from "fs";
import * as path from "path";
import Helpers from "../shared/helpers";
var projectObj: Object;
var jsonFilePath: string;
// Shows installed packages and gives option to remove them
export function removePackage() {
// Find the path to project.json
jsonFilePath = path.join(vscode.workspace.rootPath, "/project.json");
// Check if the file exists first
fs.exists(jsonFilePath, exists => {
if (exists) {
readFile();
} else {
Helpers.throwError("This project does not contain project.json file.");
}
});
}
function
|
() {
fs.readFile(jsonFilePath, { encoding: "utf8" }, (err, data) => {
if (err) {
console.error(err);
Helpers.throwError("Could not read project.json, please try again.");
} else {
// Store content of project.json file
let projectJsonContent: string = data;
try {
// Parse the json
projectObj = JSON.parse(projectJsonContent);
showDependencies();
} catch (error) {
console.error(error);
Helpers.throwError("Could not load dependencies, make sure the project.json file is valid.");
}
}
})
}
// Shows all the dependencies
function showDependencies() {
var dependenciesList = [];
// Build array list of dependencies
for (var name in projectObj["dependencies"]) {
dependenciesList.push(name);
}
// Make sure dependencies are not empty
if (dependenciesList.length < 1) {
Helpers.throwError("You do not have any dependencies on this project.");
} else {
vscode.window.showQuickPick(dependenciesList)
.then(item => {
if(item){
// Delete the selected item
deleteItem(item);
}
});
}
}
// Delete the dependency object
function deleteItem(item: string) {
delete (projectObj["dependencies"][item]);
writeFile(item);
}
// Create a new JSON string and write to file
function writeFile(deletedItem: string) {
try {
let outputFileString = JSON.stringify(projectObj, null, 2);
fs.writeFile(jsonFilePath, outputFileString, err => {
if (err) {
console.error(err);
Helpers.throwError("Could not write project.json file, make sure it is writeable.");
} else {
Helpers.showMessage("Deleted package " + deletedItem + ", make sure to run 'dotnet restore' now.");
}
})
} catch (error) {
console.error(error);
Helpers.throwError("Could not parse the new project.json structure.");
}
}
|
readFile
|
identifier_name
|
remove.ts
|
import * as vscode from 'vscode';
import * as fs from "fs";
import * as path from "path";
import Helpers from "../shared/helpers";
var projectObj: Object;
var jsonFilePath: string;
// Shows installed packages and gives option to remove them
export function removePackage() {
// Find the path to project.json
jsonFilePath = path.join(vscode.workspace.rootPath, "/project.json");
// Check if the file exists first
fs.exists(jsonFilePath, exists => {
if (exists) {
readFile();
} else {
|
}
function readFile() {
fs.readFile(jsonFilePath, { encoding: "utf8" }, (err, data) => {
if (err) {
console.error(err);
Helpers.throwError("Could not read project.json, please try again.");
} else {
// Store content of project.json file
let projectJsonContent: string = data;
try {
// Parse the json
projectObj = JSON.parse(projectJsonContent);
showDependencies();
} catch (error) {
console.error(error);
Helpers.throwError("Could not load dependencies, make sure the project.json file is valid.");
}
}
})
}
// Shows all the dependencies
function showDependencies() {
var dependenciesList = [];
// Build array list of dependencies
for (var name in projectObj["dependencies"]) {
dependenciesList.push(name);
}
// Make sure dependencies are not empty
if (dependenciesList.length < 1) {
Helpers.throwError("You do not have any dependencies on this project.");
} else {
vscode.window.showQuickPick(dependenciesList)
.then(item => {
if(item){
// Delete the selected item
deleteItem(item);
}
});
}
}
// Delete the dependency object
function deleteItem(item: string) {
delete (projectObj["dependencies"][item]);
writeFile(item);
}
// Create a new JSON string and write to file
function writeFile(deletedItem: string) {
try {
let outputFileString = JSON.stringify(projectObj, null, 2);
fs.writeFile(jsonFilePath, outputFileString, err => {
if (err) {
console.error(err);
Helpers.throwError("Could not write project.json file, make sure it is writeable.");
} else {
Helpers.showMessage("Deleted package " + deletedItem + ", make sure to run 'dotnet restore' now.");
}
})
} catch (error) {
console.error(error);
Helpers.throwError("Could not parse the new project.json structure.");
}
}
|
Helpers.throwError("This project does not contain project.json file.");
}
});
|
random_line_split
|
remove.ts
|
import * as vscode from 'vscode';
import * as fs from "fs";
import * as path from "path";
import Helpers from "../shared/helpers";
var projectObj: Object;
var jsonFilePath: string;
// Shows installed packages and gives option to remove them
export function removePackage()
|
function readFile() {
fs.readFile(jsonFilePath, { encoding: "utf8" }, (err, data) => {
if (err) {
console.error(err);
Helpers.throwError("Could not read project.json, please try again.");
} else {
// Store content of project.json file
let projectJsonContent: string = data;
try {
// Parse the json
projectObj = JSON.parse(projectJsonContent);
showDependencies();
} catch (error) {
console.error(error);
Helpers.throwError("Could not load dependencies, make sure the project.json file is valid.");
}
}
})
}
// Shows all the dependencies
function showDependencies() {
var dependenciesList = [];
// Build array list of dependencies
for (var name in projectObj["dependencies"]) {
dependenciesList.push(name);
}
// Make sure dependencies are not empty
if (dependenciesList.length < 1) {
Helpers.throwError("You do not have any dependencies on this project.");
} else {
vscode.window.showQuickPick(dependenciesList)
.then(item => {
if(item){
// Delete the selected item
deleteItem(item);
}
});
}
}
// Delete the dependency object
function deleteItem(item: string) {
delete (projectObj["dependencies"][item]);
writeFile(item);
}
// Create a new JSON string and write to file
function writeFile(deletedItem: string) {
try {
let outputFileString = JSON.stringify(projectObj, null, 2);
fs.writeFile(jsonFilePath, outputFileString, err => {
if (err) {
console.error(err);
Helpers.throwError("Could not write project.json file, make sure it is writeable.");
} else {
Helpers.showMessage("Deleted package " + deletedItem + ", make sure to run 'dotnet restore' now.");
}
})
} catch (error) {
console.error(error);
Helpers.throwError("Could not parse the new project.json structure.");
}
}
|
{
// Find the path to project.json
jsonFilePath = path.join(vscode.workspace.rootPath, "/project.json");
// Check if the file exists first
fs.exists(jsonFilePath, exists => {
if (exists) {
readFile();
} else {
Helpers.throwError("This project does not contain project.json file.");
}
});
}
|
identifier_body
|
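Taken together, remove.ts does four things: locate project.json at the workspace root, parse it, offer the keys under "dependencies" in a quick-pick, then delete the chosen one and rewrite the file with 2-space indentation. The same core flow in Python, with the interactive pick replaced by a plain argument (the path handling and messages are illustrative, not the extension's API):

import json
from pathlib import Path

def remove_package(project_dir, package):
    """Drop one entry from the "dependencies" object of project.json."""
    json_path = Path(project_dir) / "project.json"
    if not json_path.exists():
        raise FileNotFoundError("This project does not contain a project.json file.")
    project = json.loads(json_path.read_text(encoding="utf8"))
    deps = project.get("dependencies", {})
    if package not in deps:
        raise KeyError("No dependency named %r." % package)
    del deps[package]
    json_path.write_text(json.dumps(project, indent=2), encoding="utf8")
    print("Deleted package %s, make sure to run 'dotnet restore' now." % package)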
index.d.ts
|
// Type definitions for conf 1.1
// Project: https://github.com/sindresorhus/conf
// Definitions by: Sam Verschueren <https://github.com/SamVerschueren>
// BendingBender <https://github.com/BendingBender>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
interface Options<T> {
defaults?: {[key: string]: T};
configName?: string;
projectName?: string;
cwd?: string;
}
declare class
|
<T = any> implements Iterable<[string, T]> {
store: {[key: string]: T};
readonly path: string;
readonly size: number;
constructor(options?: Options<T>);
get(key: string, defaultValue?: T): T;
set(key: string, val: T): void;
set(object: {[key: string]: T}): void;
has(key: string): boolean;
delete(key: string): void;
clear(): void;
[Symbol.iterator](): Iterator<[string, T]>;
}
export = Conf;
|
Conf
|
identifier_name
|
index.d.ts
|
// Type definitions for conf 1.1
// Project: https://github.com/sindresorhus/conf
// Definitions by: Sam Verschueren <https://github.com/SamVerschueren>
// BendingBender <https://github.com/BendingBender>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
interface Options<T> {
defaults?: {[key: string]: T};
configName?: string;
projectName?: string;
cwd?: string;
}
declare class Conf<T = any> implements Iterable<[string, T]> {
store: {[key: string]: T};
readonly path: string;
readonly size: number;
constructor(options?: Options<T>);
get(key: string, defaultValue?: T): T;
set(key: string, val: T): void;
set(object: {[key: string]: T}): void;
has(key: string): boolean;
delete(key: string): void;
clear(): void;
[Symbol.iterator](): Iterator<[string, T]>;
}
|
export = Conf;
|
random_line_split
|
|
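The declaration file types conf's whole surface: a key-value store persisted to disk, with get/set/has/delete/clear, a size, a path, and (key, value) iteration. An illustrative Python analogue backed by a JSON file — the same shape, not the real library:

import json, os

class Conf:
    """JSON-backed key-value store mirroring the typed API above."""
    def __init__(self, path="config.json", defaults=None):
        self.path = path
        self.store = dict(defaults or {})
        if os.path.exists(path):
            with open(path, encoding="utf8") as f:
                self.store.update(json.load(f))

    def _save(self):
        with open(self.path, "w", encoding="utf8") as f:
            json.dump(self.store, f, indent=2)

    def get(self, key, default=None):
        return self.store.get(key, default)

    def set(self, key, value):
        self.store[key] = value
        self._save()

    def has(self, key):
        return key in self.store

    def delete(self, key):
        self.store.pop(key, None)
        self._save()

    def clear(self):
        self.store.clear()
        self._save()

    @property
    def size(self):
        return len(self.store)

    def __iter__(self):
        return iter(self.store.items())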
bonus.py
|
import os
import common_pygame
import random
pygame = common_pygame.pygame
screen = common_pygame.screen
class
|
():
def __init__(self, sounds, menu):
self.menu = menu
self.sounds = sounds
self.bonusType = 0
self.bonusAnim = 0
self.font = pygame.font.Font(None, 64)
self.bonusList = list()
self.bonusList.append(self.font.render(
str("plasma gun !"), True, (255, 255, 0)))
self.score = 0
self.bonuscount = 1
def ProcessBonus(self, ship):
# if ship.score %200 ==0 and ship.weapon==1 and ship.score>0:
if ship.score > 400 * self.bonuscount and self.score < 400 * self.bonuscount:
self.menu.play_sound(self.sounds["plasmagun.wav"])
ship.setWeapon(2)
self.bonusType = 0
self.bonusAnim = 30
self.score = ship.score
self.bonuscount = self.bonuscount + 1
if self.bonusAnim > 0:
self.bonusAnim = self.bonusAnim - 1
# show bonus for the plasma weapon
if self.bonusType == 0:
screen.blit(self.bonusList[0], (250, 250))
|
Bonus
|
identifier_name
|
bonus.py
|
import os
import common_pygame
import random
pygame = common_pygame.pygame
screen = common_pygame.screen
class Bonus():
def __init__(self, sounds, menu):
|
def ProcessBonus(self, ship):
# if ship.score %200 ==0 and ship.weapon==1 and ship.score>0:
if ship.score > 400 * self.bonuscount and self.score < 400 * self.bonuscount:
self.menu.play_sound(self.sounds["plasmagun.wav"])
ship.setWeapon(2)
self.bonusType = 0
self.bonusAnim = 30
self.score = ship.score
self.bonuscount = self.bonuscount + 1
if self.bonusAnim > 0:
self.bonusAnim = self.bonusAnim - 1
# show bonus for the plasma weapon
if self.bonusType == 0:
screen.blit(self.bonusList[0], (250, 250))
|
self.menu = menu
self.sounds = sounds
self.bonusType = 0
self.bonusAnim = 0
self.font = pygame.font.Font(None, 64)
self.bonusList = list()
self.bonusList.append(self.font.render(
str("plasma gun !"), True, (255, 255, 0)))
self.score = 0
self.bonuscount = 1
|
identifier_body
|
bonus.py
|
import os
import common_pygame
import random
pygame = common_pygame.pygame
screen = common_pygame.screen
class Bonus():
def __init__(self, sounds, menu):
self.menu = menu
self.sounds = sounds
self.bonusType = 0
self.bonusAnim = 0
self.font = pygame.font.Font(None, 64)
self.bonusList = list()
self.bonusList.append(self.font.render(
str("plasma gun !"), True, (255, 255, 0)))
self.score = 0
self.bonuscount = 1
def ProcessBonus(self, ship):
# if ship.score %200 ==0 and ship.weapon==1 and ship.score>0:
if ship.score > 400 * self.bonuscount and self.score < 400 * self.bonuscount:
self.menu.play_sound(self.sounds["plasmagun.wav"])
ship.setWeapon(2)
self.bonusType = 0
self.bonusAnim = 30
self.score = ship.score
self.bonuscount = self.bonuscount + 1
if self.bonusAnim > 0:
|
self.bonusAnim = self.bonusAnim - 1
# show bonus for the plasma weapon
if self.bonusType == 0:
screen.blit(self.bonusList[0], (250, 250))
|
conditional_block
|
|
bonus.py
|
import os
import common_pygame
import random
pygame = common_pygame.pygame
screen = common_pygame.screen
class Bonus():
def __init__(self, sounds, menu):
self.menu = menu
self.sounds = sounds
self.bonusType = 0
self.bonusAnim = 0
self.font = pygame.font.Font(None, 64)
self.bonusList = list()
self.bonusList.append(self.font.render(
str("plasma gun !"), True, (255, 255, 0)))
self.score = 0
|
def ProcessBonus(self, ship):
# if ship.score %200 ==0 and ship.weapon==1 and ship.score>0:
if ship.score > 400 * self.bonuscount and self.score < 400 * self.bonuscount:
self.menu.play_sound(self.sounds["plasmagun.wav"])
ship.setWeapon(2)
self.bonusType = 0
self.bonusAnim = 30
self.score = ship.score
self.bonuscount = self.bonuscount + 1
if self.bonusAnim > 0:
self.bonusAnim = self.bonusAnim - 1
# show bonus for the plasma weapon
if self.bonusType == 0:
screen.blit(self.bonusList[0], (250, 250))
|
self.bonuscount = 1
|
random_line_split
|
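ProcessBonus awards the plasma gun once per 400-point band: the bonus fires the first time ship.score exceeds 400 * bonuscount, the stored score guards against awarding twice in the same band, then the counter advances and a 30-frame banner plays. The threshold rule distilled into a testable function:

def should_award(score, last_award_score, bonuscount):
    """True exactly when the score first crosses the current 400-point band."""
    threshold = 400 * bonuscount
    return score > threshold and last_award_score < threshold

assert should_award(401, 0, 1) is True      # crossed 400 for the first time
assert should_award(450, 401, 1) is False   # already awarded in this band
assert should_award(801, 401, 2) is True    # next band opens at 800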
test_basics.js
|
/*
Copyright (c) 2006, Tim Becker All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
function dumpProperties (obj) {
var result = ""
print(obj)
for (var p in obj){
print(""+p+" = "+obj[p]+"")
}
// print ("<dl>")
// for (var p in obj){
// print("<dt>"+p+"</dt><dd><pre>"+obj[p]+"</pre></dd>")
// }
// print ("<dl>")
}
function print (obj) {
document.writeln(obj+"<br>")
}
function write (obj) {
document.writeln(obj);
}
function
|
(elId) {
var e = document.getElementById(elId);
if (e.style.display == "block")
e.style.display = "none";
else
e.style.display = "block";
return false;
}
function debugInfo () {
print ("Browser Infos (<a href=\"#\" onclick=\"toggle(\'debugInfo\')\">+</a>)")
print("<small><div id=\"debugInfo\" style=\"display:none\" class=\"source\">")
dumpProperties (navigator)
print("")
print("</div></small>")
}
function escapeHTML (str) {
    str= str.replace(/&/g, "&amp;")
    str= str.replace(/</g, "&lt;")
    str= str.replace(/>/g, "&gt;")
return str
}
function runTests (testArr) {
write ("<h1>"+document.title+"</h1>")
var numFailed = 0
var numException = 0
for (var i=0; i!=testArr.length; ++i) {
var result
try {
result = testArr[i]()
if (!result) ++numFailed
}catch (e) {
print("<hr><h3>Exception executing: "+i+"</h3>")
dumpProperties (e)
++numException
++numFailed
}
//print("<hr>")
print ("Test #"+i+" passed: "+result+"")
write ("<small>Show test(<a href=\"#\" onclick=\"toggle(\'test"+i+"\')\">+</small></a>) <div id=\"test"+i+"\" style=\"display:none\" class=\"source\"><pre>")
write (""+escapeHTML(testArr[i].toString())+"")
print("</pre></div>")
}
write("<hr>")
print ("numFailed ="+numFailed+" with Exception: "+numException)
if (parent.frames.menu) {
try {
parent.frames.menu.addResult (document.title, testArr.length, numFailed, numException, this.location)
} catch (e) {
alert(e)
}
}
debugInfo()
}
|
toggle
|
identifier_name
|
test_basics.js
|
/*
Copyright (c) 2006, Tim Becker All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
function dumpProperties (obj) {
var result = ""
print(obj)
for (var p in obj){
print(""+p+" = "+obj[p]+"")
}
// print ("<dl>")
// for (var p in obj){
// print("<dt>"+p+"</dt><dd><pre>"+obj[p]+"</pre></dd>")
// }
// print ("<dl>")
}
function print (obj) {
document.writeln(obj+"<br>")
}
function write (obj)
|
function toggle(elId) {
var e = document.getElementById(elId);
if (e.style.display == "block")
e.style.display = "none";
else
e.style.display = "block";
return false;
}
function debugInfo () {
print ("Browser Infos (<a href=\"#\" onclick=\"toggle(\'debugInfo\')\">+</a>)")
print("<small><div id=\"debugInfo\" style=\"display:none\" class=\"source\">")
dumpProperties (navigator)
print("")
print("</div></small>")
}
function escapeHTML (str) {
    str= str.replace(/&/g, "&amp;")
    str= str.replace(/</g, "&lt;")
    str= str.replace(/>/g, "&gt;")
return str
}
function runTests (testArr) {
write ("<h1>"+document.title+"</h1>")
var numFailed = 0
var numException = 0
for (var i=0; i!=testArr.length; ++i) {
var result
try {
result = testArr[i]()
if (!result) ++numFailed
}catch (e) {
print("<hr><h3>Exception executing: "+i+"</h3>")
dumpProperties (e)
++numException
++numFailed
}
//print("<hr>")
print ("Test #"+i+" passed: "+result+"")
write ("<small>Show test(<a href=\"#\" onclick=\"toggle(\'test"+i+"\')\">+</small></a>) <div id=\"test"+i+"\" style=\"display:none\" class=\"source\"><pre>")
write (""+escapeHTML(testArr[i].toString())+"")
print("</pre></div>")
}
write("<hr>")
print ("numFailed ="+numFailed+" with Exception: "+numException)
if (parent.frames.menu) {
try {
parent.frames.menu.addResult (document.title, testArr.length, numFailed, numException, this.location)
} catch (e) {
alert(e)
}
}
debugInfo()
}
|
{
document.writeln(obj);
}
|
identifier_body
|
test_basics.js
|
/*
Copyright (c) 2006, Tim Becker All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
function dumpProperties (obj) {
var result = ""
print(obj)
for (var p in obj){
print(""+p+" = "+obj[p]+"")
}
// print ("<dl>")
// for (var p in obj){
// print("<dt>"+p+"</dt><dd><pre>"+obj[p]+"</pre></dd>")
// }
// print ("<dl>")
}
function print (obj) {
document.writeln(obj+"<br>")
}
function write (obj) {
document.writeln(obj);
}
function toggle(elId) {
var e = document.getElementById(elId);
if (e.style.display == "block")
e.style.display = "none";
else
e.style.display = "block";
return false;
}
function debugInfo () {
print ("Browser Infos (<a href=\"#\" onclick=\"toggle(\'debugInfo\')\">+</a>)")
print("<small><div id=\"debugInfo\" style=\"display:none\" class=\"source\">")
dumpProperties (navigator)
print("")
print("</div></small>")
}
function escapeHTML (str) {
    str= str.replace(/&/g, "&amp;")
    str= str.replace(/</g, "&lt;")
    str= str.replace(/>/g, "&gt;")
return str
}
function runTests (testArr) {
write ("<h1>"+document.title+"</h1>")
var numFailed = 0
var numException = 0
for (var i=0; i!=testArr.length; ++i) {
var result
try {
result = testArr[i]()
if (!result) ++numFailed
}catch (e) {
print("<hr><h3>Exception executing: "+i+"</h3>")
dumpProperties (e)
++numException
++numFailed
}
//print("<hr>")
print ("Test #"+i+" passed: "+result+"")
write ("<small>Show test(<a href=\"#\" onclick=\"toggle(\'test"+i+"\')\">+</small></a>) <div id=\"test"+i+"\" style=\"display:none\" class=\"source\"><pre>")
write (""+escapeHTML(testArr[i].toString())+"")
print("</pre></div>")
}
write("<hr>")
print ("numFailed ="+numFailed+" with Exception: "+numException)
if (parent.frames.menu) {
try {
parent.frames.menu.addResult (document.title, testArr.length, numFailed, numException, this.location)
} catch (e) {
alert(e)
}
}
|
debugInfo()
}
|
random_line_split
|
|
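One caution on escapeHTML: the ampersand has to be replaced before "<" and ">", otherwise the entities just produced get escaped a second time — the samples above now use that order. Python's html.escape applies the same rule, as this check confirms:

import html

def escape_html(s):
    s = s.replace("&", "&amp;")   # must come first
    s = s.replace("<", "&lt;")
    s = s.replace(">", "&gt;")
    return s

assert escape_html("<b> & </b>") == "&lt;b&gt; &amp; &lt;/b&gt;"
assert escape_html("<b> & </b>") == html.escape("<b> & </b>", quote=False)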
_mod_clickMenu.py
|
# -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
# Menu for quickly adding waypoints when on the move
#----------------------------------------------------------------------------
# Copyright 2007-2008, Oliver White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#---------------------------------------------------------------------------
from modules.base_module import RanaModule
import cairo
from time import time
from math import pi
def getModule(*args, **kwargs):
return ClickMenu(*args, **kwargs)
class ClickMenu(RanaModule):
"""Overlay info on the map"""
def __init__(self, *args, **kwargs):
RanaModule.__init__(self, *args, **kwargs)
self.lastWaypoint = "(none)"
self.lastWaypointAddTime = 0
self.messageLingerTime = 2
def handleMessage(self, message, messageType, args):
|
def drawMapOverlay(self, cr):
"""Draw an overlay on top of the map, showing various information
about position etc."""
        # waypoints will be done in another way, so this is disabled for the time being
# (x,y,w,h) = self.get('viewport')
#
# dt = time() - self.lastWaypointAddTime
# if(dt > 0 and dt < self.messageLingerTime):
# self.drawNewWaypoint(cr, x+0.5*w, y+0.5*h, w*0.3)
# else:
# m = self.m.get('clickHandler', None)
# if(m != None):
# m.registerXYWH(x+0.25*w,y+0.25*h,w*0.5,h*0.5, "clickMenu:addWaypoint")
def drawNewWaypoint(self, cr, x, y, size):
text = self.lastWaypoint
cr.set_font_size(200)
extents = cr.text_extents(text)
(w, h) = (extents[2], extents[3])
cr.set_source_rgb(0, 0, 0.5)
cr.arc(x, y, size, 0, 2 * pi)
cr.fill()
x1 = x - 0.5 * w
y1 = y + 0.5 * h
border = 20
cr.set_source_rgb(1, 1, 1)
cr.move_to(x1, y1)
cr.show_text(text)
cr.fill()
|
if message == "addWaypoint":
m = self.m.get("waypoints", None)
if m is not None:
self.lastWaypoint = m.newWaypoint()
self.lastWaypointAddTime = time()
|
identifier_body
|
_mod_clickMenu.py
|
# -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
# Menu for quickly adding waypoints when on the move
#----------------------------------------------------------------------------
# Copyright 2007-2008, Oliver White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#---------------------------------------------------------------------------
from modules.base_module import RanaModule
import cairo
from time import time
from math import pi
def getModule(*args, **kwargs):
return ClickMenu(*args, **kwargs)
class ClickMenu(RanaModule):
"""Overlay info on the map"""
def
|
(self, *args, **kwargs):
RanaModule.__init__(self, *args, **kwargs)
self.lastWaypoint = "(none)"
self.lastWaypointAddTime = 0
self.messageLingerTime = 2
def handleMessage(self, message, messageType, args):
if message == "addWaypoint":
m = self.m.get("waypoints", None)
if m is not None:
self.lastWaypoint = m.newWaypoint()
self.lastWaypointAddTime = time()
def drawMapOverlay(self, cr):
"""Draw an overlay on top of the map, showing various information
about position etc."""
        # waypoints will be done in another way, so this is disabled for the time being
# (x,y,w,h) = self.get('viewport')
#
# dt = time() - self.lastWaypointAddTime
# if(dt > 0 and dt < self.messageLingerTime):
# self.drawNewWaypoint(cr, x+0.5*w, y+0.5*h, w*0.3)
# else:
# m = self.m.get('clickHandler', None)
# if(m != None):
# m.registerXYWH(x+0.25*w,y+0.25*h,w*0.5,h*0.5, "clickMenu:addWaypoint")
def drawNewWaypoint(self, cr, x, y, size):
text = self.lastWaypoint
cr.set_font_size(200)
extents = cr.text_extents(text)
(w, h) = (extents[2], extents[3])
cr.set_source_rgb(0, 0, 0.5)
cr.arc(x, y, size, 0, 2 * pi)
cr.fill()
x1 = x - 0.5 * w
y1 = y + 0.5 * h
border = 20
cr.set_source_rgb(1, 1, 1)
cr.move_to(x1, y1)
cr.show_text(text)
cr.fill()
|
__init__
|
identifier_name
|
_mod_clickMenu.py
|
# -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
# Menu for quickly adding waypoints when on the move
#----------------------------------------------------------------------------
# Copyright 2007-2008, Oliver White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#---------------------------------------------------------------------------
from modules.base_module import RanaModule
import cairo
from time import time
from math import pi
def getModule(*args, **kwargs):
return ClickMenu(*args, **kwargs)
class ClickMenu(RanaModule):
"""Overlay info on the map"""
def __init__(self, *args, **kwargs):
RanaModule.__init__(self, *args, **kwargs)
self.lastWaypoint = "(none)"
self.lastWaypointAddTime = 0
self.messageLingerTime = 2
def handleMessage(self, message, messageType, args):
if message == "addWaypoint":
m = self.m.get("waypoints", None)
if m is not None:
|
def drawMapOverlay(self, cr):
"""Draw an overlay on top of the map, showing various information
about position etc."""
        # waypoints will be done in another way, so this is disabled for the time being
# (x,y,w,h) = self.get('viewport')
#
# dt = time() - self.lastWaypointAddTime
# if(dt > 0 and dt < self.messageLingerTime):
# self.drawNewWaypoint(cr, x+0.5*w, y+0.5*h, w*0.3)
# else:
# m = self.m.get('clickHandler', None)
# if(m != None):
# m.registerXYWH(x+0.25*w,y+0.25*h,w*0.5,h*0.5, "clickMenu:addWaypoint")
def drawNewWaypoint(self, cr, x, y, size):
text = self.lastWaypoint
cr.set_font_size(200)
extents = cr.text_extents(text)
(w, h) = (extents[2], extents[3])
cr.set_source_rgb(0, 0, 0.5)
cr.arc(x, y, size, 0, 2 * pi)
cr.fill()
x1 = x - 0.5 * w
y1 = y + 0.5 * h
border = 20
cr.set_source_rgb(1, 1, 1)
cr.move_to(x1, y1)
cr.show_text(text)
cr.fill()
|
self.lastWaypoint = m.newWaypoint()
self.lastWaypointAddTime = time()
|
conditional_block
|
_mod_clickMenu.py
|
# -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
# Menu for quickly adding waypoints when on the move
#----------------------------------------------------------------------------
# Copyright 2007-2008, Oliver White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#---------------------------------------------------------------------------
from modules.base_module import RanaModule
import cairo
from time import time
from math import pi
def getModule(*args, **kwargs):
return ClickMenu(*args, **kwargs)
class ClickMenu(RanaModule):
"""Overlay info on the map"""
def __init__(self, *args, **kwargs):
RanaModule.__init__(self, *args, **kwargs)
self.lastWaypoint = "(none)"
self.lastWaypointAddTime = 0
self.messageLingerTime = 2
def handleMessage(self, message, messageType, args):
|
self.lastWaypoint = m.newWaypoint()
self.lastWaypointAddTime = time()
def drawMapOverlay(self, cr):
"""Draw an overlay on top of the map, showing various information
about position etc."""
        # waypoints will be done in another way, so this is disabled for the time being
# (x,y,w,h) = self.get('viewport')
#
# dt = time() - self.lastWaypointAddTime
# if(dt > 0 and dt < self.messageLingerTime):
# self.drawNewWaypoint(cr, x+0.5*w, y+0.5*h, w*0.3)
# else:
# m = self.m.get('clickHandler', None)
# if(m != None):
# m.registerXYWH(x+0.25*w,y+0.25*h,w*0.5,h*0.5, "clickMenu:addWaypoint")
def drawNewWaypoint(self, cr, x, y, size):
text = self.lastWaypoint
cr.set_font_size(200)
extents = cr.text_extents(text)
(w, h) = (extents[2], extents[3])
cr.set_source_rgb(0, 0, 0.5)
cr.arc(x, y, size, 0, 2 * pi)
cr.fill()
x1 = x - 0.5 * w
y1 = y + 0.5 * h
border = 20
cr.set_source_rgb(1, 1, 1)
cr.move_to(x1, y1)
cr.show_text(text)
cr.fill()
|
if message == "addWaypoint":
m = self.m.get("waypoints", None)
if m is not None:
|
random_line_split
|
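ClickMenu's visible behaviour is a linger timer: handleMessage stamps lastWaypointAddTime when a waypoint is added, and the (currently commented-out) overlay would draw the banner only while time() - lastWaypointAddTime stays below messageLingerTime. The pattern isolated in Python:

from time import time

class WaypointBanner:
    """Show a message for a fixed window after each waypoint is added."""
    def __init__(self, linger_seconds=2.0):
        self.linger = linger_seconds
        self.last_add_time = 0.0
        self.last_waypoint = "(none)"

    def add(self, name):
        self.last_waypoint = name
        self.last_add_time = time()

    def visible(self):
        dt = time() - self.last_add_time
        return 0 < dt < self.linger

banner = WaypointBanner()
banner.add("WP1")
assert banner.visible()  # within the 2-second window just opened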
FloatingLabel.js
|
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import { Animated, StyleSheet } from 'react-native';
import { H6 } from '@ui/typography';
import styled from '@ui/styled';
export const LabelText = styled(({ theme }) => ({
color: theme.colors.text.secondary,
backgroundColor: 'transparent',
paddingVertical: theme.sizing.baseUnit / 4,
}), 'FloatingLabel.LabelText')(H6);
const styles = StyleSheet.create({
floatLabelView: {
position: 'absolute',
bottom: 0,
top: 0,
justifyContent: 'center',
},
});
class FloatingLabel extends PureComponent {
static propTypes = {
children: PropTypes.node,
animation: PropTypes.shape({
interpolate: PropTypes.func,
}),
scaleSize: PropTypes.number, // how much smaller to make label when focused
floatingOpacity: PropTypes.number,
};
static defaultProps = {
animation: new Animated.Value(0),
scaleSize: 0.8,
floatingOpacity: 0.8,
};
state = {
labelWidth: 0,
labelHeight: 0,
};
handleLayout = ({ nativeEvent: { layout } }) => {
this.setState({
labelWidth: layout.width,
labelHeight: layout.height,
});
};
render() {
const scaledWidth = this.state.labelWidth * (1.05 - this.props.scaleSize);
const sideScaledWidth = scaledWidth / 2;
const scale = this.props.animation.interpolate({
inputRange: [0, 1],
outputRange: [1, this.props.scaleSize],
});
const opacity = this.props.animation.interpolate({
inputRange: [0, 1],
outputRange: [1, this.props.floatingOpacity],
});
const translateY = this.props.animation.interpolate({
inputRange: [0, 1],
outputRange: [0, -(this.state.labelHeight * 0.7)],
|
inputRange: [0, 1],
outputRange: [0, -sideScaledWidth],
});
const wrapperStyles = {
transform: [{ scale }, { translateX }, { translateY }],
opacity,
};
return (
<Animated.View
pointerEvents="none"
onLayout={this.handleLayout}
style={[styles.floatLabelView, wrapperStyles]}
>
<LabelText>
{this.props.children}
</LabelText>
</Animated.View>
);
}
}
export default FloatingLabel;
|
});
const translateX = this.props.animation.interpolate({
|
random_line_split
|
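FloatingLabel drives four outputs from one animation progress value in [0, 1]: scale shrinks to scaleSize, opacity drops to floatingOpacity, translateY lifts the label by 70% of its height, and translateX shifts it by half of scaledWidth = labelWidth * (1.05 - scaleSize) so the shrunken label stays pinned to the left edge. The same interpolations written as plain linear maps:

def interpolate(t, out_lo, out_hi):
    """Linear map of progress t in [0, 1] onto [out_lo, out_hi]."""
    return out_lo + (out_hi - out_lo) * t

def floating_label_transform(t, label_w, label_h,
                             scale_size=0.8, floating_opacity=0.8):
    scaled_w = label_w * (1.05 - scale_size)
    return {
        "scale": interpolate(t, 1.0, scale_size),
        "opacity": interpolate(t, 1.0, floating_opacity),
        "translateY": interpolate(t, 0.0, -(label_h * 0.7)),
        "translateX": interpolate(t, 0.0, -scaled_w / 2),
    }

print(floating_label_transform(1.0, label_w=120, label_h=20))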